text (string, 5–22M chars) | id (string, 12–177 chars) | metadata (dict) | __index_level_0__ (int64, 0–1.37k)
---|---|---|---
# Retrain Workspace
|
Cream/Cream/experiments/workspace/retrain/README.md/0
|
{
"file_path": "Cream/Cream/experiments/workspace/retrain/README.md",
"repo_id": "Cream",
"token_count": 5
}
| 333 |
import math
import re
from copy import deepcopy

import torch.nn as nn
from timm.utils import *
from timm.models.layers.activations import Swish
from timm.models.layers import CondConv2d, get_condconv_initializer
def parse_ksize(ss):
if ss.isdigit():
return int(ss)
else:
return [int(k) for k in ss.split('.')]
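# Example (illustrative sketch, not part of the original file):
#   parse_ksize('3')     -> 3
#   parse_ksize('3.5.7') -> [3, 5, 7]   (mixed kernel sizes, MixNet-style)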
def decode_arch_def(
arch_def,
depth_multiplier=1.0,
depth_trunc='ceil',
experts_multiplier=1):
arch_args = []
for stack_idx, block_strings in enumerate(arch_def):
assert isinstance(block_strings, list)
stack_args = []
repeats = []
for block_str in block_strings:
assert isinstance(block_str, str)
ba, rep = decode_block_str(block_str)
if ba.get('num_experts', 0) > 0 and experts_multiplier > 1:
ba['num_experts'] *= experts_multiplier
stack_args.append(ba)
repeats.append(rep)
arch_args.append(
scale_stage_depth(
stack_args,
repeats,
depth_multiplier,
depth_trunc))
return arch_args
def modify_block_args(block_args, kernel_size, exp_ratio):
block_type = block_args['block_type']
if block_type == 'cn':
block_args['kernel_size'] = kernel_size
elif block_type == 'er':
block_args['exp_kernel_size'] = kernel_size
else:
block_args['dw_kernel_size'] = kernel_size
if block_type == 'ir' or block_type == 'er':
block_args['exp_ratio'] = exp_ratio
return block_args
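# Example (illustrative sketch, not part of the original file): for an 'ir' block,
#   modify_block_args(ba, kernel_size=5, exp_ratio=4.0)
# sets ba['dw_kernel_size'] = 5 and ba['exp_ratio'] = 4.0.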
def decode_block_str(block_str):
""" Decode block definition string
Gets a list of block arg (dicts) through a string notation of arguments.
    E.g. ir_r2_k3_s2_e1_c16_se0.25_noskip
    All args can exist in any order, with the exception of the leading string which
    is assumed to indicate the block type.
    leading string - block type (
      ir = InvertedResidual, ds = DepthwiseSep, dsa = DepthwiseSep with pw act, cn = ConvBnAct)
r - number of repeat blocks,
k - kernel size,
s - strides (1-9),
e - expansion ratio,
c - output channels,
se - squeeze/excitation ratio
n - activation fn ('re', 'r6', 'hs', or 'sw')
Args:
block_str: a string representation of block arguments.
Returns:
A list of block args (dicts)
Raises:
        ValueError: if the string definition is not properly specified (TODO)
"""
assert isinstance(block_str, str)
ops = block_str.split('_')
block_type = ops[0] # take the block type off the front
ops = ops[1:]
options = {}
noskip = False
for op in ops:
# string options being checked on individual basis, combine if they
# grow
if op == 'noskip':
noskip = True
elif op.startswith('n'):
# activation fn
key = op[0]
v = op[1:]
if v == 're':
value = nn.ReLU
elif v == 'r6':
value = nn.ReLU6
elif v == 'sw':
value = Swish
            else:
                # unrecognized activations (e.g. 'hs') fall back to the model default
                continue
options[key] = value
else:
# all numeric options
splits = re.split(r'(\d.*)', op)
if len(splits) >= 2:
key, value = splits[:2]
options[key] = value
# if act_layer is None, the model default (passed to model init) will be
# used
act_layer = options['n'] if 'n' in options else None
exp_kernel_size = parse_ksize(options['a']) if 'a' in options else 1
pw_kernel_size = parse_ksize(options['p']) if 'p' in options else 1
# FIXME hack to deal with in_chs issue in TPU def
fake_in_chs = int(options['fc']) if 'fc' in options else 0
num_repeat = int(options['r'])
# each type of block has different valid arguments, fill accordingly
if block_type == 'ir':
block_args = dict(
block_type=block_type,
dw_kernel_size=parse_ksize(options['k']),
exp_kernel_size=exp_kernel_size,
pw_kernel_size=pw_kernel_size,
out_chs=int(options['c']),
exp_ratio=float(options['e']),
se_ratio=float(options['se']) if 'se' in options else None,
stride=int(options['s']),
act_layer=act_layer,
noskip=noskip,
)
if 'cc' in options:
block_args['num_experts'] = int(options['cc'])
elif block_type == 'ds' or block_type == 'dsa':
block_args = dict(
block_type=block_type,
dw_kernel_size=parse_ksize(options['k']),
pw_kernel_size=pw_kernel_size,
out_chs=int(options['c']),
se_ratio=float(options['se']) if 'se' in options else None,
stride=int(options['s']),
act_layer=act_layer,
pw_act=block_type == 'dsa',
noskip=block_type == 'dsa' or noskip,
)
elif block_type == 'cn':
block_args = dict(
block_type=block_type,
kernel_size=int(options['k']),
out_chs=int(options['c']),
stride=int(options['s']),
act_layer=act_layer,
)
else:
assert False, 'Unknown block type (%s)' % block_type
return block_args, num_repeat
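# Example (illustrative sketch, not part of the original file):
#   decode_block_str('ir_r2_k3_s2_e6_c24_se0.25')
#   -> ({'block_type': 'ir', 'dw_kernel_size': 3, 'exp_kernel_size': 1,
#        'pw_kernel_size': 1, 'out_chs': 24, 'exp_ratio': 6.0, 'se_ratio': 0.25,
#        'stride': 2, 'act_layer': None, 'noskip': False}, 2)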
def scale_stage_depth(
stack_args,
repeats,
depth_multiplier=1.0,
depth_trunc='ceil'):
""" Per-stage depth scaling
Scales the block repeats in each stage. This depth scaling impl maintains
compatibility with the EfficientNet scaling method, while allowing sensible
scaling for other models that may have multiple block arg definitions in each stage.
"""
# We scale the total repeat count for each stage, there may be multiple
# block arg defs per stage so we need to sum.
num_repeat = sum(repeats)
if depth_trunc == 'round':
# Truncating to int by rounding allows stages with few repeats to remain
# proportionally smaller for longer. This is a good choice when stage definitions
# include single repeat stages that we'd prefer to keep that way as
# long as possible
num_repeat_scaled = max(1, round(num_repeat * depth_multiplier))
else:
# The default for EfficientNet truncates repeats to int via 'ceil'.
# Any multiplier > 1.0 will result in an increased depth for every
# stage.
num_repeat_scaled = int(math.ceil(num_repeat * depth_multiplier))
# Proportionally distribute repeat count scaling to each block definition in the stage.
# Allocation is done in reverse as it results in the first block being less likely to be scaled.
# The first block makes less sense to repeat in most of the arch
# definitions.
repeats_scaled = []
for r in repeats[::-1]:
rs = max(1, round((r / num_repeat * num_repeat_scaled)))
repeats_scaled.append(rs)
num_repeat -= r
num_repeat_scaled -= rs
repeats_scaled = repeats_scaled[::-1]
# Apply the calculated scaling to each block arg in the stage
sa_scaled = []
for ba, rep in zip(stack_args, repeats_scaled):
sa_scaled.extend([deepcopy(ba) for _ in range(rep)])
return sa_scaled
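# Worked example (illustrative sketch, not part of the original file):
#   repeats=[1, 2], depth_multiplier=1.2, depth_trunc='ceil'
#   num_repeat = 3, num_repeat_scaled = ceil(3 * 1.2) = 4
#   reverse pass: r=2 -> round(2/3 * 4) = 3; then r=1 -> round(1/1 * 1) = 1
#   repeats_scaled = [1, 3], so the stage's block args are copied 1x and 3x.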
def init_weight_goog(m, n='', fix_group_fanout=True, last_bn=None):
""" Weight initialization as per Tensorflow official implementations.
Args:
m (nn.Module): module to init
n (str): module name
fix_group_fanout (bool): enable correct (matching Tensorflow TPU impl) fanout calculation w/ group convs
Handles layers in EfficientNet, EfficientNet-CondConv, MixNet, MnasNet, MobileNetV3, etc:
* https://github.com/tensorflow/tpu/blob/master/models/official/mnasnet/mnasnet_model.py
* https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/efficientnet_model.py
"""
if isinstance(m, CondConv2d):
fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
if fix_group_fanout:
fan_out //= m.groups
init_weight_fn = get_condconv_initializer(lambda w: w.data.normal_(
0, math.sqrt(2.0 / fan_out)), m.num_experts, m.weight_shape)
init_weight_fn(m.weight)
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.Conv2d):
fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
if fix_group_fanout:
fan_out //= m.groups
m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
if m.bias is not None:
m.bias.data.zero_()
    elif isinstance(m, nn.BatchNorm2d):
        # 'last_bn' lists the final BN layer name in each block; zero its gamma
        # (weight) when zero_gamma init is requested, otherwise init to 1.
        if last_bn and n in last_bn:
            m.weight.data.zero_()
            m.bias.data.zero_()
        else:
            m.weight.data.fill_(1.0)
            m.bias.data.zero_()
elif isinstance(m, nn.Linear):
fan_out = m.weight.size(0) # fan-out
fan_in = 0
if 'routing_fn' in n:
fan_in = m.weight.size(1)
init_range = 1.0 / math.sqrt(fan_in + fan_out)
m.weight.data.uniform_(-init_range, init_range)
m.bias.data.zero_()
def efficientnet_init_weights(
model: nn.Module,
init_fn=None,
zero_gamma=False):
last_bn = []
if zero_gamma:
prev_n = ''
for n, m in model.named_modules():
if isinstance(m, nn.BatchNorm2d):
                if ''.join(prev_n.split('.')[:-1]) != ''.join(n.split('.')[:-1]):
last_bn.append(prev_n)
prev_n = n
last_bn.append(prev_n)
init_fn = init_fn or init_weight_goog
for n, m in model.named_modules():
init_fn(m, n, last_bn=last_bn)
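
# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative assumption, not part of the original
# file): decode a small two-stage arch definition and print the scaled stages.
if __name__ == '__main__':
    demo_arch_def = [
        ['ds_r1_k3_s1_c16'],            # stage 0: one depthwise-separable block
        ['ir_r2_k5_s2_e6_c24_se0.25'],  # stage 1: two inverted-residual blocks
    ]
    # depth_multiplier=1.2 grows each stage's total repeat count via 'ceil'
    for stage_idx, stage in enumerate(
            decode_arch_def(demo_arch_def, depth_multiplier=1.2)):
        print(stage_idx, len(stage), [ba['block_type'] for ba in stage])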
|
Cream/Cream/lib/utils/builder_util.py/0
|
{
"file_path": "Cream/Cream/lib/utils/builder_util.py",
"repo_id": "Cream",
"token_count": 4620
}
| 334 |
# EfficientViT for Object Detection and Instance Segmentation
The codebase implements the object detection and instance segmentation framework with [MMDetection](https://github.com/open-mmlab/mmdetection), using EfficientViT as the backbone.
## Model Zoo
### RetinaNet Object Detection
|Model | Pretrain | Lr Schd | Box AP | AP@50 | AP@75 | Config | Link |
|:----:|:----:|:----:|:----:|:----:|:----:|:----:|:----:|
|EfficientViT-M4 | ImageNet-1k | 1x | 32.7 | 52.2 | 34.1 | [config](./configs/retinanet_efficientvit_m4_fpn_1x_coco.py) | [model](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/retinanet_efficientvit_m4_fpn_1x_coco.pth)/[log](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/retinanet_efficientvit_m4_fpn_1x_coco_log.json) |
### Mask R-CNN Instance Segmentation
|Model | Pretrain | Lr Schd | Mask AP | AP@50 | AP@75 | Config | Link |
|:----:|:----:|:----:|:----:|:----:|:----:|:----:|:----:|
|EfficientViT-M4 | ImageNet-1k |1x| 31.0 | 51.2 | 32.2 | [config](./configs/mask_rcnn_efficientvit_m4_fpn_1x_coco.py) | [model](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/mask_rcnn_efficientvit_m4_fpn_1x_coco.pth)/[log](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/mask_rcnn_efficientvit_m4_fpn_1x_coco_log.json) |
## Get Started
Please follow the steps below to set up EfficientViT for downstream tasks.
### Install requirements
Install [mmcv-full](https://github.com/open-mmlab/mmcv) and [MMDetection](https://github.com/open-mmlab/mmdetection) via [MIM](https://github.com/open-mmlab/mim):
```bash
pip install -U openmim
mim install mmcv-full
mim install mmdet
```
### Data preparation
Prepare COCO 2017 dataset according to the [instructions in MMDetection](https://github.com/open-mmlab/mmdetection/blob/master/docs/en/1_exist_data_model.md#test-existing-models-on-standard-datasets).
The dataset should be organized as follows:
```
downstream
βββ data
β βββ coco
β β βββ annotations
β β βββ train2017
β β βββ val2017
β β βββ test2017
```
## Evaluation
First, prepare the MSCOCO pretrained models by downloading them from the [model-zoo](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo).
Below are the instructions for evaluating the models on MSCOCO 2017 val set:
<details>
<summary>
Object Detection
</summary>
To evaluate the RetinaNet model with EfficientViT_M4 as backbone, run:
```bash
bash ./dist_test.sh configs/retinanet_efficientvit_m4_fpn_1x_coco.py ./retinanet_efficientvit_m4_fpn_1x_coco.pth 8 --eval bbox
```
where 8 refers to the number of GPUs. For additional arguments, please refer to [MMDetection](https://github.com/open-mmlab/mmdetection/blob/master/docs/en/1_exist_data_model.md#training-on-multiple-gpus).
</details>
<details>
<summary>
Instance Segmentation
</summary>
To evaluate the Mask R-CNN model with EfficientViT_M4 as backbone, run:
```bash
bash ./dist_test.sh configs/mask_rcnn_efficientvit_m4_fpn_1x_coco.py ./mask_rcnn_efficientvit_m4_fpn_1x_coco.pth 8 --eval bbox segm
```
where 8 refers to the number of GPUs. For additional arguments, please refer to [MMDetection](https://github.com/open-mmlab/mmdetection/blob/master/docs/en/1_exist_data_model.md#training-on-multiple-gpus).
</details>
## Training
First, prepare the ImageNet-1k pretrained EfficientViT-M4 model by downloading it from the [model-zoo](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo).
Below are the instructions for training the models on MSCOCO 2017 train set:
<details>
<summary>
Object Detection
</summary>
To train the RetinaNet model with EfficientViT_M4 as backbone on a single machine with multiple GPUs, run:
```bash
bash ./dist_train.sh configs/retinanet_efficientvit_m4_fpn_1x_coco.py 8 --cfg-options model.backbone.pretrained=$PATH_TO_IMGNET_PRETRAIN_MODEL
```
where 8 refers to the number of GPUs. For additional arguments, please refer to [MMDetection](https://github.com/open-mmlab/mmdetection/blob/master/docs/en/1_exist_data_model.md#training-on-multiple-gpus).
</details>
<details>
<summary>
Instance Segmentation
</summary>
To train the Mask R-CNN model with EfficientViT_M4 as backbone on a single machine with multiple GPUs, run:
```bash
bash ./dist_train.sh configs/mask_rcnn_efficientvit_m4_fpn_1x_coco.py 8 --cfg-options model.backbone.pretrained=$PATH_TO_IMGNET_PRETRAIN_MODEL
```
where 8 refers to the number of GPUs. For additional arguments, please refer to [MMDetection](https://github.com/open-mmlab/mmdetection/blob/master/docs/en/1_exist_data_model.md#training-on-multiple-gpus).
</details>
## Acknowledgement
The downstream task implementation is mainly based on the following codebases. We sincerely thank the authors for their wonderful work.
[MMDetection](https://github.com/open-mmlab/mmdetection), [Swin-Transformer-Object-Detection](https://github.com/SwinTransformer/Swin-Transformer-Object-Detection), [PoolFormer](https://github.com/sail-sg/poolformer/tree/main/detection).
|
Cream/EfficientViT/downstream/README.md/0
|
{
"file_path": "Cream/EfficientViT/downstream/README.md",
"repo_id": "Cream",
"token_count": 1886
}
| 335 |
# model settings
norm_cfg = dict(type='BN', requires_grad=False)
model = dict(
type='FasterRCNN',
pretrained='open-mmlab://detectron2/resnet50_caffe',
backbone=dict(
type='ResNet',
depth=50,
num_stages=3,
strides=(1, 2, 2),
dilations=(1, 1, 1),
out_indices=(2, ),
frozen_stages=1,
norm_cfg=norm_cfg,
norm_eval=True,
style='caffe'),
rpn_head=dict(
type='RPNHead',
in_channels=1024,
feat_channels=1024,
anchor_generator=dict(
type='AnchorGenerator',
scales=[2, 4, 8, 16, 32],
ratios=[0.5, 1.0, 2.0],
strides=[16]),
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0]),
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='L1Loss', loss_weight=1.0)),
roi_head=dict(
type='StandardRoIHead',
shared_head=dict(
type='ResLayer',
depth=50,
stage=3,
stride=2,
dilation=1,
style='caffe',
norm_cfg=norm_cfg,
norm_eval=True),
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0),
out_channels=1024,
featmap_strides=[16]),
bbox_head=dict(
type='BBoxHead',
with_avg_pool=True,
roi_feat_size=7,
in_channels=2048,
num_classes=80,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2]),
reg_class_agnostic=False,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='L1Loss', loss_weight=1.0))),
# model training and testing settings
train_cfg=dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
match_low_quality=True,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_pre=12000,
max_per_img=2000,
nms=dict(type='nms', iou_threshold=0.7),
min_bbox_size=0),
rcnn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False)),
test_cfg=dict(
rpn=dict(
nms_pre=6000,
max_per_img=1000,
nms=dict(type='nms', iou_threshold=0.7),
min_bbox_size=0),
rcnn=dict(
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.5),
max_per_img=100)))
|
Cream/EfficientViT/downstream/configs/_base_/models/faster_rcnn_r50_caffe_c4.py/0
|
{
"file_path": "Cream/EfficientViT/downstream/configs/_base_/models/faster_rcnn_r50_caffe_c4.py",
"repo_id": "Cream",
"token_count": 2254
}
| 336 |
# optimizer
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=None)
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=0.001,
step=[16, 22])
runner = dict(type='EpochBasedRunner', max_epochs=24)
|
Cream/EfficientViT/downstream/configs/_base_/schedules/schedule_2x.py/0
|
{
"file_path": "Cream/EfficientViT/downstream/configs/_base_/schedules/schedule_2x.py",
"repo_id": "Cream",
"token_count": 135
}
| 337 |
import torch
import rpe_index_cpp
EXPECTED_VERSION = "1.2.0"
assert rpe_index_cpp.version() == EXPECTED_VERSION, \
f"""Unmatched `rpe_index_cpp` version: {rpe_index_cpp.version()}, expected version: {EXPECTED_VERSION}
Please re-build the package `rpe_ops`."""
class RPEIndexFunction(torch.autograd.Function):
'''Y[b, h, i, j] = input[b, h, i, index[i, j]]'''
@staticmethod
def forward(ctx, input, index):
'''
Y[b, h, i, j] = input[b, h, i, index[i, j]]
Parameters
----------
input: torch.Tensor, float32
The shape is (B, H, L_query, num_buckets)
index: torch.Tensor, int32
The shape is (L_query, L_key)
where B is the batch size, and H is the number of attention heads.
Returns
-------
Y: torch.Tensor, float32
The shape is (B, H, L_query, L_key)
'''
num_buckets = input.size(-1)
ctx.save_for_backward(index)
ctx.input_shape = input.shape
forward_fn = rpe_index_cpp.forward_cpu if \
input.device.type == 'cpu' else rpe_index_cpp.forward_gpu
output = forward_fn(input, index)
return output
@staticmethod
def backward(ctx, grad_output):
'''
- Inputs
grad_output: float32 (B, H, L_query, L_key)
- Outputs
grad_input: float32 (B, H, L_query, num_buckets)
'''
index = ctx.saved_tensors[0]
if ctx.needs_input_grad[0]:
grad_input = grad_output.new_zeros(ctx.input_shape)
backward_fn = rpe_index_cpp.backward_cpu if \
grad_output.device.type == 'cpu' else rpe_index_cpp.backward_gpu
backward_fn(grad_input, grad_output, index)
return grad_input, None
return None, None
if __name__ == '__main__':
import numpy as np
import time
B = 128
H = 32
L_query = 50
L_key = L_query
num_buckets = 50
x = torch.randn(B, H, L_query, num_buckets)
index = torch.randint(low=0, high=num_buckets, size=(L_query, L_key))
index = index.to(torch.int)
offset = torch.arange(0, L_query * num_buckets, num_buckets).view(-1, 1)
def test(x, index, offset):
tic = time.time()
x1 = x.clone()
x1.requires_grad = True
x2 = x.clone()
x2.requires_grad = True
y = RPEIndexFunction.apply(x1, index)
        gt_y = x2.flatten(2)[:, :, (index + offset).flatten()].view(
            B, H, L_query, L_key)
np.testing.assert_almost_equal(
gt_y.detach().cpu().numpy(), y.detach().cpu().numpy())
mask = torch.randn(gt_y.shape, device=x.device)
(gt_y * mask).sum().backward()
(y * mask).sum().backward()
print("X1:", x1.grad.cpu().numpy().flatten().sum())
print("X2:", x2.grad.cpu().numpy().flatten().sum())
np.testing.assert_almost_equal(
x1.grad.cpu().numpy(), x2.grad.cpu().numpy(), decimal=5)
print("Test over", x.device)
print("Cost:", time.time() - tic)
test(x, index, offset)
if torch.cuda.is_available():
test(x.cuda(), index.cuda(), offset.cuda())
|
Cream/MiniViT/Mini-DeiT/rpe_ops/rpe_index.py/0
|
{
"file_path": "Cream/MiniViT/Mini-DeiT/rpe_ops/rpe_index.py",
"repo_id": "Cream",
"token_count": 1572
}
| 338 |
MODEL:
TYPE: swin
NAME: swin_tiny_patch4_window7_224
DROP_PATH_RATE: 0.2
SWIN:
EMBED_DIM: 96
DEPTHS: [ 2, 2, 6, 2 ]
NUM_HEADS: [ 3, 6, 12, 24 ]
WINDOW_SIZE: 7
|
Cream/MiniViT/Mini-Swin/configs/swin_tiny_patch4_window7_224.yaml/0
|
{
"file_path": "Cream/MiniViT/Mini-Swin/configs/swin_tiny_patch4_window7_224.yaml",
"repo_id": "Cream",
"token_count": 102
}
| 339 |
OPENAI_DATASET_MEAN = (0.48145466, 0.4578275, 0.40821073)
OPENAI_DATASET_STD = (0.26862954, 0.26130258, 0.27577711)
|
Cream/TinyCLIP/src/open_clip/constants.py/0
|
{
"file_path": "Cream/TinyCLIP/src/open_clip/constants.py",
"repo_id": "Cream",
"token_count": 66
}
| 340 |
import hashlib
import os
import time
import urllib
import warnings
from functools import partial
from typing import Dict, Union
from tqdm import tqdm
from .version import __version__
try:
from huggingface_hub import hf_hub_download
hf_hub_download = partial(
hf_hub_download, library_name="open_clip", library_version=__version__)
_has_hf_hub = True
except ImportError:
hf_hub_download = None
_has_hf_hub = False
def _pcfg(url='', hf_hub='', mean=None, std=None):
return dict(
url=url,
hf_hub=hf_hub,
mean=mean,
std=std,
)
_RN50 = dict(
openai=_pcfg(
"https://openaipublic.azureedge.net/clip/models/afeb0e10f9e5a86da6080e35cf09123aca3b358a0c3e3b6c78a7b63bc04b6762/RN50.pt"),
yfcc15m=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-yfcc15m-455df137.pt"),
cc12m=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-cc12m-f000538c.pt"),
)
_RN50_quickgelu = dict(
openai=_pcfg(
"https://openaipublic.azureedge.net/clip/models/afeb0e10f9e5a86da6080e35cf09123aca3b358a0c3e3b6c78a7b63bc04b6762/RN50.pt"),
yfcc15m=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-yfcc15m-455df137.pt"),
cc12m=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-cc12m-f000538c.pt"),
)
_RN101 = dict(
openai=_pcfg(
"https://openaipublic.azureedge.net/clip/models/8fa8567bab74a42d41c5915025a8e4538c3bdbe8804a470a72f30b0d94fab599/RN101.pt"),
yfcc15m=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn101-quickgelu-yfcc15m-3e04b30e.pt"),
)
_RN101_quickgelu = dict(
openai=_pcfg(
"https://openaipublic.azureedge.net/clip/models/8fa8567bab74a42d41c5915025a8e4538c3bdbe8804a470a72f30b0d94fab599/RN101.pt"),
yfcc15m=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn101-quickgelu-yfcc15m-3e04b30e.pt"),
)
_RN50x4 = dict(
openai=_pcfg(
"https://openaipublic.azureedge.net/clip/models/7e526bd135e493cef0776de27d5f42653e6b4c8bf9e0f653bb11773263205fdd/RN50x4.pt"),
)
_RN50x16 = dict(
openai=_pcfg(
"https://openaipublic.azureedge.net/clip/models/52378b407f34354e150460fe41077663dd5b39c54cd0bfd2b27167a4a06ec9aa/RN50x16.pt"),
)
_RN50x64 = dict(
openai=_pcfg(
"https://openaipublic.azureedge.net/clip/models/be1cfb55d75a9666199fb2206c106743da0f6468c9d327f3e0d0a543a9919d9c/RN50x64.pt"),
)
_VITB32 = dict(
openai=_pcfg(
"https://openaipublic.azureedge.net/clip/models/40d365715913c9da98579312b702a82c18be219cc2a73407c4526f58eba950af/ViT-B-32.pt"),
laion400m_e31=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e31-d867053b.pt"),
laion400m_e32=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e32-46683a32.pt"),
laion2b_e16=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-laion2b_e16-af8dbd0c.pth"),
laion2b_s34b_b79k=_pcfg(hf_hub='laion/CLIP-ViT-B-32-laion2B-s34B-b79K/')
)
_VITB32_quickgelu = dict(
openai=_pcfg(
"https://openaipublic.azureedge.net/clip/models/40d365715913c9da98579312b702a82c18be219cc2a73407c4526f58eba950af/ViT-B-32.pt"),
laion400m_e31=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e31-d867053b.pt"),
laion400m_e32=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e32-46683a32.pt"),
)
_VITB16 = dict(
openai=_pcfg(
"https://openaipublic.azureedge.net/clip/models/5806e77cd80f8b59890b7e101eabd078d9fb84e6937f9e85e4ecb61988df416f/ViT-B-16.pt"),
laion400m_e31=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_16-laion400m_e31-00efa78f.pt"),
laion400m_e32=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_16-laion400m_e32-55e67d44.pt"),
# laion400m_32k=_pcfg(
# url="",
# mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
# laion400m_64k=_pcfg(
# url="",
# mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
laion2b_s34b_b88k=_pcfg(hf_hub='laion/CLIP-ViT-B-16-laion2B-s34B-b88K/'),
)
_VITB16_PLUS_240 = dict(
laion400m_e31=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_16_plus_240-laion400m_e31-8fb26589.pt"),
laion400m_e32=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_16_plus_240-laion400m_e32-699c4b84.pt"),
)
_VITL14 = dict(
openai=_pcfg(
"https://openaipublic.azureedge.net/clip/models/b8cca3fd41ae0c99ba7e8951adf17d267cdb84cd88be6f7c2e0eca1737a03836/ViT-L-14.pt"),
laion400m_e31=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_l_14-laion400m_e31-69988bb6.pt"),
laion400m_e32=_pcfg(
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_l_14-laion400m_e32-3d133497.pt"),
laion2b_s32b_b82k=_pcfg(
hf_hub='laion/CLIP-ViT-L-14-laion2B-s32B-b82K/',
mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
)
_VITL14_336 = dict(
openai=_pcfg(
"https://openaipublic.azureedge.net/clip/models/3035c92b350959924f9f00213499208652fc7ea050643e8b385c2dac08641f02/ViT-L-14-336px.pt"),
)
_VITH14 = dict(
laion2b_s32b_b79k=_pcfg(hf_hub='laion/CLIP-ViT-H-14-laion2B-s32B-b79K/'),
)
_VITg14 = dict(
laion2b_s12b_b42k=_pcfg(hf_hub='laion/CLIP-ViT-g-14-laion2B-s12B-b42K/'),
)
# TinyCLIP
_TINYCLIP_VIT_39M_16_TEXT_19M = {
"YFCC15M": _pcfg(
"https://github.com/wkcn/TinyCLIP-model-zoo/releases/download/checkpoints/TinyCLIP-ViT-39M-16-Text-19M-YFCC15M.pt",
),
}
_TINYCLIP_VIT_8M_16_TEXT_3M = {
"YFCC15M": _pcfg(
"https://github.com/wkcn/TinyCLIP-model-zoo/releases/download/checkpoints/TinyCLIP-ViT-8M-16-Text-3M-YFCC15M.pt",
),
}
_TINYCLIP_RESNET_30M_TEXT_29M = {
"LAION400M": _pcfg(
"https://github.com/wkcn/TinyCLIP-model-zoo/releases/download/checkpoints/TinyCLIP-ResNet-30M-Text-29M-LAION400M.pt",
),
}
_TINYCLIP_RESNET_19M_TEXT_19M = {
"LAION400M": _pcfg(
"https://github.com/wkcn/TinyCLIP-model-zoo/releases/download/checkpoints/TinyCLIP-ResNet-19M-Text-19M-LAION400M.pt",
),
}
_TINYCLIP_VIT_61M_32_TEXT_29M = {
"LAION400M": _pcfg(
"https://github.com/wkcn/TinyCLIP-model-zoo/releases/download/checkpoints/TinyCLIP-ViT-61M-32-Text-29M-LAION400M.pt",
),
}
_TINYCLIP_VIT_40M_32_TEXT_19M = {
"LAION400M": _pcfg(
"https://github.com/wkcn/TinyCLIP-model-zoo/releases/download/checkpoints/TinyCLIP-ViT-40M-32-Text-19M-LAION400M.pt",
),
}
_PRETRAINED = {
"RN50": _RN50,
"RN50-quickgelu": _RN50_quickgelu,
"RN101": _RN101,
"RN101-quickgelu": _RN101_quickgelu,
"RN50x4": _RN50x4,
"RN50x16": _RN50x16,
"RN50x64": _RN50x64,
"ViT-B-32": _VITB32,
"ViT-B-32-quickgelu": _VITB32_quickgelu,
"ViT-B-16": _VITB16,
"ViT-B-16-plus-240": _VITB16_PLUS_240,
"ViT-L-14": _VITL14,
"ViT-L-14-336": _VITL14_336,
"ViT-H-14": _VITH14,
"ViT-g-14": _VITg14,
"TinyCLIP-ViT-39M-16-Text-19M": _TINYCLIP_VIT_39M_16_TEXT_19M,
"TinyCLIP-ViT-8M-16-Text-3M": _TINYCLIP_VIT_8M_16_TEXT_3M,
"TinyCLIP-ResNet-30M-Text-29M": _TINYCLIP_RESNET_30M_TEXT_29M,
"TinyCLIP-ResNet-19M-Text-19M": _TINYCLIP_RESNET_19M_TEXT_19M,
"TinyCLIP-ViT-61M-32-Text-29M": _TINYCLIP_VIT_61M_32_TEXT_29M,
"TinyCLIP-ViT-40M-32-Text-19M": _TINYCLIP_VIT_40M_32_TEXT_19M,
}
def list_pretrained(as_str: bool = False):
""" returns list of pretrained models
Returns a tuple (model_name, pretrain_tag) by default or 'name:tag' if as_str == True
"""
return [':'.join([k, t]) if as_str else (k, t) for k in _PRETRAINED.keys() for t in _PRETRAINED[k].keys()]
def list_pretrained_tag_models(tag: str):
""" return all models having the specified pretrain tag """
models = []
for k in _PRETRAINED.keys():
if tag in _PRETRAINED[k]:
models.append(k)
return models
def list_pretrained_model_tags(model: str):
""" return all pretrain tags for the specified model architecture """
tags = []
if model in _PRETRAINED:
tags.extend(_PRETRAINED[model].keys())
return tags
def is_pretrained_cfg(model: str, tag: str):
    if model not in _PRETRAINED:
        return False
    # match both the exact tag and its lowercase form, as get_pretrained_cfg does
    return tag in _PRETRAINED[model] or tag.lower() in _PRETRAINED[model]
def get_pretrained_cfg(model: str, tag: str):
if model not in _PRETRAINED:
return {}
model_pretrained = _PRETRAINED[model]
if tag in model_pretrained:
return model_pretrained[tag]
return model_pretrained.get(tag.lower(), {})
def get_pretrained_url(model: str, tag: str):
cfg = get_pretrained_cfg(model, tag)
return cfg.get('url', '')
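# Usage sketch (illustrative assumption, not part of the original file):
#   list_pretrained(as_str=True)              # ['RN50:openai', 'RN50:yfcc15m', ...]
#   get_pretrained_cfg('ViT-B-32', 'openai')  # {'url': 'https://...', ...}
#   get_pretrained_url('TinyCLIP-ViT-40M-32-Text-19M', 'LAION400M')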
def is_local_master():
return int(os.getenv('LOCAL_RANK', 0)) == 0
def download_pretrained_from_url(
        url: str,
        cache_dir: Union[str, None] = None,
):
if not cache_dir:
cache_dir = os.path.expanduser("~/.cache/clip")
os.makedirs(cache_dir, exist_ok=True)
filename = os.path.basename(url)
download_target = os.path.join(cache_dir, filename)
if is_local_master():
for _ in range(20):
try:
return _download_pretrained(url, cache_dir)
except Exception as e:
print(f'Download pretrained: {url}, {cache_dir}, {e}')
time.sleep(10)
else:
while not os.path.exists(download_target):
time.sleep(1)
return download_target
def _download_pretrained(url: str, root: str = os.path.expanduser("~/.cache/clip")):
os.makedirs(root, exist_ok=True)
filename = os.path.basename(url)
if 'openaipublic' in url:
expected_sha256 = url.split("/")[-2]
else:
expected_sha256 = ''
download_target = os.path.join(root, filename)
if os.path.exists(download_target) and not os.path.isfile(download_target):
raise RuntimeError(
f"{download_target} exists and is not a regular file")
if os.path.isfile(download_target):
if expected_sha256:
if hashlib.sha256(open(download_target, "rb").read()).hexdigest() == expected_sha256:
return download_target
else:
warnings.warn(
f"{download_target} exists, but the SHA256 checksum does not match; re-downloading the file")
else:
return download_target
download_target_tmp = download_target + ".tmp"
with urllib.request.urlopen(url) as source, open(download_target_tmp, "wb") as output:
with tqdm(total=int(source.info().get("Content-Length")), ncols=80, unit='iB', unit_scale=True) as loop:
while True:
buffer = source.read(8192)
if not buffer:
break
output.write(buffer)
loop.update(len(buffer))
if expected_sha256 and hashlib.sha256(open(download_target_tmp, "rb").read()).hexdigest() != expected_sha256:
os.remove(download_target_tmp)
raise RuntimeError(
f"Model has been downloaded but the SHA256 checksum does not not match")
os.rename(download_target_tmp, download_target)
return download_target
def has_hf_hub(necessary=False):
if not _has_hf_hub and necessary:
# if no HF Hub module installed, and it is necessary to continue, raise error
raise RuntimeError(
'Hugging Face hub model specified but package not installed. Run `pip install huggingface_hub`.')
return _has_hf_hub
def download_pretrained_from_hf(
model_id: str,
filename: str = 'open_clip_pytorch_model.bin',
revision=None,
cache_dir: Union[str, None] = None,
):
has_hf_hub(True)
cached_file = hf_hub_download(
model_id, filename, revision=revision, cache_dir=cache_dir)
return cached_file
def download_pretrained(
cfg: Dict,
force_hf_hub: bool = False,
cache_dir: Union[str, None] = None,
):
target = ''
if not cfg:
return target
download_url = cfg.get('url', '')
download_hf_hub = cfg.get('hf_hub', '')
if download_hf_hub and force_hf_hub:
# use HF hub even if url exists
download_url = ''
if download_url:
target = download_pretrained_from_url(
download_url, cache_dir=cache_dir)
elif download_hf_hub:
has_hf_hub(True)
# we assume the hf_hub entries in pretrained config combine model_id + filename in
# 'org/model_name/filename.pt' form. To specify just the model id w/o filename and
# use 'open_clip_pytorch_model.bin' default, there must be a trailing slash 'org/model_name/'.
model_id, filename = os.path.split(download_hf_hub)
if filename:
target = download_pretrained_from_hf(
model_id, filename=filename, cache_dir=cache_dir)
else:
target = download_pretrained_from_hf(model_id, cache_dir=cache_dir)
return target
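# Usage sketch (illustrative assumption, not part of the original file):
#   cfg = get_pretrained_cfg('ViT-B-32', 'laion2b_e16')
#   ckpt_path = download_pretrained(cfg)  # fetches via URL, or from the HF hub
#                                         # when only an 'hf_hub' entry exists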
|
Cream/TinyCLIP/src/open_clip/pretrained.py/0
|
{
"file_path": "Cream/TinyCLIP/src/open_clip/pretrained.py",
"repo_id": "Cream",
"token_count": 6734
}
| 341 |
# --------------------------------------------------------
# TinyViT Utils
# Copyright (c) 2022 Microsoft
# --------------------------------------------------------
import torch
import torch.distributed as dist
def reduce_tensor(tensor, n=None):
if n is None:
n = dist.get_world_size()
rt = tensor.clone()
dist.all_reduce(rt, op=dist.ReduceOp.SUM)
rt = rt / n
return rt
class AverageMeter:
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
# local
self._val = 0
self._sum = 0
self._count = 0
# global
self._history_avg = 0
self._history_count = 0
self._avg = None
def update(self, val, n=1):
self._val = val
self._sum += val * n
self._count += n
self._avg = None
@property
def val(self):
return self._val
@property
def count(self):
return self._count + self._history_count
@property
def avg(self):
if self._avg is None:
# compute avg
r = self._history_count / max(1, self._history_count + self._count)
_avg = self._sum / max(1, self._count)
self._avg = r * self._history_avg + (1.0 - r) * _avg
return self._avg
def sync(self):
buf = torch.tensor([self._sum, self._count],
dtype=torch.float32).cuda()
buf = reduce_tensor(buf, 1)
_sum, _count = buf.tolist()
_avg = _sum / max(1, _count)
r = self._history_count / max(1, self._history_count + _count)
self._history_avg = r * self._history_avg + (1.0 - r) * _avg
self._history_count += _count
self._sum = 0
self._count = 0
self._avg = None
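# Usage sketch (illustrative assumption, not part of the original file):
#   meter = AverageMeter()
#   meter.update(loss.item(), n=batch_size)  # accumulate local values
#   meter.sync()                             # all-reduce sums into the history
#   print(meter.avg)                         # history-weighted mean over ranks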
|
Cream/TinyCLIP/src/training/my_meter.py/0
|
{
"file_path": "Cream/TinyCLIP/src/training/my_meter.py",
"repo_id": "Cream",
"token_count": 829
}
| 342 |
MODEL:
NAME: TinyViT-21M-1k
TYPE: tiny_vit
DROP_PATH_RATE: 0.2
TINY_VIT:
DEPTHS: [ 2, 2, 6, 2 ]
NUM_HEADS: [ 3, 6, 12, 18 ]
WINDOW_SIZES: [ 7, 7, 14, 7 ]
EMBED_DIMS: [96, 192, 384, 576]
|
Cream/TinyViT/configs/1k/tiny_vit_21m.yaml/0
|
{
"file_path": "Cream/TinyViT/configs/1k/tiny_vit_21m.yaml",
"repo_id": "Cream",
"token_count": 127
}
| 343 |
""" AutoAugment, RandAugment, and AugMix for PyTorch
This code implements the searched ImageNet policies with various tweaks and improvements and
does not include any of the search code.
AA and RA Implementation adapted from:
https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/autoaugment.py
AugMix adapted from:
https://github.com/google-research/augmix
Papers:
AutoAugment: Learning Augmentation Policies from Data - https://arxiv.org/abs/1805.09501
Learning Data Augmentation Strategies for Object Detection - https://arxiv.org/abs/1906.11172
RandAugment: Practical automated data augmentation... - https://arxiv.org/abs/1909.13719
AugMix: A Simple Data Processing Method to Improve Robustness and Uncertainty - https://arxiv.org/abs/1912.02781
Hacked together by / Copyright 2020 Ross Wightman
"""
from .aug_random import random, np_random
import math
import re
from PIL import Image, ImageOps, ImageEnhance, ImageChops
import PIL
import numpy as np
_PIL_VER = tuple([int(x) for x in PIL.__version__.split('.')[:2]])
_FILL = (128, 128, 128)
_LEVEL_DENOM = 10. # denominator for conversion from 'Mx' magnitude scale to fractional aug level for op arguments
_HPARAMS_DEFAULT = dict(
translate_const=250,
img_mean=_FILL,
)
_RANDOM_INTERPOLATION = (Image.BILINEAR, Image.BICUBIC)
def _interpolation(kwargs):
interpolation = kwargs.pop('resample', Image.BILINEAR)
if isinstance(interpolation, (list, tuple)):
return random.choice(interpolation)
else:
return interpolation
def _check_args_tf(kwargs):
if 'fillcolor' in kwargs and _PIL_VER < (5, 0):
kwargs.pop('fillcolor')
kwargs['resample'] = _interpolation(kwargs)
def shear_x(img, factor, **kwargs):
_check_args_tf(kwargs)
return img.transform(img.size, Image.AFFINE, (1, factor, 0, 0, 1, 0), **kwargs)
def shear_y(img, factor, **kwargs):
_check_args_tf(kwargs)
return img.transform(img.size, Image.AFFINE, (1, 0, 0, factor, 1, 0), **kwargs)
def translate_x_rel(img, pct, **kwargs):
pixels = pct * img.size[0]
_check_args_tf(kwargs)
return img.transform(img.size, Image.AFFINE, (1, 0, pixels, 0, 1, 0), **kwargs)
def translate_y_rel(img, pct, **kwargs):
pixels = pct * img.size[1]
_check_args_tf(kwargs)
return img.transform(img.size, Image.AFFINE, (1, 0, 0, 0, 1, pixels), **kwargs)
def translate_x_abs(img, pixels, **kwargs):
_check_args_tf(kwargs)
return img.transform(img.size, Image.AFFINE, (1, 0, pixels, 0, 1, 0), **kwargs)
def translate_y_abs(img, pixels, **kwargs):
_check_args_tf(kwargs)
return img.transform(img.size, Image.AFFINE, (1, 0, 0, 0, 1, pixels), **kwargs)
def rotate(img, degrees, **kwargs):
_check_args_tf(kwargs)
if _PIL_VER >= (5, 2):
return img.rotate(degrees, **kwargs)
elif _PIL_VER >= (5, 0):
w, h = img.size
post_trans = (0, 0)
rotn_center = (w / 2.0, h / 2.0)
angle = -math.radians(degrees)
matrix = [
round(math.cos(angle), 15),
round(math.sin(angle), 15),
0.0,
round(-math.sin(angle), 15),
round(math.cos(angle), 15),
0.0,
]
def transform(x, y, matrix):
(a, b, c, d, e, f) = matrix
return a * x + b * y + c, d * x + e * y + f
matrix[2], matrix[5] = transform(
-rotn_center[0] - post_trans[0], -rotn_center[1] - post_trans[1], matrix
)
matrix[2] += rotn_center[0]
matrix[5] += rotn_center[1]
return img.transform(img.size, Image.AFFINE, matrix, **kwargs)
else:
return img.rotate(degrees, resample=kwargs['resample'])
def auto_contrast(img, **__):
return ImageOps.autocontrast(img)
def invert(img, **__):
return ImageOps.invert(img)
def equalize(img, **__):
return ImageOps.equalize(img)
def solarize(img, thresh, **__):
return ImageOps.solarize(img, thresh)
def solarize_add(img, add, thresh=128, **__):
lut = []
for i in range(256):
if i < thresh:
lut.append(min(255, i + add))
else:
lut.append(i)
if img.mode in ("L", "RGB"):
if img.mode == "RGB" and len(lut) == 256:
lut = lut + lut + lut
return img.point(lut)
else:
return img
def posterize(img, bits_to_keep, **__):
if bits_to_keep >= 8:
return img
return ImageOps.posterize(img, bits_to_keep)
def contrast(img, factor, **__):
return ImageEnhance.Contrast(img).enhance(factor)
def color(img, factor, **__):
return ImageEnhance.Color(img).enhance(factor)
def brightness(img, factor, **__):
return ImageEnhance.Brightness(img).enhance(factor)
def sharpness(img, factor, **__):
return ImageEnhance.Sharpness(img).enhance(factor)
def _randomly_negate(v):
"""With 50% prob, negate the value"""
return -v if random.random() > 0.5 else v
def _rotate_level_to_arg(level, _hparams):
# range [-30, 30]
level = (level / _LEVEL_DENOM) * 30.
level = _randomly_negate(level)
return level,
def _enhance_level_to_arg(level, _hparams):
# range [0.1, 1.9]
return (level / _LEVEL_DENOM) * 1.8 + 0.1,
def _enhance_increasing_level_to_arg(level, _hparams):
# the 'no change' level is 1.0, moving away from that towards 0. or 2.0 increases the enhancement blend
# range [0.1, 1.9] if level <= _LEVEL_DENOM
level = (level / _LEVEL_DENOM) * .9
level = max(0.1, 1.0 + _randomly_negate(level)) # keep it >= 0.1
return level,
def _shear_level_to_arg(level, _hparams):
# range [-0.3, 0.3]
level = (level / _LEVEL_DENOM) * 0.3
level = _randomly_negate(level)
return level,
def _translate_abs_level_to_arg(level, hparams):
translate_const = hparams['translate_const']
level = (level / _LEVEL_DENOM) * float(translate_const)
level = _randomly_negate(level)
return level,
def _translate_rel_level_to_arg(level, hparams):
# default range [-0.45, 0.45]
translate_pct = hparams.get('translate_pct', 0.45)
level = (level / _LEVEL_DENOM) * translate_pct
level = _randomly_negate(level)
return level,
def _posterize_level_to_arg(level, _hparams):
# As per Tensorflow TPU EfficientNet impl
# range [0, 4], 'keep 0 up to 4 MSB of original image'
# intensity/severity of augmentation decreases with level
return int((level / _LEVEL_DENOM) * 4),
def _posterize_increasing_level_to_arg(level, hparams):
# As per Tensorflow models research and UDA impl
# range [4, 0], 'keep 4 down to 0 MSB of original image',
# intensity/severity of augmentation increases with level
return 4 - _posterize_level_to_arg(level, hparams)[0],
def _posterize_original_level_to_arg(level, _hparams):
# As per original AutoAugment paper description
# range [4, 8], 'keep 4 up to 8 MSB of image'
# intensity/severity of augmentation decreases with level
return int((level / _LEVEL_DENOM) * 4) + 4,
def _solarize_level_to_arg(level, _hparams):
# range [0, 256]
# intensity/severity of augmentation decreases with level
return int((level / _LEVEL_DENOM) * 256),
def _solarize_increasing_level_to_arg(level, _hparams):
# range [0, 256]
# intensity/severity of augmentation increases with level
return 256 - _solarize_level_to_arg(level, _hparams)[0],
def _solarize_add_level_to_arg(level, _hparams):
# range [0, 110]
return int((level / _LEVEL_DENOM) * 110),
LEVEL_TO_ARG = {
'AutoContrast': None,
'Equalize': None,
'Invert': None,
'Rotate': _rotate_level_to_arg,
# There are several variations of the posterize level scaling in various Tensorflow/Google repositories/papers
'Posterize': _posterize_level_to_arg,
'PosterizeIncreasing': _posterize_increasing_level_to_arg,
'PosterizeOriginal': _posterize_original_level_to_arg,
'Solarize': _solarize_level_to_arg,
'SolarizeIncreasing': _solarize_increasing_level_to_arg,
'SolarizeAdd': _solarize_add_level_to_arg,
'Color': _enhance_level_to_arg,
'ColorIncreasing': _enhance_increasing_level_to_arg,
'Contrast': _enhance_level_to_arg,
'ContrastIncreasing': _enhance_increasing_level_to_arg,
'Brightness': _enhance_level_to_arg,
'BrightnessIncreasing': _enhance_increasing_level_to_arg,
'Sharpness': _enhance_level_to_arg,
'SharpnessIncreasing': _enhance_increasing_level_to_arg,
'ShearX': _shear_level_to_arg,
'ShearY': _shear_level_to_arg,
'TranslateX': _translate_abs_level_to_arg,
'TranslateY': _translate_abs_level_to_arg,
'TranslateXRel': _translate_rel_level_to_arg,
'TranslateYRel': _translate_rel_level_to_arg,
}
NAME_TO_OP = {
'AutoContrast': auto_contrast,
'Equalize': equalize,
'Invert': invert,
'Rotate': rotate,
'Posterize': posterize,
'PosterizeIncreasing': posterize,
'PosterizeOriginal': posterize,
'Solarize': solarize,
'SolarizeIncreasing': solarize,
'SolarizeAdd': solarize_add,
'Color': color,
'ColorIncreasing': color,
'Contrast': contrast,
'ContrastIncreasing': contrast,
'Brightness': brightness,
'BrightnessIncreasing': brightness,
'Sharpness': sharpness,
'SharpnessIncreasing': sharpness,
'ShearX': shear_x,
'ShearY': shear_y,
'TranslateX': translate_x_abs,
'TranslateY': translate_y_abs,
'TranslateXRel': translate_x_rel,
'TranslateYRel': translate_y_rel,
}
class AugmentOp:
def __init__(self, name, prob=0.5, magnitude=10, hparams=None):
hparams = hparams or _HPARAMS_DEFAULT
self.name = name
self.aug_fn = NAME_TO_OP[name]
self.level_fn = LEVEL_TO_ARG[name]
self.prob = prob
self.magnitude = magnitude
self.hparams = hparams.copy()
self.kwargs = dict(
fillcolor=hparams['img_mean'] if 'img_mean' in hparams else _FILL,
resample=hparams['interpolation'] if 'interpolation' in hparams else _RANDOM_INTERPOLATION,
)
# If magnitude_std is > 0, we introduce some randomness
# in the usually fixed policy and sample magnitude from a normal distribution
# with mean `magnitude` and std-dev of `magnitude_std`.
# NOTE This is my own hack, being tested, not in papers or reference impls.
# If magnitude_std is inf, we sample magnitude from a uniform distribution
self.magnitude_std = self.hparams.get('magnitude_std', 0)
self.magnitude_max = self.hparams.get('magnitude_max', None)
def __call__(self, img):
if self.prob < 1.0 and random.random() > self.prob:
return img
magnitude = self.magnitude
if self.magnitude_std > 0:
# magnitude randomization enabled
if self.magnitude_std == float('inf'):
magnitude = random.uniform(0, magnitude)
elif self.magnitude_std > 0:
magnitude = random.gauss(magnitude, self.magnitude_std)
# default upper_bound for the timm RA impl is _LEVEL_DENOM (10)
# setting magnitude_max overrides this to allow M > 10 (behaviour closer to Google TF RA impl)
upper_bound = self.magnitude_max or _LEVEL_DENOM
magnitude = max(0., min(magnitude, upper_bound))
level_args = self.level_fn(magnitude, self.hparams) if self.level_fn is not None else tuple()
return self.aug_fn(img, *level_args, **self.kwargs)
def __repr__(self):
fs = self.__class__.__name__ + f'(name={self.name}, p={self.prob}'
fs += f', m={self.magnitude}, mstd={self.magnitude_std}'
if self.magnitude_max is not None:
fs += f', mmax={self.magnitude_max}'
fs += ')'
return fs
def auto_augment_policy_v0(hparams):
# ImageNet v0 policy from TPU EfficientNet impl, cannot find a paper reference.
policy = [
[('Equalize', 0.8, 1), ('ShearY', 0.8, 4)],
[('Color', 0.4, 9), ('Equalize', 0.6, 3)],
[('Color', 0.4, 1), ('Rotate', 0.6, 8)],
[('Solarize', 0.8, 3), ('Equalize', 0.4, 7)],
[('Solarize', 0.4, 2), ('Solarize', 0.6, 2)],
[('Color', 0.2, 0), ('Equalize', 0.8, 8)],
[('Equalize', 0.4, 8), ('SolarizeAdd', 0.8, 3)],
[('ShearX', 0.2, 9), ('Rotate', 0.6, 8)],
[('Color', 0.6, 1), ('Equalize', 1.0, 2)],
[('Invert', 0.4, 9), ('Rotate', 0.6, 0)],
[('Equalize', 1.0, 9), ('ShearY', 0.6, 3)],
[('Color', 0.4, 7), ('Equalize', 0.6, 0)],
[('Posterize', 0.4, 6), ('AutoContrast', 0.4, 7)],
[('Solarize', 0.6, 8), ('Color', 0.6, 9)],
[('Solarize', 0.2, 4), ('Rotate', 0.8, 9)],
[('Rotate', 1.0, 7), ('TranslateYRel', 0.8, 9)],
[('ShearX', 0.0, 0), ('Solarize', 0.8, 4)],
[('ShearY', 0.8, 0), ('Color', 0.6, 4)],
[('Color', 1.0, 0), ('Rotate', 0.6, 2)],
[('Equalize', 0.8, 4), ('Equalize', 0.0, 8)],
[('Equalize', 1.0, 4), ('AutoContrast', 0.6, 2)],
[('ShearY', 0.4, 7), ('SolarizeAdd', 0.6, 7)],
        [('Posterize', 0.8, 2), ('Solarize', 0.6, 10)],  # This results in a black image with TPU posterize
[('Solarize', 0.6, 8), ('Equalize', 0.6, 1)],
[('Color', 0.8, 6), ('Rotate', 0.4, 5)],
]
pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]
return pc
def auto_augment_policy_v0r(hparams):
# ImageNet v0 policy from TPU EfficientNet impl, with variation of Posterize used
# in Google research implementation (number of bits discarded increases with magnitude)
policy = [
[('Equalize', 0.8, 1), ('ShearY', 0.8, 4)],
[('Color', 0.4, 9), ('Equalize', 0.6, 3)],
[('Color', 0.4, 1), ('Rotate', 0.6, 8)],
[('Solarize', 0.8, 3), ('Equalize', 0.4, 7)],
[('Solarize', 0.4, 2), ('Solarize', 0.6, 2)],
[('Color', 0.2, 0), ('Equalize', 0.8, 8)],
[('Equalize', 0.4, 8), ('SolarizeAdd', 0.8, 3)],
[('ShearX', 0.2, 9), ('Rotate', 0.6, 8)],
[('Color', 0.6, 1), ('Equalize', 1.0, 2)],
[('Invert', 0.4, 9), ('Rotate', 0.6, 0)],
[('Equalize', 1.0, 9), ('ShearY', 0.6, 3)],
[('Color', 0.4, 7), ('Equalize', 0.6, 0)],
[('PosterizeIncreasing', 0.4, 6), ('AutoContrast', 0.4, 7)],
[('Solarize', 0.6, 8), ('Color', 0.6, 9)],
[('Solarize', 0.2, 4), ('Rotate', 0.8, 9)],
[('Rotate', 1.0, 7), ('TranslateYRel', 0.8, 9)],
[('ShearX', 0.0, 0), ('Solarize', 0.8, 4)],
[('ShearY', 0.8, 0), ('Color', 0.6, 4)],
[('Color', 1.0, 0), ('Rotate', 0.6, 2)],
[('Equalize', 0.8, 4), ('Equalize', 0.0, 8)],
[('Equalize', 1.0, 4), ('AutoContrast', 0.6, 2)],
[('ShearY', 0.4, 7), ('SolarizeAdd', 0.6, 7)],
[('PosterizeIncreasing', 0.8, 2), ('Solarize', 0.6, 10)],
[('Solarize', 0.6, 8), ('Equalize', 0.6, 1)],
[('Color', 0.8, 6), ('Rotate', 0.4, 5)],
]
pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]
return pc
def auto_augment_policy_original(hparams):
# ImageNet policy from https://arxiv.org/abs/1805.09501
policy = [
[('PosterizeOriginal', 0.4, 8), ('Rotate', 0.6, 9)],
[('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)],
[('Equalize', 0.8, 8), ('Equalize', 0.6, 3)],
[('PosterizeOriginal', 0.6, 7), ('PosterizeOriginal', 0.6, 6)],
[('Equalize', 0.4, 7), ('Solarize', 0.2, 4)],
[('Equalize', 0.4, 4), ('Rotate', 0.8, 8)],
[('Solarize', 0.6, 3), ('Equalize', 0.6, 7)],
[('PosterizeOriginal', 0.8, 5), ('Equalize', 1.0, 2)],
[('Rotate', 0.2, 3), ('Solarize', 0.6, 8)],
[('Equalize', 0.6, 8), ('PosterizeOriginal', 0.4, 6)],
[('Rotate', 0.8, 8), ('Color', 0.4, 0)],
[('Rotate', 0.4, 9), ('Equalize', 0.6, 2)],
[('Equalize', 0.0, 7), ('Equalize', 0.8, 8)],
[('Invert', 0.6, 4), ('Equalize', 1.0, 8)],
[('Color', 0.6, 4), ('Contrast', 1.0, 8)],
[('Rotate', 0.8, 8), ('Color', 1.0, 2)],
[('Color', 0.8, 8), ('Solarize', 0.8, 7)],
[('Sharpness', 0.4, 7), ('Invert', 0.6, 8)],
[('ShearX', 0.6, 5), ('Equalize', 1.0, 9)],
[('Color', 0.4, 0), ('Equalize', 0.6, 3)],
[('Equalize', 0.4, 7), ('Solarize', 0.2, 4)],
[('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)],
[('Invert', 0.6, 4), ('Equalize', 1.0, 8)],
[('Color', 0.6, 4), ('Contrast', 1.0, 8)],
[('Equalize', 0.8, 8), ('Equalize', 0.6, 3)],
]
pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]
return pc
def auto_augment_policy_originalr(hparams):
# ImageNet policy from https://arxiv.org/abs/1805.09501 with research posterize variation
policy = [
[('PosterizeIncreasing', 0.4, 8), ('Rotate', 0.6, 9)],
[('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)],
[('Equalize', 0.8, 8), ('Equalize', 0.6, 3)],
[('PosterizeIncreasing', 0.6, 7), ('PosterizeIncreasing', 0.6, 6)],
[('Equalize', 0.4, 7), ('Solarize', 0.2, 4)],
[('Equalize', 0.4, 4), ('Rotate', 0.8, 8)],
[('Solarize', 0.6, 3), ('Equalize', 0.6, 7)],
[('PosterizeIncreasing', 0.8, 5), ('Equalize', 1.0, 2)],
[('Rotate', 0.2, 3), ('Solarize', 0.6, 8)],
[('Equalize', 0.6, 8), ('PosterizeIncreasing', 0.4, 6)],
[('Rotate', 0.8, 8), ('Color', 0.4, 0)],
[('Rotate', 0.4, 9), ('Equalize', 0.6, 2)],
[('Equalize', 0.0, 7), ('Equalize', 0.8, 8)],
[('Invert', 0.6, 4), ('Equalize', 1.0, 8)],
[('Color', 0.6, 4), ('Contrast', 1.0, 8)],
[('Rotate', 0.8, 8), ('Color', 1.0, 2)],
[('Color', 0.8, 8), ('Solarize', 0.8, 7)],
[('Sharpness', 0.4, 7), ('Invert', 0.6, 8)],
[('ShearX', 0.6, 5), ('Equalize', 1.0, 9)],
[('Color', 0.4, 0), ('Equalize', 0.6, 3)],
[('Equalize', 0.4, 7), ('Solarize', 0.2, 4)],
[('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)],
[('Invert', 0.6, 4), ('Equalize', 1.0, 8)],
[('Color', 0.6, 4), ('Contrast', 1.0, 8)],
[('Equalize', 0.8, 8), ('Equalize', 0.6, 3)],
]
pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]
return pc
def auto_augment_policy(name='v0', hparams=None):
hparams = hparams or _HPARAMS_DEFAULT
if name == 'original':
return auto_augment_policy_original(hparams)
elif name == 'originalr':
return auto_augment_policy_originalr(hparams)
elif name == 'v0':
return auto_augment_policy_v0(hparams)
elif name == 'v0r':
return auto_augment_policy_v0r(hparams)
else:
assert False, 'Unknown AA policy (%s)' % name
class AutoAugment:
def __init__(self, policy):
self.policy = policy
def __call__(self, img):
sub_policy = random.choice(self.policy)
for op in sub_policy:
img = op(img)
return img
def __repr__(self):
fs = self.__class__.__name__ + f'(policy='
for p in self.policy:
fs += '\n\t['
fs += ', '.join([str(op) for op in p])
fs += ']'
fs += ')'
return fs
def auto_augment_transform(config_str, hparams):
"""
    Create an AutoAugment transform
:param config_str: String defining configuration of auto augmentation. Consists of multiple sections separated by
dashes ('-'). The first section defines the AutoAugment policy (one of 'v0', 'v0r', 'original', 'originalr').
    The remaining sections, which are not order specific, determine
'mstd' - float std deviation of magnitude noise applied
Ex 'original-mstd0.5' results in AutoAugment with original policy, magnitude_std 0.5
:param hparams: Other hparams (kwargs) for the AutoAugmentation scheme
:return: A PyTorch compatible Transform
"""
config = config_str.split('-')
policy_name = config[0]
config = config[1:]
for c in config:
cs = re.split(r'(\d.*)', c)
if len(cs) < 2:
continue
key, val = cs[:2]
if key == 'mstd':
# noise param injected via hparams for now
hparams.setdefault('magnitude_std', float(val))
else:
assert False, 'Unknown AutoAugment config section'
aa_policy = auto_augment_policy(policy_name, hparams=hparams)
return AutoAugment(aa_policy)
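# Usage sketch (illustrative assumption, not part of the original file):
#   aa = auto_augment_transform('original-mstd0.5', {'translate_const': 250})
#   img = aa(pil_img)  # applies one randomly chosen sub-policy to a PIL image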
_RAND_TRANSFORMS = [
'AutoContrast',
'Equalize',
'Invert',
'Rotate',
'Posterize',
'Solarize',
'SolarizeAdd',
'Color',
'Contrast',
'Brightness',
'Sharpness',
'ShearX',
'ShearY',
'TranslateXRel',
'TranslateYRel',
    #'Cutout' # NOTE I've implemented this as random erasing separately
]
_RAND_INCREASING_TRANSFORMS = [
'AutoContrast',
'Equalize',
'Invert',
'Rotate',
'PosterizeIncreasing',
'SolarizeIncreasing',
'SolarizeAdd',
'ColorIncreasing',
'ContrastIncreasing',
'BrightnessIncreasing',
'SharpnessIncreasing',
'ShearX',
'ShearY',
'TranslateXRel',
'TranslateYRel',
    #'Cutout' # NOTE I've implemented this as random erasing separately
]
# These experimental weights are based loosely on the relative improvements mentioned in the paper.
# They may not result in increased performance, but could likely be tuned to do so.
_RAND_CHOICE_WEIGHTS_0 = {
'Rotate': 0.3,
'ShearX': 0.2,
'ShearY': 0.2,
'TranslateXRel': 0.1,
'TranslateYRel': 0.1,
'Color': .025,
'Sharpness': 0.025,
'AutoContrast': 0.025,
'Solarize': .005,
'SolarizeAdd': .005,
'Contrast': .005,
'Brightness': .005,
'Equalize': .005,
'Posterize': 0,
'Invert': 0,
}
def _select_rand_weights(weight_idx=0, transforms=None):
transforms = transforms or _RAND_TRANSFORMS
assert weight_idx == 0 # only one set of weights currently
rand_weights = _RAND_CHOICE_WEIGHTS_0
probs = [rand_weights[k] for k in transforms]
probs /= np.sum(probs)
return probs
def rand_augment_ops(magnitude=10, hparams=None, transforms=None):
hparams = hparams or _HPARAMS_DEFAULT
transforms = transforms or _RAND_TRANSFORMS
return [AugmentOp(
name, prob=0.5, magnitude=magnitude, hparams=hparams) for name in transforms]
class RandAugment:
def __init__(self, ops, num_layers=2, choice_weights=None):
self.ops = ops
self.num_layers = num_layers
self.choice_weights = choice_weights
def __call__(self, img):
# no replacement when using weighted choice
ops = np_random.choice(
self.ops, self.num_layers, replace=self.choice_weights is None, p=self.choice_weights)
for op in ops:
img = op(img)
return img
def __repr__(self):
fs = self.__class__.__name__ + f'(n={self.num_layers}, ops='
for op in self.ops:
fs += f'\n\t{op}'
fs += ')'
return fs
def rand_augment_transform(config_str, hparams):
"""
Create a RandAugment transform
:param config_str: String defining configuration of random augmentation. Consists of multiple sections separated by
dashes ('-'). The first section defines the specific variant of rand augment (currently only 'rand'). The remaining
    sections, which are not order specific, determine
'm' - integer magnitude of rand augment
'n' - integer num layers (number of transform ops selected per image)
    'w' - integer probability weight index (index of a set of weights to influence choice of op)
'mstd' - float std deviation of magnitude noise applied, or uniform sampling if infinity (or > 100)
'mmax' - set upper bound for magnitude to something other than default of _LEVEL_DENOM (10)
'inc' - integer (bool), use augmentations that increase in severity with magnitude (default: 0)
Ex 'rand-m9-n3-mstd0.5' results in RandAugment with magnitude 9, num_layers 3, magnitude_std 0.5
'rand-mstd1-w0' results in magnitude_std 1.0, weights 0, default magnitude of 10 and num_layers 2
:param hparams: Other hparams (kwargs) for the RandAugmentation scheme
:return: A PyTorch compatible Transform
"""
magnitude = _LEVEL_DENOM # default to _LEVEL_DENOM for magnitude (currently 10)
num_layers = 2 # default to 2 ops per image
weight_idx = None # default to no probability weights for op choice
transforms = _RAND_TRANSFORMS
config = config_str.split('-')
assert config[0] == 'rand'
config = config[1:]
for c in config:
cs = re.split(r'(\d.*)', c)
if len(cs) < 2:
continue
key, val = cs[:2]
if key == 'mstd':
# noise param / randomization of magnitude values
mstd = float(val)
if mstd > 100:
# use uniform sampling in 0 to magnitude if mstd is > 100
mstd = float('inf')
hparams.setdefault('magnitude_std', mstd)
elif key == 'mmax':
# clip magnitude between [0, mmax] instead of default [0, _LEVEL_DENOM]
hparams.setdefault('magnitude_max', int(val))
elif key == 'inc':
if bool(val):
transforms = _RAND_INCREASING_TRANSFORMS
elif key == 'm':
magnitude = int(val)
elif key == 'n':
num_layers = int(val)
elif key == 'w':
weight_idx = int(val)
else:
assert False, 'Unknown RandAugment config section'
ra_ops = rand_augment_ops(magnitude=magnitude, hparams=hparams, transforms=transforms)
choice_weights = None if weight_idx is None else _select_rand_weights(weight_idx)
return RandAugment(ra_ops, num_layers, choice_weights=choice_weights)
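# Usage sketch (illustrative assumption, not part of the original file):
#   ra = rand_augment_transform(
#       'rand-m9-mstd0.5-inc1',
#       {'translate_const': 100, 'img_mean': (128, 128, 128)})
#   img = ra(pil_img)  # applies num_layers=2 random ops at magnitude ~9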
_AUGMIX_TRANSFORMS = [
'AutoContrast',
'ColorIncreasing', # not in paper
'ContrastIncreasing', # not in paper
'BrightnessIncreasing', # not in paper
'SharpnessIncreasing', # not in paper
'Equalize',
'Rotate',
'PosterizeIncreasing',
'SolarizeIncreasing',
'ShearX',
'ShearY',
'TranslateXRel',
'TranslateYRel',
]
def augmix_ops(magnitude=10, hparams=None, transforms=None):
hparams = hparams or _HPARAMS_DEFAULT
transforms = transforms or _AUGMIX_TRANSFORMS
return [AugmentOp(
name, prob=1.0, magnitude=magnitude, hparams=hparams) for name in transforms]
class AugMixAugment:
""" AugMix Transform
Adapted and improved from impl here: https://github.com/google-research/augmix/blob/master/imagenet.py
    From paper: 'AugMix: A Simple Data Processing Method to Improve Robustness and Uncertainty' -
https://arxiv.org/abs/1912.02781
"""
def __init__(self, ops, alpha=1., width=3, depth=-1, blended=False):
self.ops = ops
self.alpha = alpha
self.width = width
self.depth = depth
self.blended = blended # blended mode is faster but not well tested
def _calc_blended_weights(self, ws, m):
ws = ws * m
cump = 1.
rws = []
for w in ws[::-1]:
alpha = w / cump
cump *= (1 - alpha)
rws.append(alpha)
return np.array(rws[::-1], dtype=np.float32)
def _apply_blended(self, img, mixing_weights, m):
        # This is my first crack at implementing a slightly faster mixed augmentation. Instead
# of accumulating the mix for each chain in a Numpy array and then blending with original,
# it recomputes the blending coefficients and applies one PIL image blend per chain.
# TODO the results appear in the right ballpark but they differ by more than rounding.
img_orig = img.copy()
ws = self._calc_blended_weights(mixing_weights, m)
for w in ws:
depth = self.depth if self.depth > 0 else np_random.randint(1, 4)
ops = np_random.choice(self.ops, depth, replace=True)
img_aug = img_orig # no ops are in-place, deep copy not necessary
for op in ops:
img_aug = op(img_aug)
img = Image.blend(img, img_aug, w)
return img
def _apply_basic(self, img, mixing_weights, m):
# This is a literal adaptation of the paper/official implementation without normalizations and
# PIL <-> Numpy conversions between every op. It is still quite CPU compute heavy compared to the
# typical augmentation transforms, could use a GPU / Kornia implementation.
        # np.asarray(img) yields (height, width, bands); PIL's img.size is (width, height)
        img_shape = img.size[1], img.size[0], len(img.getbands())
mixed = np.zeros(img_shape, dtype=np.float32)
for mw in mixing_weights:
depth = self.depth if self.depth > 0 else np_random.randint(1, 4)
ops = np_random.choice(self.ops, depth, replace=True)
img_aug = img # no ops are in-place, deep copy not necessary
for op in ops:
img_aug = op(img_aug)
mixed += mw * np.asarray(img_aug, dtype=np.float32)
np.clip(mixed, 0, 255., out=mixed)
mixed = Image.fromarray(mixed.astype(np.uint8))
return Image.blend(img, mixed, m)
def __call__(self, img):
mixing_weights = np.float32(np_random.dirichlet([self.alpha] * self.width))
m = np.float32(np_random.beta(self.alpha, self.alpha))
if self.blended:
mixed = self._apply_blended(img, mixing_weights, m)
else:
mixed = self._apply_basic(img, mixing_weights, m)
return mixed
def __repr__(self):
fs = self.__class__.__name__ + f'(alpha={self.alpha}, width={self.width}, depth={self.depth}, ops='
for op in self.ops:
fs += f'\n\t{op}'
fs += ')'
return fs
def augment_and_mix_transform(config_str, hparams):
""" Create AugMix PyTorch transform
:param config_str: String defining configuration of random augmentation. Consists of multiple sections separated by
    dashes ('-'). The first section defines the variant of augmentation (currently only 'augmix'). The remaining
    sections, which are not order-specific, determine
'm' - integer magnitude (severity) of augmentation mix (default: 3)
'w' - integer width of augmentation chain (default: 3)
'd' - integer depth of augmentation chain (-1 is random [1, 3], default: -1)
    'b' - integer (bool), blend each chain directly into the result rather than accumulating and blending once; uses less CPU (default: 0)
'mstd' - float std deviation of magnitude noise applied (default: 0)
Ex 'augmix-m5-w4-d2' results in AugMix with severity 5, chain width 4, chain depth 2
:param hparams: Other hparams (kwargs) for the Augmentation transforms
:return: A PyTorch compatible Transform
"""
magnitude = 3
width = 3
depth = -1
alpha = 1.
blended = False
config = config_str.split('-')
assert config[0] == 'augmix'
config = config[1:]
for c in config:
cs = re.split(r'(\d.*)', c)
if len(cs) < 2:
continue
key, val = cs[:2]
if key == 'mstd':
# noise param injected via hparams for now
hparams.setdefault('magnitude_std', float(val))
elif key == 'm':
magnitude = int(val)
elif key == 'w':
width = int(val)
elif key == 'd':
depth = int(val)
elif key == 'a':
alpha = float(val)
elif key == 'b':
            blended = bool(int(val))
else:
assert False, 'Unknown AugMix config section'
hparams.setdefault('magnitude_std', float('inf')) # default to uniform sampling (if not set via mstd arg)
ops = augmix_ops(magnitude=magnitude, hparams=hparams)
return AugMixAugment(ops, alpha=alpha, width=width, depth=depth, blended=blended)
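def _augmix_example():
    # Illustrative helper, not part of the original module: a minimal usage
    # sketch mirroring the docstring example above.
    tfm = augment_and_mix_transform('augmix-m5-w4-d2', hparams={})
    img = Image.new('RGB', (224, 224), color=(128, 128, 128))
    return tfm(img)  # mixes 4 augmentation chains of depth 2 at severity 5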
|
Cream/TinyViT/data/augmentation/auto_augment.py/0
|
{
"file_path": "Cream/TinyViT/data/augmentation/auto_augment.py",
"repo_id": "Cream",
"token_count": 13996
}
| 344 |
""" A dataset parser that reads tarfile based datasets
This parser can read and extract image samples from:
* a single tar of image files
* a folder of multiple tarfiles containing imagefiles
* a tar of tars containing image files
Labels are based on the combined folder and/or tar name structure.
Hacked together by / Copyright 2020 Ross Wightman
"""
import os
import tarfile
import pickle
import logging
import numpy as np
from glob import glob
from typing import List, Dict
from timm.utils.misc import natural_key
from .parser import Parser
from .class_map import load_class_map
from .constants import IMG_EXTENSIONS
_logger = logging.getLogger(__name__)
CACHE_FILENAME_SUFFIX = '_tarinfos.pickle'
class TarState:
def __init__(self, tf: tarfile.TarFile = None, ti: tarfile.TarInfo = None):
self.tf: tarfile.TarFile = tf
self.ti: tarfile.TarInfo = ti
self.children: Dict[str, TarState] = {} # child states (tars within tars)
def reset(self):
self.tf = None
def _extract_tarinfo(tf: tarfile.TarFile, parent_info: Dict, extensions=IMG_EXTENSIONS):
sample_count = 0
for i, ti in enumerate(tf):
if not ti.isfile():
continue
dirname, basename = os.path.split(ti.path)
name, ext = os.path.splitext(basename)
ext = ext.lower()
if ext == '.tar':
with tarfile.open(fileobj=tf.extractfile(ti), mode='r|') as ctf:
child_info = dict(
name=ti.name, path=os.path.join(parent_info['path'], name), ti=ti, children=[], samples=[])
sample_count += _extract_tarinfo(ctf, child_info, extensions=extensions)
_logger.debug(f'{i}/?. Extracted child tarinfos from {ti.name}. {len(child_info["samples"])} images.')
parent_info['children'].append(child_info)
elif ext in extensions:
parent_info['samples'].append(ti)
sample_count += 1
return sample_count
def extract_tarinfos(root, class_name_to_idx=None, cache_tarinfo=None, extensions=IMG_EXTENSIONS, sort=True):
root_is_tar = False
if os.path.isfile(root):
assert os.path.splitext(root)[-1].lower() == '.tar'
tar_filenames = [root]
root, root_name = os.path.split(root)
root_name = os.path.splitext(root_name)[0]
root_is_tar = True
else:
root_name = root.strip(os.path.sep).split(os.path.sep)[-1]
tar_filenames = glob(os.path.join(root, '*.tar'), recursive=True)
num_tars = len(tar_filenames)
tar_bytes = sum([os.path.getsize(f) for f in tar_filenames])
assert num_tars, f'No .tar files found at specified path ({root}).'
_logger.info(f'Scanning {tar_bytes/1024**2:.2f}MB of tar files...')
info = dict(tartrees=[])
cache_path = ''
if cache_tarinfo is None:
        cache_tarinfo = tar_bytes > 10 * 1024**3  # FIXME magic number, 10GB
if cache_tarinfo:
cache_filename = '_' + root_name + CACHE_FILENAME_SUFFIX
cache_path = os.path.join(root, cache_filename)
if os.path.exists(cache_path):
_logger.info(f'Reading tar info from cache file {cache_path}.')
with open(cache_path, 'rb') as pf:
info = pickle.load(pf)
assert len(info['tartrees']) == num_tars, "Cached tartree len doesn't match number of tarfiles"
else:
for i, fn in enumerate(tar_filenames):
path = '' if root_is_tar else os.path.splitext(os.path.basename(fn))[0]
with tarfile.open(fn, mode='r|') as tf: # tarinfo scans done in streaming mode
parent_info = dict(name=os.path.relpath(fn, root), path=path, ti=None, children=[], samples=[])
num_samples = _extract_tarinfo(tf, parent_info, extensions=extensions)
num_children = len(parent_info["children"])
_logger.debug(
f'{i}/{num_tars}. Extracted tarinfos from {fn}. {num_children} children, {num_samples} samples.')
info['tartrees'].append(parent_info)
if cache_path:
_logger.info(f'Writing tar info to cache file {cache_path}.')
with open(cache_path, 'wb') as pf:
pickle.dump(info, pf)
samples = []
labels = []
build_class_map = False
if class_name_to_idx is None:
build_class_map = True
# Flatten tartree info into lists of samples and targets w/ targets based on label id via
# class map arg or from unique paths.
# NOTE: currently only flattening up to two-levels, filesystem .tars and then one level of sub-tar children
# this covers my current use cases and keeps things a little easier to test for now.
tarfiles = []
def _label_from_paths(*path, leaf_only=True):
path = os.path.join(*path).strip(os.path.sep)
return path.split(os.path.sep)[-1] if leaf_only else path.replace(os.path.sep, '_')
def _add_samples(info, fn):
added = 0
for s in info['samples']:
label = _label_from_paths(info['path'], os.path.dirname(s.path))
if not build_class_map and label not in class_name_to_idx:
continue
samples.append((s, fn, info['ti']))
labels.append(label)
added += 1
return added
    _logger.info('Collecting samples and building tar states.')
for parent_info in info['tartrees']:
# if tartree has children, we assume all samples are at the child level
tar_name = None if root_is_tar else parent_info['name']
tar_state = TarState()
parent_added = 0
for child_info in parent_info['children']:
child_added = _add_samples(child_info, fn=tar_name)
if child_added:
tar_state.children[child_info['name']] = TarState(ti=child_info['ti'])
parent_added += child_added
parent_added += _add_samples(parent_info, fn=tar_name)
if parent_added:
tarfiles.append((tar_name, tar_state))
del info
if build_class_map:
# build class index
sorted_labels = list(sorted(set(labels), key=natural_key))
class_name_to_idx = {c: idx for idx, c in enumerate(sorted_labels)}
    _logger.info('Mapping targets and sorting samples.')
samples_and_targets = [(s, class_name_to_idx[l]) for s, l in zip(samples, labels) if l in class_name_to_idx]
if sort:
samples_and_targets = sorted(samples_and_targets, key=lambda k: natural_key(k[0][0].path))
samples, targets = zip(*samples_and_targets)
samples = np.array(samples)
targets = np.array(targets)
_logger.info(f'Finished processing {len(samples)} samples across {len(tarfiles)} tar files.')
return samples, targets, class_name_to_idx, tarfiles
class ParserImageInTar(Parser):
""" Multi-tarfile dataset parser where there is one .tar file per class
"""
def __init__(self, root, class_map='', cache_tarfiles=True, cache_tarinfo=None):
super().__init__()
class_name_to_idx = None
if class_map:
class_name_to_idx = load_class_map(class_map, root)
self.root = root
self.samples, self.targets, self.class_name_to_idx, tarfiles = extract_tarinfos(
self.root,
class_name_to_idx=class_name_to_idx,
cache_tarinfo=cache_tarinfo,
extensions=IMG_EXTENSIONS)
self.class_idx_to_name = {v: k for k, v in self.class_name_to_idx.items()}
if len(tarfiles) == 1 and tarfiles[0][0] is None:
self.root_is_tar = True
self.tar_state = tarfiles[0][1]
else:
self.root_is_tar = False
self.tar_state = dict(tarfiles)
self.cache_tarfiles = cache_tarfiles
def __len__(self):
return len(self.samples)
def __getitem__(self, index):
sample = self.samples[index]
target = self.targets[index]
sample_ti, parent_fn, child_ti = sample
parent_abs = os.path.join(self.root, parent_fn) if parent_fn else self.root
tf = None
cache_state = None
if self.cache_tarfiles:
cache_state = self.tar_state if self.root_is_tar else self.tar_state[parent_fn]
tf = cache_state.tf
if tf is None:
tf = tarfile.open(parent_abs)
if self.cache_tarfiles:
cache_state.tf = tf
if child_ti is not None:
ctf = cache_state.children[child_ti.name].tf if self.cache_tarfiles else None
if ctf is None:
ctf = tarfile.open(fileobj=tf.extractfile(child_ti))
if self.cache_tarfiles:
cache_state.children[child_ti.name].tf = ctf
tf = ctf
return tf.extractfile(sample_ti), target
def _filename(self, index, basename=False, absolute=False):
filename = self.samples[index][0].name
if basename:
filename = os.path.basename(filename)
return filename
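def _parser_image_in_tar_example():
    # Illustrative helper, not part of the original module: a minimal sketch
    # assuming './data' contains one .tar of images per class.
    parser = ParserImageInTar('./data')
    fileobj, target = parser[0]  # open file handle into the tar + class index
    return len(parser), target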
|
Cream/TinyViT/data/augmentation/parsers/parser_image_in_tar.py/0
|
{
"file_path": "Cream/TinyViT/data/augmentation/parsers/parser_image_in_tar.py",
"repo_id": "Cream",
"token_count": 3968
}
| 345 |
# --------------------------------------------------------
# TinyViT Utils (save/load checkpoints, etc.)
# Copyright (c) 2022 Microsoft
# Based on the code: Swin Transformer
# (https://github.com/microsoft/swin-transformer)
# Adapted for TinyViT
# --------------------------------------------------------
import os
import torch
import torch.distributed as dist
import subprocess
def add_common_args(parser):
parser.add_argument('--cfg', type=str, required=True,
metavar="FILE", help='path to config file', )
parser.add_argument(
"--opts",
help="Modify config options by adding 'KEY VALUE' pairs. ",
default=None,
nargs='+',
)
# easy config modification
parser.add_argument('--batch-size', type=int,
help="batch size for single GPU")
parser.add_argument('--data-path', type=str, help='path to dataset')
parser.add_argument('--pretrained',
help='pretrained weight from checkpoint, could be imagenet22k pretrained weight')
parser.add_argument('--resume', help='resume from checkpoint')
parser.add_argument('--accumulation-steps', type=int,
help="gradient accumulation steps")
parser.add_argument('--use-checkpoint', action='store_true',
help="whether to use gradient checkpointing to save memory")
parser.add_argument('--disable_amp', action='store_true',
help='Disable pytorch amp')
parser.add_argument('--output', default='output', type=str, metavar='PATH',
help='root of output folder, the full path is <output>/<model_name>/<tag> (default: output)')
parser.add_argument('--tag', help='tag of experiment')
parser.add_argument('--eval', action='store_true',
help='Perform evaluation only')
parser.add_argument('--only-cpu', action='store_true',
help='Perform evaluation on CPU')
parser.add_argument('--throughput', action='store_true',
help='Test throughput only')
parser.add_argument('--use-sync-bn', action='store_true',
                        default=False, help='use synchronized batch normalization')
parser.add_argument('--use-wandb', action='store_true',
default=False, help='use wandb to record log')
# distributed training
parser.add_argument("--local_rank", type=int,
help='local rank for DistributedDataParallel')
def load_checkpoint(config, model, optimizer, lr_scheduler, loss_scaler, logger):
logger.info(
f"==============> Resuming form {config.MODEL.RESUME}....................")
if config.MODEL.RESUME.startswith('https'):
checkpoint = torch.hub.load_state_dict_from_url(
config.MODEL.RESUME, map_location='cpu', check_hash=True)
else:
checkpoint = torch.load(config.MODEL.RESUME, map_location='cpu')
params = checkpoint['model']
now_model_state = model.state_dict()
mnames = ['head.weight', 'head.bias'] # (cls, 1024), (cls, )
if mnames[-1] in params:
ckpt_head_bias = params[mnames[-1]]
now_model_bias = now_model_state[mnames[-1]]
if ckpt_head_bias.shape != now_model_bias.shape:
num_classes = 1000
if len(ckpt_head_bias) == 21841 and len(now_model_bias) == num_classes:
logger.info("Convert checkpoint from 21841 to 1k")
# convert 22kto1k
fname = './imagenet_1kto22k.txt'
with open(fname) as fin:
mapping = torch.Tensor(
list(map(int, fin.readlines()))).to(torch.long)
for name in mnames:
v = params[name]
shape = list(v.shape)
shape[0] = num_classes
mean_v = v[mapping[mapping != -1]].mean(0, keepdim=True)
v = torch.cat([v, mean_v], 0)
v = v[mapping]
params[name] = v
msg = model.load_state_dict(params, strict=False)
logger.info(msg)
max_accuracy = 0.0
if not config.EVAL_MODE:
if 'optimizer' in checkpoint and 'lr_scheduler' in checkpoint:
if optimizer is not None:
optimizer.load_state_dict(checkpoint['optimizer'])
if lr_scheduler is not None:
lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])
if 'scaler' in checkpoint:
loss_scaler.load_state_dict(checkpoint['scaler'])
logger.info(
f"=> loaded successfully '{config.MODEL.RESUME}' (epoch {checkpoint['epoch']})")
if 'max_accuracy' in checkpoint:
max_accuracy = checkpoint['max_accuracy']
if 'epoch' in checkpoint:
config.defrost()
config.TRAIN.START_EPOCH = checkpoint['epoch'] + 1
config.freeze()
del checkpoint
torch.cuda.empty_cache()
return max_accuracy
def load_pretrained(config, model, logger):
logger.info(
f"==============> Loading weight {config.MODEL.PRETRAINED} for fine-tuning......")
checkpoint = torch.load(config.MODEL.PRETRAINED, map_location='cpu')
state_dict = checkpoint['model']
# delete relative_position_index since we always re-init it
relative_position_index_keys = [
k for k in state_dict.keys() if "relative_position_index" in k]
for k in relative_position_index_keys:
del state_dict[k]
# delete relative_coords_table since we always re-init it
relative_position_index_keys = [
k for k in state_dict.keys() if "relative_coords_table" in k]
for k in relative_position_index_keys:
del state_dict[k]
# delete attn_mask since we always re-init it
attn_mask_keys = [k for k in state_dict.keys() if "attn_mask" in k]
for k in attn_mask_keys:
del state_dict[k]
model_state_dict = model.state_dict()
# bicubic interpolate relative_position_bias_table if not match
relative_position_bias_table_keys = [
k for k in state_dict.keys() if "relative_position_bias_table" in k]
for k in relative_position_bias_table_keys:
relative_position_bias_table_pretrained = state_dict[k]
relative_position_bias_table_current = model_state_dict[k]
L1, nH1 = relative_position_bias_table_pretrained.size()
L2, nH2 = relative_position_bias_table_current.size()
if nH1 != nH2:
logger.warning(f"Error in loading {k}, passing......")
else:
if L1 != L2:
# bicubic interpolate relative_position_bias_table if not match
S1 = int(L1 ** 0.5)
S2 = int(L2 ** 0.5)
relative_position_bias_table_pretrained_resized = torch.nn.functional.interpolate(
relative_position_bias_table_pretrained.permute(1, 0).view(1, nH1, S1, S1), size=(S2, S2),
mode='bicubic')
state_dict[k] = relative_position_bias_table_pretrained_resized.view(
nH2, L2).permute(1, 0)
# bicubic interpolate attention_biases if not match
relative_position_bias_table_keys = [
k for k in state_dict.keys() if "attention_biases" in k]
for k in relative_position_bias_table_keys:
relative_position_bias_table_pretrained = state_dict[k]
relative_position_bias_table_current = model_state_dict[k]
nH1, L1 = relative_position_bias_table_pretrained.size()
nH2, L2 = relative_position_bias_table_current.size()
if nH1 != nH2:
logger.warning(f"Error in loading {k}, passing......")
else:
if L1 != L2:
# bicubic interpolate relative_position_bias_table if not match
S1 = int(L1 ** 0.5)
S2 = int(L2 ** 0.5)
relative_position_bias_table_pretrained_resized = torch.nn.functional.interpolate(
relative_position_bias_table_pretrained.view(1, nH1, S1, S1), size=(S2, S2),
mode='bicubic')
state_dict[k] = relative_position_bias_table_pretrained_resized.view(
nH2, L2)
# bicubic interpolate absolute_pos_embed if not match
absolute_pos_embed_keys = [
k for k in state_dict.keys() if "absolute_pos_embed" in k]
for k in absolute_pos_embed_keys:
# dpe
absolute_pos_embed_pretrained = state_dict[k]
absolute_pos_embed_current = model.state_dict()[k]
_, L1, C1 = absolute_pos_embed_pretrained.size()
_, L2, C2 = absolute_pos_embed_current.size()
        if C1 != C2:
logger.warning(f"Error in loading {k}, passing......")
else:
if L1 != L2:
S1 = int(L1 ** 0.5)
S2 = int(L2 ** 0.5)
absolute_pos_embed_pretrained = absolute_pos_embed_pretrained.reshape(
-1, S1, S1, C1)
absolute_pos_embed_pretrained = absolute_pos_embed_pretrained.permute(
0, 3, 1, 2)
absolute_pos_embed_pretrained_resized = torch.nn.functional.interpolate(
absolute_pos_embed_pretrained, size=(S2, S2), mode='bicubic')
absolute_pos_embed_pretrained_resized = absolute_pos_embed_pretrained_resized.permute(
0, 2, 3, 1)
absolute_pos_embed_pretrained_resized = absolute_pos_embed_pretrained_resized.flatten(
1, 2)
state_dict[k] = absolute_pos_embed_pretrained_resized
# check classifier, if not match, then re-init classifier to zero
head_bias_pretrained = state_dict['head.bias']
Nc1 = head_bias_pretrained.shape[0]
Nc2 = model.head.bias.shape[0]
if (Nc1 != Nc2):
if Nc1 == 21841 and Nc2 == 1000:
logger.info("loading ImageNet-21841 weight to ImageNet-1K ......")
            map22kto1k_path = './imagenet_1kto22k.txt'
with open(map22kto1k_path) as fin:
mapping = torch.Tensor(
list(map(int, fin.readlines()))).to(torch.long)
for name in ['head.weight', 'head.bias']:
v = state_dict[name]
mean_v = v[mapping[mapping != -1]].mean(0, keepdim=True)
v = torch.cat([v, mean_v], 0)
v = v[mapping]
state_dict[name] = v
else:
torch.nn.init.constant_(model.head.bias, 0.)
torch.nn.init.constant_(model.head.weight, 0.)
del state_dict['head.weight']
del state_dict['head.bias']
logger.warning(
f"Error in loading classifier head, re-init classifier head to 0")
msg = model.load_state_dict(state_dict, strict=False)
logger.warning(msg)
logger.info(f"=> loaded successfully '{config.MODEL.PRETRAINED}'")
del checkpoint
torch.cuda.empty_cache()
def save_checkpoint(config, epoch, model, max_accuracy, optimizer, lr_scheduler, loss_scaler, logger):
save_state = {'model': model.state_dict(),
'optimizer': optimizer.state_dict(),
'lr_scheduler': lr_scheduler.state_dict(),
'max_accuracy': max_accuracy,
'scaler': loss_scaler.state_dict(),
'epoch': epoch,
'config': config}
save_path = os.path.join(config.OUTPUT, f'ckpt_epoch_{epoch}.pth')
logger.info(f"{save_path} saving......")
torch.save(save_state, save_path)
logger.info(f"{save_path} saved !!!")
def auto_resume_helper(output_dir):
checkpoints = os.listdir(output_dir)
checkpoints = [ckpt for ckpt in checkpoints if ckpt.endswith('pth')]
print(f"All checkpoints founded in {output_dir}: {checkpoints}")
if len(checkpoints) > 0:
latest_checkpoint = max([os.path.join(output_dir, d)
for d in checkpoints], key=os.path.getmtime)
print(f"The latest checkpoint founded: {latest_checkpoint}")
resume_file = latest_checkpoint
else:
resume_file = None
return resume_file
def reduce_tensor(tensor, n=None):
if n is None:
n = dist.get_world_size()
rt = tensor.clone()
dist.all_reduce(rt, op=dist.ReduceOp.SUM)
rt = rt / n
return rt
def ampscaler_get_grad_norm(parameters, norm_type: float = 2.0) -> torch.Tensor:
if isinstance(parameters, torch.Tensor):
parameters = [parameters]
parameters = [p for p in parameters if p.grad is not None]
norm_type = float(norm_type)
if len(parameters) == 0:
return torch.tensor(0.)
device = parameters[0].grad.device
if norm_type == float('inf'):
total_norm = max(p.grad.detach().abs().max().to(device)
for p in parameters)
else:
total_norm = torch.norm(torch.stack([torch.norm(p.grad.detach(),
norm_type).to(device) for p in parameters]), norm_type)
return total_norm
class NativeScalerWithGradNormCount:
state_dict_key = "amp_scaler"
def __init__(self, grad_scaler_enabled=True):
self._scaler = torch.cuda.amp.GradScaler(enabled=grad_scaler_enabled)
def __call__(self, loss, optimizer, clip_grad=None, parameters=None, create_graph=False, update_grad=True):
self._scaler.scale(loss).backward(create_graph=create_graph)
if update_grad:
if clip_grad is not None and clip_grad > 0.0:
assert parameters is not None
# unscale the gradients of optimizer's assigned params in-place
self._scaler.unscale_(optimizer)
norm = torch.nn.utils.clip_grad_norm_(parameters, clip_grad)
else:
self._scaler.unscale_(optimizer)
norm = ampscaler_get_grad_norm(parameters)
self._scaler.step(optimizer)
self._scaler.update()
else:
norm = None
return norm
def state_dict(self):
return self._scaler.state_dict()
def load_state_dict(self, state_dict):
self._scaler.load_state_dict(state_dict)
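def _loss_scaler_example(model, loss, optimizer):
    # Illustrative helper, not part of the original module: one AMP training
    # step with gradient clipping, returning the pre-clip gradient norm.
    scaler = NativeScalerWithGradNormCount()
    optimizer.zero_grad()
    grad_norm = scaler(loss, optimizer, clip_grad=5.0,
                       parameters=model.parameters(), update_grad=True)
    return grad_norm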
def is_main_process():
return dist.get_rank() == 0
def run_cmd(cmd, default=None):
try:
return subprocess.check_output(cmd.split(), universal_newlines=True).strip()
    except Exception:  # e.g. git not installed or not a repository
if default is None:
raise
return default
def get_git_info():
return dict(
branch=run_cmd('git rev-parse --abbrev-ref HEAD', 'custom'),
git_hash=run_cmd('git rev-parse --short HEAD', 'custom'),
)
|
Cream/TinyViT/utils.py/0
|
{
"file_path": "Cream/TinyViT/utils.py",
"repo_id": "Cream",
"token_count": 6809
}
| 346 |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
"""
Various positional encodings for the transformer.
"""
import math
import torch
from torch import nn
from util.misc import NestedTensor
class PositionEmbeddingSine(nn.Module):
"""
This is a more standard version of the position embedding, very similar to the one
used by the Attention is all you need paper, generalized to work on images.
"""
def __init__(self, num_pos_feats=64, temperature=10000, normalize=False, scale=None):
super().__init__()
self.num_pos_feats = num_pos_feats
self.temperature = temperature
self.normalize = normalize
if scale is not None and normalize is False:
raise ValueError("normalize should be True if scale is passed")
if scale is None:
scale = 2 * math.pi
self.scale = scale
def forward(self, tensor_list: NestedTensor):
x = tensor_list.tensors
mask = tensor_list.mask
assert mask is not None
not_mask = ~mask
y_embed = not_mask.cumsum(1, dtype=torch.float32)
x_embed = not_mask.cumsum(2, dtype=torch.float32)
if self.normalize:
eps = 1e-6
y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale
x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale
dim_t = torch.arange(self.num_pos_feats, dtype=torch.float32, device=x.device)
dim_t = self.temperature ** (2 * (dim_t // 2) / self.num_pos_feats)
pos_x = x_embed[:, :, :, None] / dim_t
pos_y = y_embed[:, :, :, None] / dim_t
pos_x = torch.stack((pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), dim=4).flatten(3)
pos_y = torch.stack((pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), dim=4).flatten(3)
pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2)
return pos
class PositionEmbeddingLearned(nn.Module):
"""
Absolute pos embedding, learned.
"""
def __init__(self, num_pos_feats=256):
super().__init__()
self.row_embed = nn.Embedding(50, num_pos_feats)
self.col_embed = nn.Embedding(50, num_pos_feats)
self.reset_parameters()
def reset_parameters(self):
nn.init.uniform_(self.row_embed.weight)
nn.init.uniform_(self.col_embed.weight)
def forward(self, tensor_list: NestedTensor):
x = tensor_list.tensors
h, w = x.shape[-2:]
i = torch.arange(w, device=x.device)
j = torch.arange(h, device=x.device)
x_emb = self.col_embed(i)
y_emb = self.row_embed(j)
pos = torch.cat([
x_emb.unsqueeze(0).repeat(h, 1, 1),
y_emb.unsqueeze(1).repeat(1, w, 1),
], dim=-1).permute(2, 0, 1).unsqueeze(0).repeat(x.shape[0], 1, 1, 1)
return pos
def build_position_encoding(args):
N_steps = args.hidden_dim // 2
if args.position_embedding in ('v2', 'sine'):
# TODO find a better way of exposing other arguments
position_embedding = PositionEmbeddingSine(N_steps, normalize=True)
elif args.position_embedding in ('v3', 'learned'):
position_embedding = PositionEmbeddingLearned(N_steps)
else:
raise ValueError(f"not supported {args.position_embedding}")
return position_embedding
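def _position_encoding_example():
    # Illustrative helper, not part of the original module: builds the sine
    # embedding from an argparse-like namespace with the two fields read above.
    from types import SimpleNamespace
    args = SimpleNamespace(hidden_dim=256, position_embedding='sine')
    pos_embed = build_position_encoding(args)
    x = torch.zeros(2, 256, 16, 16)
    mask = torch.zeros(2, 16, 16, dtype=torch.bool)
    return pos_embed(NestedTensor(x, mask)).shape  # torch.Size([2, 256, 16, 16])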
|
Cream/iRPE/DETR-with-iRPE/models/position_encoding.py/0
|
{
"file_path": "Cream/iRPE/DETR-with-iRPE/models/position_encoding.py",
"repo_id": "Cream",
"token_count": 1509
}
| 347 |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
"""
Misc functions, including distributed helpers.
Mostly copy-paste from torchvision references.
"""
import os
import subprocess
import time
from collections import defaultdict, deque
import datetime
import pickle
from typing import Optional, List
import torch
import torch.distributed as dist
from torch import Tensor
# needed due to empty tensor bug in pytorch and torchvision 0.5
import torchvision
if float(torchvision.__version__.split(".")[1]) < 7.0:
from torchvision.ops import _new_empty_tensor
from torchvision.ops.misc import _output_size
class SmoothedValue(object):
"""Track a series of values and provide access to smoothed values over a
window or the global series average.
"""
def __init__(self, window_size=20, fmt=None):
if fmt is None:
fmt = "{median:.4f} ({global_avg:.4f})"
self.deque = deque(maxlen=window_size)
self.total = 0.0
self.count = 0
self.fmt = fmt
def update(self, value, n=1):
self.deque.append(value)
self.count += n
self.total += value * n
def synchronize_between_processes(self):
"""
Warning: does not synchronize the deque!
"""
if not is_dist_avail_and_initialized():
return
t = torch.tensor([self.count, self.total], dtype=torch.float64, device='cuda')
dist.barrier()
dist.all_reduce(t)
t = t.tolist()
self.count = int(t[0])
self.total = t[1]
@property
def median(self):
d = torch.tensor(list(self.deque))
return d.median().item()
@property
def avg(self):
d = torch.tensor(list(self.deque), dtype=torch.float32)
return d.mean().item()
@property
def global_avg(self):
return self.total / self.count
@property
def max(self):
return max(self.deque)
@property
def value(self):
return self.deque[-1]
def __str__(self):
return self.fmt.format(
median=self.median,
avg=self.avg,
global_avg=self.global_avg,
max=self.max,
value=self.value)
def all_gather(data):
"""
Run all_gather on arbitrary picklable data (not necessarily tensors)
Args:
data: any picklable object
Returns:
list[data]: list of data gathered from each rank
"""
world_size = get_world_size()
if world_size == 1:
return [data]
# serialized to a Tensor
buffer = pickle.dumps(data)
storage = torch.ByteStorage.from_buffer(buffer)
tensor = torch.ByteTensor(storage).to("cuda")
# obtain Tensor size of each rank
local_size = torch.tensor([tensor.numel()], device="cuda")
size_list = [torch.tensor([0], device="cuda") for _ in range(world_size)]
dist.all_gather(size_list, local_size)
size_list = [int(size.item()) for size in size_list]
max_size = max(size_list)
# receiving Tensor from all ranks
# we pad the tensor because torch all_gather does not support
# gathering tensors of different shapes
tensor_list = []
for _ in size_list:
tensor_list.append(torch.empty((max_size,), dtype=torch.uint8, device="cuda"))
if local_size != max_size:
padding = torch.empty(size=(max_size - local_size,), dtype=torch.uint8, device="cuda")
tensor = torch.cat((tensor, padding), dim=0)
dist.all_gather(tensor_list, tensor)
data_list = []
for size, tensor in zip(size_list, tensor_list):
buffer = tensor.cpu().numpy().tobytes()[:size]
data_list.append(pickle.loads(buffer))
return data_list
def reduce_dict(input_dict, average=True):
"""
Args:
input_dict (dict): all the values will be reduced
average (bool): whether to do average or sum
Reduce the values in the dictionary from all processes so that all processes
have the averaged results. Returns a dict with the same fields as
input_dict, after reduction.
"""
world_size = get_world_size()
if world_size < 2:
return input_dict
with torch.no_grad():
names = []
values = []
# sort the keys so that they are consistent across processes
for k in sorted(input_dict.keys()):
names.append(k)
values.append(input_dict[k])
values = torch.stack(values, dim=0)
dist.all_reduce(values)
if average:
values /= world_size
reduced_dict = {k: v for k, v in zip(names, values)}
return reduced_dict
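def _reduce_dict_example(loss_dict):
    # Illustrative helper, not part of the original module: average a dict of
    # scalar loss tensors across processes before logging on the main rank.
    reduced = reduce_dict(loss_dict, average=True)
    return sum(v.item() for v in reduced.values())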
class MetricLogger(object):
def __init__(self, delimiter="\t"):
self.meters = defaultdict(SmoothedValue)
self.delimiter = delimiter
def update(self, **kwargs):
for k, v in kwargs.items():
if isinstance(v, torch.Tensor):
v = v.item()
assert isinstance(v, (float, int))
self.meters[k].update(v)
def __getattr__(self, attr):
if attr in self.meters:
return self.meters[attr]
if attr in self.__dict__:
return self.__dict__[attr]
raise AttributeError("'{}' object has no attribute '{}'".format(
type(self).__name__, attr))
def __str__(self):
loss_str = []
for name, meter in self.meters.items():
loss_str.append(
"{}: {}".format(name, str(meter))
)
return self.delimiter.join(loss_str)
def synchronize_between_processes(self):
for meter in self.meters.values():
meter.synchronize_between_processes()
def add_meter(self, name, meter):
self.meters[name] = meter
def log_every(self, iterable, print_freq, header=None):
i = 0
if not header:
header = ''
start_time = time.time()
end = time.time()
iter_time = SmoothedValue(fmt='{avg:.4f}')
data_time = SmoothedValue(fmt='{avg:.4f}')
space_fmt = ':' + str(len(str(len(iterable)))) + 'd'
if torch.cuda.is_available():
log_msg = self.delimiter.join([
header,
'[{0' + space_fmt + '}/{1}]',
'eta: {eta}',
'{meters}',
'time: {time}',
'data: {data}',
'max mem: {memory:.0f}'
])
else:
log_msg = self.delimiter.join([
header,
'[{0' + space_fmt + '}/{1}]',
'eta: {eta}',
'{meters}',
'time: {time}',
'data: {data}'
])
MB = 1024.0 * 1024.0
for obj in iterable:
data_time.update(time.time() - end)
yield obj
iter_time.update(time.time() - end)
if i % print_freq == 0 or i == len(iterable) - 1:
eta_seconds = iter_time.global_avg * (len(iterable) - i)
eta_string = str(datetime.timedelta(seconds=int(eta_seconds)))
if torch.cuda.is_available():
print(log_msg.format(
i, len(iterable), eta=eta_string,
meters=str(self),
time=str(iter_time), data=str(data_time),
memory=torch.cuda.max_memory_allocated() / MB))
else:
print(log_msg.format(
i, len(iterable), eta=eta_string,
meters=str(self),
time=str(iter_time), data=str(data_time)))
i += 1
end = time.time()
total_time = time.time() - start_time
total_time_str = str(datetime.timedelta(seconds=int(total_time)))
print('{} Total time: {} ({:.4f} s / it)'.format(
header, total_time_str, total_time / len(iterable)))
def get_sha():
cwd = os.path.dirname(os.path.abspath(__file__))
def _run(command):
return subprocess.check_output(command, cwd=cwd).decode('ascii').strip()
sha = 'N/A'
diff = "clean"
branch = 'N/A'
try:
sha = _run(['git', 'rev-parse', 'HEAD'])
subprocess.check_output(['git', 'diff'], cwd=cwd)
diff = _run(['git', 'diff-index', 'HEAD'])
diff = "has uncommited changes" if diff else "clean"
branch = _run(['git', 'rev-parse', '--abbrev-ref', 'HEAD'])
except Exception:
pass
message = f"sha: {sha}, status: {diff}, branch: {branch}"
return message
def collate_fn(batch):
batch = list(zip(*batch))
batch[0] = nested_tensor_from_tensor_list(batch[0])
return tuple(batch)
def _max_by_axis(the_list):
# type: (List[List[int]]) -> List[int]
maxes = the_list[0]
for sublist in the_list[1:]:
for index, item in enumerate(sublist):
maxes[index] = max(maxes[index], item)
return maxes
class NestedTensor(object):
def __init__(self, tensors, mask: Optional[Tensor]):
self.tensors = tensors
self.mask = mask
def to(self, device):
# type: (Device) -> NestedTensor # noqa
cast_tensor = self.tensors.to(device)
mask = self.mask
if mask is not None:
assert mask is not None
cast_mask = mask.to(device)
else:
cast_mask = None
return NestedTensor(cast_tensor, cast_mask)
def decompose(self):
return self.tensors, self.mask
def __repr__(self):
return str(self.tensors)
def nested_tensor_from_tensor_list(tensor_list: List[Tensor]):
# TODO make this more general
if tensor_list[0].ndim == 3:
if torchvision._is_tracing():
# nested_tensor_from_tensor_list() does not export well to ONNX
# call _onnx_nested_tensor_from_tensor_list() instead
return _onnx_nested_tensor_from_tensor_list(tensor_list)
# TODO make it support different-sized images
max_size = _max_by_axis([list(img.shape) for img in tensor_list])
# min_size = tuple(min(s) for s in zip(*[img.shape for img in tensor_list]))
batch_shape = [len(tensor_list)] + max_size
b, c, h, w = batch_shape
dtype = tensor_list[0].dtype
device = tensor_list[0].device
tensor = torch.zeros(batch_shape, dtype=dtype, device=device)
mask = torch.ones((b, h, w), dtype=torch.bool, device=device)
for img, pad_img, m in zip(tensor_list, tensor, mask):
pad_img[: img.shape[0], : img.shape[1], : img.shape[2]].copy_(img)
m[: img.shape[1], :img.shape[2]] = False
else:
raise ValueError('not supported')
return NestedTensor(tensor, mask)
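def _nested_tensor_example():
    # Illustrative helper, not part of the original module: batch two images
    # of different sizes into one zero-padded tensor plus a padding mask.
    imgs = [torch.rand(3, 200, 300), torch.rand(3, 180, 320)]
    tensors, mask = nested_tensor_from_tensor_list(imgs).decompose()
    return tensors.shape, mask.shape  # (2, 3, 200, 320), (2, 200, 320)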
# _onnx_nested_tensor_from_tensor_list() is an implementation of
# nested_tensor_from_tensor_list() that is supported by ONNX tracing.
@torch.jit.unused
def _onnx_nested_tensor_from_tensor_list(tensor_list: List[Tensor]) -> NestedTensor:
max_size = []
for i in range(tensor_list[0].dim()):
max_size_i = torch.max(torch.stack([img.shape[i] for img in tensor_list]).to(torch.float32)).to(torch.int64)
max_size.append(max_size_i)
max_size = tuple(max_size)
# work around for
# pad_img[: img.shape[0], : img.shape[1], : img.shape[2]].copy_(img)
# m[: img.shape[1], :img.shape[2]] = False
# which is not yet supported in onnx
padded_imgs = []
padded_masks = []
for img in tensor_list:
padding = [(s1 - s2) for s1, s2 in zip(max_size, tuple(img.shape))]
padded_img = torch.nn.functional.pad(img, (0, padding[2], 0, padding[1], 0, padding[0]))
padded_imgs.append(padded_img)
m = torch.zeros_like(img[0], dtype=torch.int, device=img.device)
padded_mask = torch.nn.functional.pad(m, (0, padding[2], 0, padding[1]), "constant", 1)
padded_masks.append(padded_mask.to(torch.bool))
tensor = torch.stack(padded_imgs)
mask = torch.stack(padded_masks)
return NestedTensor(tensor, mask=mask)
def setup_for_distributed(is_master):
"""
This function disables printing when not in master process
"""
import builtins as __builtin__
builtin_print = __builtin__.print
def print(*args, **kwargs):
force = kwargs.pop('force', False)
if is_master or force:
builtin_print(*args, **kwargs)
__builtin__.print = print
def is_dist_avail_and_initialized():
if not dist.is_available():
return False
if not dist.is_initialized():
return False
return True
def get_world_size():
if not is_dist_avail_and_initialized():
return 1
return dist.get_world_size()
def get_rank():
if not is_dist_avail_and_initialized():
return 0
return dist.get_rank()
def is_main_process():
return get_rank() == 0
def save_on_master(*args, **kwargs):
if is_main_process():
torch.save(*args, **kwargs)
def init_distributed_mode(args):
if 'RANK' in os.environ and 'WORLD_SIZE' in os.environ:
args.rank = int(os.environ["RANK"])
args.world_size = int(os.environ['WORLD_SIZE'])
args.gpu = int(os.environ['LOCAL_RANK'])
elif 'SLURM_PROCID' in os.environ:
args.rank = int(os.environ['SLURM_PROCID'])
args.gpu = args.rank % torch.cuda.device_count()
else:
print('Not using distributed mode')
args.distributed = False
return
args.distributed = True
torch.cuda.set_device(args.gpu)
args.dist_backend = 'nccl'
print('| distributed init (rank {}): {}'.format(
args.rank, args.dist_url), flush=True)
torch.distributed.init_process_group(backend=args.dist_backend, init_method=args.dist_url,
world_size=args.world_size, rank=args.rank)
torch.distributed.barrier()
setup_for_distributed(args.rank == 0)
@torch.no_grad()
def accuracy(output, target, topk=(1,)):
"""Computes the precision@k for the specified values of k"""
if target.numel() == 0:
return [torch.zeros([], device=output.device)]
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
        correct_k = correct[:k].reshape(-1).float().sum(0)
res.append(correct_k.mul_(100.0 / batch_size))
return res
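def _accuracy_example():
    # Illustrative helper, not part of the original module: both predictions
    # are correct, so top-1 precision is 100%.
    output = torch.tensor([[0.1, 0.9], [0.8, 0.2]])
    target = torch.tensor([1, 0])
    return accuracy(output, target, topk=(1,))  # [tensor(100.)]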
def interpolate(input, size=None, scale_factor=None, mode="nearest", align_corners=None):
# type: (Tensor, Optional[List[int]], Optional[float], str, Optional[bool]) -> Tensor
"""
Equivalent to nn.functional.interpolate, but with support for empty batch sizes.
    This will eventually be supported natively by PyTorch, and this
    helper can go away.
"""
if float(torchvision.__version__.split(".")[1]) < 7.0:
if input.numel() > 0:
return torch.nn.functional.interpolate(
input, size, scale_factor, mode, align_corners
)
output_shape = _output_size(2, input, size, scale_factor)
output_shape = list(input.shape[:-2]) + list(output_shape)
return _new_empty_tensor(input, output_shape)
else:
return torchvision.ops.misc.interpolate(input, size, scale_factor, mode, align_corners)
|
Cream/iRPE/DETR-with-iRPE/util/misc.py/0
|
{
"file_path": "Cream/iRPE/DETR-with-iRPE/util/misc.py",
"repo_id": "Cream",
"token_count": 7011
}
| 348 |
from .default import _C as config
from .default import update_config
from .default import _update_config_from_file
from .default import save_config
|
CvT/lib/config/__init__.py/0
|
{
"file_path": "CvT/lib/config/__init__.py",
"repo_id": "CvT",
"token_count": 38
}
| 349 |
from .build import build_optimizer
|
CvT/lib/optim/__init__.py/0
|
{
"file_path": "CvT/lib/optim/__init__.py",
"repo_id": "CvT",
"token_count": 9
}
| 350 |
InvalidTimestamps = '''The timestamp column specified is malformed.'''
InvalidSeriesOrder = '''The timestamp column specified is not in ascending order.'''
DuplicateSeriesTimestamp = '''The timestamp column specified has duplicated timestamps.'''
InvalidValueFormat = '''The data in column "{0}" can not be parsed as float values.'''
InvalidSeriesValue = '''The data in column "{0}" contains nan values.'''
ValueOverflow = '''The magnitude of data in column "{0}" exceeds limitation.'''
NotEnoughPoints = '''The dataset should contain at least {0} points to run this module.'''
InvalidBatchSize = '''The "batchSize" parameter should be at least {0} or 0 ''' \
'''that indicates to run all data in a batch.'''
ColumnNotFoundError = '''Column with name or index "{0}" not found.'''
|
anomalydetector/aml_component/error_messages.py/0
|
{
"file_path": "anomalydetector/aml_component/error_messages.py",
"repo_id": "anomalydetector",
"token_count": 220
}
| 351 |
from setuptools import setup, find_packages, Extension
from Cython.Build import cythonize
from Cython.Distutils import build_ext
import numpy as np
__version__ = "can't find version.py"
exec(compile(open('version.py').read(),
'version.py', 'exec'))
extensions = [
Extension("msanomalydetector._anomaly_kernel_cython", ["msanomalydetector/_anomaly_kernel_cython.pyx"],
define_macros=[('CYTHON_TRACE', '1')])
]
cmdclass = {'build_ext': build_ext}
install_requires = [
'Cython>=0.29.2',
'numpy==1.18.1',
'pandas==0.25.3'
]
setup(
name="msanomalydetector",
description='Microsoft Anomaly Detector Package Based On Saliency Detection',
packages=find_packages(),
include_dirs=[np.get_include()],
cmdclass=cmdclass,
ext_modules=cythonize(extensions),
version=__version__,
setup_requires=['Cython>=0.29.2', 'numpy==1.18.1'],
install_requires=install_requires,
requires=['Cython', 'numpy', 'pandas'],
python_requires='>=3.6.0',
package_data={'': ['*.txt']}
)
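# Build sketch (assumed invocation, not part of the original file; the Cython
# extension requires a local C compiler):
#   pip install "Cython>=0.29.2" numpy==1.18.1
#   python setup.py build_ext --inplace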
|
anomalydetector/setup.py/0
|
{
"file_path": "anomalydetector/setup.py",
"repo_id": "anomalydetector",
"token_count": 430
}
| 352 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from abc import abstractmethod
from typing import Any
from overrides import EnforceOverrides
class DatasetProvider(EnforceOverrides):
"""Abstract class for dataset providers.
This class serves as a base for implementing dataset providers that can return
training, validation and testing datasets. The class enforces implementation
of three methods: `get_train_dataset`, `get_val_dataset` and `get_test_dataset`.
These methods should return an instance of the respective dataset, regardless of
its structure.
Note:
This class is inherited from `EnforceOverrides` and any overridden methods in the
subclass should be decorated with `@overrides` to ensure they are properly overridden.
Examples:
>>> class MyDatasetProvider(DatasetProvider):
>>> def __init__(self) -> None:
>>> super().__init__()
>>>
>>> @overrides
>>> def get_train_dataset(self) -> Any:
>>> return torchvision.datasets.MNIST(train=True)
>>>
>>> @overrides
>>> def get_val_dataset(self) -> Any:
>>> return torchvision.datasets.MNIST(train=False)
>>>
>>> @overrides
>>> def get_test_dataset(self) -> Any:
>>> return torchvision.datasets.MNIST(train=False)
"""
def __init__(self) -> None:
"""Initialize the dataset provider."""
pass
@abstractmethod
def get_train_dataset(self) -> Any:
"""Get a training dataset.
Returns:
An instance of a training dataset.
"""
pass
@abstractmethod
def get_val_dataset(self) -> Any:
"""Get a validation dataset.
Returns:
An instance of a validation dataset, or the training dataset if
validation dataset is not available.
"""
pass
@abstractmethod
def get_test_dataset(self) -> Any:
"""Get a testing dataset.
Returns:
An instance of a testing dataset, or the training/validation
dataset if testing dataset is not available.
"""
pass
|
archai/archai/api/dataset_provider.py/0
|
{
"file_path": "archai/archai/api/dataset_provider.py",
"repo_id": "archai",
"token_count": 903
}
| 353 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Union, Optional
from pathlib import Path
import pandas as pd
def get_search_csv(output_path: Union[str, Path], iteration_num: Optional[int] = -1) -> pd.DataFrame:
"""Reads the search csv file from the output path and returns a pandas dataframe
Args:
output_path (Union[str, Path]): Path to the output directory
iteration_num (int, optional): Search iteration to read from. Defaults to -1, which will point to the last iteration
Returns:
pd.DataFrame: Pandas dataframe with the search state
"""
if iteration_num == -1:
search_csv_path = max(Path(output_path).glob("search_state_*.csv"), key=lambda x: int(x.stem.split("_")[-1]))
else:
search_csv_path = Path(output_path) / f"search_state_{iteration_num}.csv"
if not search_csv_path.is_file():
raise FileNotFoundError(f"Search csv file not found at {search_csv_path}")
df = pd.read_csv(search_csv_path)
return df
def get_arch_abs_path(archid: str, downloaded_folder: Union[str, Path], iteration_num: Optional[int] = -1) -> Path:
"""Returns the absolute path to the architecture file
Args:
archid (str): Architecture id
downloaded_folder (Union[str, Path]): Path to the downloaded folder
iteration_num (int, optional): Search iteration to read from. Defaults to -1, which will point to the last iteration
Returns:
Path: Absolute path to the architecture file
"""
if iteration_num == -1:
dir_path = max(Path(downloaded_folder).glob("pareto_models_iter_*"), key=lambda x: int(x.stem.split("_")[-1]))
else:
dir_path = Path(downloaded_folder) / f"pareto_models_iter_{iteration_num}"
file_path = dir_path / archid
if not file_path.is_file():
raise FileNotFoundError(f"File not found at {file_path}")
return file_path.absolute()
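def _notebook_helper_example():
    # Illustrative helper, not part of the original module: a minimal sketch
    # assuming a finished search run under './output' whose csv contains an
    # 'archid' column (the column name is an assumption).
    df = get_search_csv('./output')  # last iteration by default
    best_archid = df.iloc[0]['archid']
    return get_arch_abs_path(best_archid, './output')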
|
archai/archai/common/notebook_helper.py/0
|
{
"file_path": "archai/archai/common/notebook_helper.py",
"repo_id": "archai",
"token_count": 708
}
| 354 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Callable, Optional
from overrides import overrides
from torch.utils.data import Dataset
from torchvision.datasets import Flowers102
from torchvision.transforms import ToTensor
from archai.api.dataset_provider import DatasetProvider
class Flowers102DatasetProvider(DatasetProvider):
"""Oxford 102 Flower dataset provider."""
def __init__(
self,
root: Optional[str] = "dataroot",
) -> None:
"""Initialize Oxford 102 Flower dataset provider.
Args:
root: Root directory of dataset where is saved.
"""
super().__init__()
self.root = root
@overrides
def get_train_dataset(
self,
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
) -> Dataset:
return Flowers102(
self.root,
split="train",
transform=transform or ToTensor(),
target_transform=target_transform,
download=True,
)
@overrides
def get_val_dataset(
self,
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
) -> Dataset:
return Flowers102(
self.root,
split="val",
transform=transform or ToTensor(),
target_transform=target_transform,
download=True,
)
@overrides
def get_test_dataset(
self,
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
) -> Dataset:
return Flowers102(
self.root,
split="test",
transform=transform or ToTensor(),
target_transform=target_transform,
download=True,
)
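def _flowers102_example():
    # Illustrative helper, not part of the original module: downloads the
    # dataset on first use and returns the first (image, label) sample.
    provider = Flowers102DatasetProvider(root='dataroot')
    image, label = provider.get_train_dataset()[0]
    return image.shape, label  # ToTensor yields a (C, H, W) float tensor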
|
archai/archai/datasets/cv/flowers102_dataset_provider.py/0
|
{
"file_path": "archai/archai/datasets/cv/flowers102_dataset_provider.py",
"repo_id": "archai",
"token_count": 811
}
| 355 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
#
# Copyright (c) Hazy Research.
# Licensed under the BSD-3-Clause license.
# https://github.com/HazyResearch/flash-attention/blob/main/training/src/datamodules
from __future__ import annotations
import math
import mmap
import sys
from pathlib import Path
from typing import Any, Dict, Optional, Tuple
from types import TracebackType
import numpy as np
import torch
from datasets.dataset_dict import DatasetDict
from torch.utils.data import Dataset
# `multiprocessing.shared_memory` is only available in Python 3.8+`
if sys.version_info.major == 3 and sys.version_info.minor >= 8:
from multiprocessing.shared_memory import SharedMemory
class FastHfDataset(Dataset):
"""Fast Hugging Face dataset."""
    def __init__(self, input_ids: np.ndarray, seq_len: Optional[int] = 1) -> None:
        """Initialize the dataset.
        Args:
            input_ids: Array with the encoded inputs, e.g. a NumPy array or memmap.
seq_len: Sequence length.
"""
super().__init__()
self.n_input_ids = ((len(input_ids) - 1) // seq_len) * seq_len + 1
self.seq_len = seq_len
# `input_ids` should not be sliced since they could be memory mapped
self.input_ids = input_ids
self.n_sequences = math.ceil((self.n_input_ids - 1) / self.seq_len)
def __enter__(self):
return self
def __exit__(self, exc_type: type[BaseException], exc_val: BaseException, exc_tb: TracebackType) -> None:
if isinstance(self.input_ids, np.memmap) and self.input_ids._mmap is not None:
self.input_ids._mmap.close()
def __len__(self) -> int:
return self.n_sequences
def __getitem__(self, idx: int) -> Tuple[torch.Tensor, torch.Tensor]:
start_idx = idx * self.seq_len
seq_len = min(self.seq_len, self.n_input_ids - 1 - start_idx)
input_ids = torch.as_tensor(self.input_ids[start_idx : (start_idx + seq_len + 1)].astype(np.int64))
labels = input_ids[1:].clone()
return input_ids[:-1], labels
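def _fast_hf_dataset_example():
    # Illustrative helper, not part of the original module: 17 tokens at
    # seq_len=4 yield 4 next-token-prediction pairs shifted by one position.
    input_ids = np.arange(17, dtype=np.uint16)
    with FastHfDataset(input_ids, seq_len=4) as dataset:
        inputs, labels = dataset[0]  # tensor([0, 1, 2, 3]), tensor([1, 2, 3, 4])
        return len(dataset), inputs, labels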
class SHMArray(np.ndarray):
"""Numpy array compatible with SharedMemory from `multiprocessing.shared_memory`.
Reference:
https://numpy.org/doc/stable/user/basics.subclassing.html#slightly-more-realistic-example-attribute-added-to-existing-array
"""
def __new__(cls: SHMArray, input_array: np.ndarray, shm: Optional[SharedMemory] = None) -> SHMArray:
obj = np.asarray(input_array).view(cls)
obj.shm = shm
return obj
def __array_finalize__(self, obj: SHMArray) -> None:
if obj is None:
return
self.shm = getattr(obj, "shm", None)
def process_with_shared_memory(
dataset_dict: DatasetDict, dtype: np.dtype, num_proc: Optional[int] = 1
) -> Dict[str, SHMArray]:
"""Process the dataset with a shared memory.
Args:
dataset_dict: Dataset dictionary.
dtype: Numpy data type.
num_proc: Number of processes.
Returns:
Dictionary with shared memory-processed datasets.
"""
def _process_with_shared_memory(example: Dict[str, Any], name, length: int) -> None:
shared_memory = SharedMemory(name=name)
shared_memory_array = np.ndarray((length,), dtype=dtype, buffer=shared_memory.buf)
start_idx = example["offset"] - len(example["input_ids"])
shared_memory_array[start_idx : example["offset"]] = example["input_ids"]
shared_memory.close()
processed_dataset_dict = {}
for name, ds in dataset_dict.items():
dataset_dict[name] = ds.add_column("offset", np.cumsum(ds["length"]))
length = dataset_dict[name][-1]["offset"]
shared_memory = SharedMemory(create=True, size=length * np.dtype(dtype).itemsize)
shared_memory_name = shared_memory.name
dataset_dict[name].map(
_process_with_shared_memory,
fn_kwargs={"name": shared_memory_name, "length": length},
batched=False,
num_proc=num_proc,
)
shared_memory_array = np.ndarray((length,), dtype=dtype, buffer=shared_memory.buf)
processed_dataset_dict[name] = SHMArray(shared_memory_array, shm=shared_memory)
return processed_dataset_dict
def process_with_memory_map_files(
dataset_dict: DatasetDict, cache_dir: str, dtype: np.dtype, num_proc: Optional[int] = 1
) -> Dict[str, np.ndarray]:
"""Process the dataset with memory map files.
Args:
dataset_dict: Dataset dictionary.
cache_dir: Cache directory.
dtype: Numpy data type.
num_proc: Number of processes.
Returns:
Dictionary with memory map file-processed datasets.
"""
def _process_with_memory_map_files(example: Dict[str, Any], file_path: str) -> None:
with open(file_path, "r+b") as f:
memory_map = mmap.mmap(f.fileno(), 0)
start_idx = example["offset"] - len(example["input_ids"])
length = len(example["input_ids"])
memory_map_array = np.ndarray(
(length,), dtype=dtype, buffer=memory_map, offset=np.dtype(dtype).itemsize * start_idx
)
memory_map_array[:] = example["input_ids"]
memory_map.flush()
processed_dataset_dict = {}
for split, dataset in dataset_dict.items():
dataset_dict[split] = dataset.add_column("offset", np.cumsum(dataset["length"]))
length = dataset_dict[split][-1]["offset"]
        file_path = Path(cache_dir) / f"{split}.bin"
with open(file_path.as_posix(), "wb") as f:
f.truncate(length * np.dtype(dtype).itemsize)
dataset_dict[split].map(
_process_with_memory_map_files,
fn_kwargs={"file_path": file_path},
batched=False,
num_proc=num_proc,
)
processed_dataset_dict[split] = np.memmap(file_path, dtype=dtype, mode="r", shape=(length,))
return processed_dataset_dict
def xor(p: Any, q: Any) -> bool:
"""Implements the logical XOR operator.
Args:
p: Any instance that may act as `True` or `False`.
q: Any instance that may act as `True` or `False`.
Returns:
Logical value.
"""
return (p and not q) or (not p and q)
|
archai/archai/datasets/nlp/fast_hf_dataset_provider_utils.py/0
|
{
"file_path": "archai/archai/datasets/nlp/fast_hf_dataset_provider_utils.py",
"repo_id": "archai",
"token_count": 2639
}
| 356 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import random
from pathlib import Path
from typing import List, Optional
from overrides import overrides
from tqdm import tqdm
from archai.common.ordered_dict_logger import OrderedDictLogger
from archai.discrete_search.api.archai_model import ArchaiModel
from archai.discrete_search.api.search_objectives import SearchObjectives
from archai.discrete_search.api.search_results import SearchResults
from archai.discrete_search.api.search_space import EvolutionarySearchSpace
from archai.discrete_search.api.searcher import Searcher
logger = OrderedDictLogger(source=__name__)
class LocalSearch(Searcher):
def __init__(
self,
search_space: EvolutionarySearchSpace,
search_objectives: SearchObjectives,
output_dir: str,
num_iters: Optional[int] = 10,
init_num_models: Optional[int] = 10,
initial_population_paths: Optional[List[str]] = None,
mutations_per_parent: Optional[int] = 1,
clear_evaluated_models: bool = True,
save_pareto_model_weights: bool = True,
seed: Optional[int] = 1,
):
"""Local search algorithm. In each iteration, the algorithm generates a new population by
mutating the current Pareto frontier. The process is repeated until `num_iters` is reached.
Args:
search_space (EvolutionarySearchSpace): Discrete search space compatible with evolutionary algorithms
search_objectives (SearchObjectives): Search objectives
output_dir (str): Output directory
num_iters (int, optional): Number of search iterations. Defaults to 10.
init_num_models (int, optional): Number of initial models. Defaults to 10.
initial_population_paths (Optional[List[str]], optional): Paths to initial population.
If None, then `init_num_models` random models are used. Defaults to None.
mutations_per_parent (int, optional): Number of mutations per parent. Defaults to 1.
clear_evaluated_models (bool, optional): Optimizes memory usage by clearing the architecture
of `ArchaiModel` after each iteration. Defaults to True.
save_pareto_model_weights: If `True`, saves the weights of the pareto models.
seed (int, optional): Random seed. Defaults to 1.
"""
super(LocalSearch, self).__init__()
assert isinstance(
search_space, EvolutionarySearchSpace
), f"{str(search_space.__class__)} is not compatible with {str(self.__class__)}"
self.iter_num = 0
self.search_space = search_space
self.so = search_objectives
self.output_dir = Path(output_dir)
self.output_dir.mkdir(exist_ok=True, parents=True)
# Algorithm settings
self.num_iters = num_iters
self.init_num_models = init_num_models
self.initial_population_paths = initial_population_paths
self.mutations_per_parent = mutations_per_parent
# Utils
self.clear_evaluated_models = clear_evaluated_models
self.save_pareto_model_weights = save_pareto_model_weights
self.search_state = SearchResults(search_space, self.so)
self.seed = seed
self.rng = random.Random(seed)
self.seen_archs = set()
self.num_sampled_archs = 0
assert self.init_num_models > 0
assert self.num_iters > 0
def sample_models(self, num_models: int, patience: Optional[int] = 5) -> List[ArchaiModel]:
"""Sample models from the search space.
Args:
num_models: Number of models to sample.
patience: Number of tries to sample a valid model.
Returns:
List of sampled models.
"""
        nb_tries, valid_sample = 0, []
        while len(valid_sample) < num_models and nb_tries < patience:
            sample = [self.search_space.random_sample() for _ in range(num_models)]
            _, valid_indices = self.so.validate_constraints(sample)
            valid_sample += [sample[i] for i in valid_indices]
            nb_tries += 1
        return valid_sample[:num_models]
def mutate_parents(
self, parents: List[ArchaiModel], mutations_per_parent: Optional[int] = 1, patience: Optional[int] = 20
) -> List[ArchaiModel]:
"""Mutate parents to generate new models.
Args:
parents: List of parent models.
mutations_per_parent: Number of mutations to apply to each parent.
patience: Number of tries to sample a valid model.
Returns:
List of mutated models.
"""
mutations = {}
for p in tqdm(parents, desc="Mutating parents"):
candidates = {}
nb_tries = 0
            while len(candidates) < mutations_per_parent and nb_tries < patience:
                # Count every attempt towards patience, including invalid mutations,
                # so the loop always terminates
                nb_tries += 1
                mutated_model = self.search_space.mutate(p)
                mutated_model.metadata["parent"] = p.archid
                if not self.so.is_model_valid(mutated_model):
                    continue
                if mutated_model.archid not in self.seen_archs:
                    mutated_model.metadata["generation"] = self.iter_num
                    candidates[mutated_model.archid] = mutated_model
mutations.update(candidates)
return list(mutations.values())
@overrides
def search(self) -> SearchResults:
self.iter_num = 0
if self.initial_population_paths:
logger.info(f"Loading initial population from {len(self.initial_population_paths)} architectures ...")
unseen_pop = [self.search_space.load_arch(path) for path in self.initial_population_paths]
else:
logger.info(f"Using {self.init_num_models} random architectures as the initial population ...")
unseen_pop = self.sample_models(self.init_num_models)
self.all_pop = unseen_pop
for i in range(self.num_iters):
self.iter_num = i + 1
self.on_start_iteration(self.iter_num)
logger.info(f"Iteration {i+1}/{self.num_iters}")
if len(unseen_pop) == 0:
logger.info("No models to evaluate. Stopping search ...")
break
# Calculates objectives
logger.info(f"Calculating search objectives {list(self.so.objective_names)} for {len(unseen_pop)} models ...")
results = self.so.eval_all_objs(unseen_pop)
self.search_state.add_iteration_results(
unseen_pop,
results,
# Mutation info
extra_model_data={
"parent": [p.metadata.get("parent", None) for p in unseen_pop],
},
)
# Records evaluated archs to avoid computing the same architecture twice
self.seen_archs.update([m.archid for m in unseen_pop])
            # Updates the Pareto frontier
logger.info("Updating Pareto frontier ...")
pareto = self.search_state.get_pareto_frontier()["models"]
logger.info(f"Found {len(pareto)} members.")
# Saves search iteration results
self.search_state.save_search_state(str(self.output_dir / f"search_state_{self.iter_num}.csv"))
self.search_state.save_pareto_frontier_models(
str(self.output_dir / f"pareto_models_iter_{self.iter_num}"),
save_weights=self.save_pareto_model_weights
)
self.search_state.save_all_2d_pareto_evolution_plots(str(self.output_dir))
# Clears models from memory if needed
if self.clear_evaluated_models:
logger.info("Optimzing memory usage ...")
[model.clear() for model in unseen_pop]
            # Mutates the current Pareto members to produce the next population,
            # discarding mutations that violate the search constraints
unseen_pop = self.mutate_parents(pareto, self.mutations_per_parent)
logger.info(f"Mutation: {len(unseen_pop)} new models.")
            # Updates the set of architectures ever visited
self.all_pop.extend(unseen_pop)
return self.search_state
|
archai/archai/discrete_search/algos/local_search.py/0
|
{
"file_path": "archai/archai/discrete_search/algos/local_search.py",
"repo_id": "archai",
"token_count": 3521
}
| 357 |
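A minimal usage sketch of the `LocalSearch` class above (`my_space` and `my_objectives` are hypothetical placeholders; any pre-built `EvolutionarySearchSpace` / `SearchObjectives` pair works):

from archai.discrete_search.algos.local_search import LocalSearch

# `my_space` and `my_objectives` are assumed to exist already.
searcher = LocalSearch(
    search_space=my_space,
    search_objectives=my_objectives,
    output_dir="./local_search_output",
    num_iters=5,
    init_num_models=8,
    mutations_per_parent=2,
)
results = searcher.search()  # SearchResults with per-iteration Pareto frontiers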
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
|
archai/archai/discrete_search/evaluators/nlp/__init__.py/0
|
{
"file_path": "archai/archai/discrete_search/evaluators/nlp/__init__.py",
"repo_id": "archai",
"token_count": 17
}
| 358 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from archai.discrete_search.search_spaces.benchmark.natsbench_tss import (
NatsbenchTssSearchSpace,
)
|
archai/archai/discrete_search/search_spaces/benchmark/__init__.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/benchmark/__init__.py",
"repo_id": "archai",
"token_count": 58
}
| 359 |
from .codegen.model import CodeGenForCausalLM, CodeGenConfig
from .gpt2.model import GPT2LMHeadModel, GPT2Config
BACKBONES = {
'codegen': CodeGenForCausalLM,
'gpt2': GPT2LMHeadModel
}
CONFIGS = {
'codegen': CodeGenConfig,
'gpt2': GPT2Config
}
|
archai/archai/discrete_search/search_spaces/nlp/tfpp/backbones/__init__.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/nlp/tfpp/backbones/__init__.py",
"repo_id": "archai",
"token_count": 111
}
| 360 |
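A sketch of how the registries above might be consumed (assuming the config classes accept default construction, as Hugging Face-style configs typically do):

# Look up matching backbone and configuration classes by name.
config_cls = CONFIGS['gpt2']
model_cls = BACKBONES['gpt2']
model = model_cls(config_cls())  # default-configured GPT-2 LM head model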
# TD: [2023-01-05]: Extracted the OptimModule class from
# https://github.com/HazyResearch/state-spaces/blob/06dbbdfd0876501a7f12bf3262121badbc7658af/src/models/sequence/ss/kernel.py
import torch.nn as nn
class OptimModule(nn.Module):
""" Interface for Module that allows registering buffers/parameters with configurable optimizer hyperparameters """
def register(self, name, tensor, lr=None):
"""Register a tensor with a configurable learning rate and 0 weight decay"""
if lr == 0.0:
self.register_buffer(name, tensor)
else:
self.register_parameter(name, nn.Parameter(tensor))
optim = {"weight_decay": 0.0}
            if lr is not None:
                optim["lr"] = lr
setattr(getattr(self, name), "_optim", optim)
|
archai/archai/discrete_search/search_spaces/nlp/tfpp/ops/ssm_utils/utils.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/nlp/tfpp/ops/ssm_utils/utils.py",
"repo_id": "archai",
"token_count": 313
}
| 361 |
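The `_optim` attribute set by `OptimModule.register` is only a hint; the training code has to read it back when building the optimizer. A minimal sketch of that consumer side (the helper below is hypothetical, not part of the file above):

import torch

def build_param_groups(module: torch.nn.Module) -> list:
    """Turn per-tensor `_optim` hints into optimizer parameter groups."""
    groups = []
    for param in module.parameters():
        hints = getattr(param, "_optim", {})  # e.g. {"weight_decay": 0.0, "lr": 1e-4}
        groups.append({"params": [param], **hints})
    return groups

# optimizer = torch.optim.AdamW(build_param_groups(model), lr=1e-3, weight_decay=0.1)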
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import json
from copy import deepcopy
from hashlib import sha1
from random import Random
from typing import Any, Dict, List, Optional
import torch
from overrides import overrides
from transformers.modeling_utils import no_init_weights
from transformers.models.auto.configuration_auto import AutoConfig
from transformers.models.auto.modeling_auto import AutoModelForCausalLM
from archai.discrete_search.api.archai_model import ArchaiModel
from archai.discrete_search.api.search_space import (
BayesOptSearchSpace,
EvolutionarySearchSpace,
)
from archai.discrete_search.search_spaces.nlp.transformer_flex.models.configuration_gpt2_flex import (
GPT2FlexConfig,
)
from archai.discrete_search.search_spaces.nlp.transformer_flex.models.configuration_mem_transformer import (
MemTransformerConfig,
)
from archai.discrete_search.search_spaces.nlp.transformer_flex.models.modeling_gpt2_flex import (
GPT2FlexLMHeadModel,
)
from archai.discrete_search.search_spaces.nlp.transformer_flex.models.modeling_mem_transformer import (
MemTransformerLMHeadModel,
)
# Register internal models to be compatible with auto classes
AutoConfig.register("gpt2-flex", GPT2FlexConfig)
AutoConfig.register("mem-transformer", MemTransformerConfig)
AutoModelForCausalLM.register(GPT2FlexConfig, GPT2FlexLMHeadModel)
AutoModelForCausalLM.register(MemTransformerConfig, MemTransformerLMHeadModel)
class TransformerFlexSearchSpace(EvolutionarySearchSpace, BayesOptSearchSpace):
"""Search space for Transformer models with flexible architecture.
This class allows defining a search space for Transformer models with flexible architectures,
using evolutionary or Bayesian optimization algorithms.
The search space can be customized to include different values for hyperparameters,
such as the number of layers, embedding dimensions, and number of attention heads.
It also supports different Transformer variants, such as CodeGen, GPT-2, and Transformer-XL.
"""
_DEFAULT_MODELS = {
"codegen": {"d_model": "n_embd", "d_inner": "n_inner", "n_head": "n_head", "n_layer": "n_layer"},
"gpt2": {
"d_model": "n_embd",
"d_inner": "n_inner",
"n_head": "n_head",
"n_layer": "n_layer",
"vocab_size": "vocab_size",
"max_sequence_length": "n_positions",
"dropout": "resid_pdrop",
"dropatt": "attn_pdrop",
},
"gpt2-flex": {
"d_model": "n_embd",
"d_inner": "n_inner",
"n_head": "n_head",
"n_layer": "n_layer",
"vocab_size": "vocab_size",
"max_sequence_length": "n_positions",
"dropout": "resid_pdrop",
"dropatt": "attn_pdrop",
},
"mem-transformer": {"d_model": "d_model", "d_inner": "d_inner", "n_head": "n_head", "n_layer": "n_layer"},
"opt": {
"d_model": "hidden_size",
"d_inner": "ffn_dim",
"n_head": "num_attention_heads",
"n_layer": "num_hidden_layers",
},
"transfo-xl": {"d_model": "d_model", "d_inner": "d_inner", "n_head": "n_head", "n_layer": "n_layer"},
}
_DEFAULT_D_MODEL = list(range(128, 1024, 64))
_DEFAULT_D_INNER = list(range(128, 1024, 64))
_DEFAULT_N_HEAD = [2, 4, 8]
def __init__(
self,
arch_type: str,
min_layers: Optional[int] = 1,
max_layers: Optional[int] = 10,
d_inner_options: Optional[List[int]] = None,
d_model_options: Optional[List[int]] = None,
n_head_options: Optional[List[int]] = None,
share_d_inner: Optional[bool] = True,
mutation_prob: Optional[float] = 0.3,
vocab_size: Optional[int] = 10_000,
max_sequence_length: Optional[int] = 1024,
att_dropout_rate: Optional[float] = 0.0,
disable_weights_init: Optional[bool] = False,
random_seed: Optional[int] = 1,
) -> None:
"""Initialize search space.
Args:
arch_type: Type of Transformer architecture. Must be one of `codegen`, `gpt2`,
`gpt2-flex`, `mem-transformer`, `opt` or `transfo-xl`.
min_layers: Minimum number of layers in the model.
max_layers: Maximum number of layers in the model.
d_inner_options: List of options for the intermediate dimension (`d_inner`).
d_model_options: List of options for the model dimension (`d_model`).
n_head_options: List of options for the number of attention heads (`n_head`).
share_d_inner: Whether to share the intermediate dimension (`d_inner`) across layers.
mutation_prob: Probability of mutating a hyperparameter during evolution.
vocab_size: Size of the vocabulary.
max_sequence_length: Maximum sequence length.
att_dropout_rate: Dropout rate for attention.
disable_weights_init: Whether to disable weights initialization.
random_seed: Random seed for reproducibility.
"""
assert (
arch_type in self._DEFAULT_MODELS
), f"The value of `arch_type` must be one of {list(self._DEFAULT_MODELS.keys())}"
self.arch_type = arch_type
self.min_layers = min_layers
self.max_layers = max_layers
self.options = {
"d_inner": {"values": d_inner_options or self._DEFAULT_D_INNER, "share": share_d_inner},
"d_model": {"values": d_model_options or self._DEFAULT_D_MODEL, "share": True},
"n_head": {"values": n_head_options or self._DEFAULT_N_HEAD, "share": True},
}
self.mutation_prob = mutation_prob
self.rng = Random(random_seed)
self.vocab_size = vocab_size
self.max_sequence_length = max_sequence_length
self.att_dropout_rate = att_dropout_rate
self.disable_weights_init = disable_weights_init
def _load_model_from_config(self, model_config: Dict[str, Any]) -> torch.nn.Module:
param_map = self._DEFAULT_MODELS[self.arch_type]
mapped_config = {param_map.get(p_name, p_name): p_value for p_name, p_value in model_config.items()}
config = AutoConfig.for_model(self.arch_type, **mapped_config)
if self.disable_weights_init:
with no_init_weights():
return AutoModelForCausalLM.from_config(config)
return AutoModelForCausalLM.from_config(config)
def get_archid(self, config: Dict[str, Any]) -> str:
"""Returns a unique identifier for a given configuration.
Args:
config: Configuration dictionary.
Returns:
A unique identifier for the configuration.
"""
pruned_config = deepcopy(config)
n_layer = config["n_layer"]
for c, opts in self.options.items():
if not opts["share"]:
pruned_config[c] = pruned_config[c][:n_layer]
arch_str = json.dumps(pruned_config, sort_keys=True, ensure_ascii=True)
return f'{self.arch_type}_{sha1(arch_str.encode("ascii")).hexdigest()}'
@overrides
def random_sample(self) -> ArchaiModel:
model = None
# Fixed params
config = {
"vocab_size": self.vocab_size,
"dropatt": self.att_dropout_rate,
"max_sequence_length": self.max_sequence_length,
}
while model is None:
config["n_layer"] = self.rng.randint(self.min_layers, self.max_layers)
for param, param_opts in self.options.items():
if param_opts["share"]:
config[param] = self.rng.choice(param_opts["values"])
else:
config[param] = [self.rng.choice(param_opts["values"]) for _ in range(self.max_layers)]
if config["d_model"] % config["n_head"] == 0:
model = self._load_model_from_config(config)
return ArchaiModel(arch=model, archid=self.get_archid(config), metadata={"config": config})
@overrides
def save_arch(self, model: ArchaiModel, path: str) -> None:
arch_config = model.metadata["config"]
arch_config["arch_type"] = self.arch_type
with open(path, "w", encoding="utf-8") as fp:
json.dump(arch_config, fp, sort_keys=True, indent=2, ensure_ascii=True)
@overrides
def load_arch(self, path: str) -> ArchaiModel:
with open(path, "r", encoding="utf-8") as fp:
arch_config = json.load(fp)
arch_type = arch_config.pop("arch_type")
assert arch_type == self.arch_type, (
f"Arch type value ({arch_type}) is different from the search space" f"arch type ({self.arch_type})."
)
return ArchaiModel(
arch=self._load_model_from_config(arch_config),
archid=self.get_archid(arch_config),
metadata={"config": arch_config},
)
@overrides
def save_model_weights(self, model: ArchaiModel, path: str) -> None:
        torch.save(model.arch.state_dict(), path)
@overrides
def load_model_weights(self, model: ArchaiModel, path: str) -> None:
model.arch.load_state_dict(torch.load(path))
@overrides
def mutate(self, arch: ArchaiModel) -> ArchaiModel:
config = deepcopy(arch.metadata["config"])
if self.rng.random() < self.mutation_prob:
config["n_layer"] = self.rng.randint(self.min_layers, self.max_layers)
for param, opts in self.options.items():
if opts["share"]:
if self.rng.random() < self.mutation_prob:
config[param] = self.rng.choice(opts["values"])
else:
config[param] = [
self.rng.choice(opts["values"]) if self.rng.random() < self.mutation_prob else c
for c in config[param]
]
return ArchaiModel(
arch=self._load_model_from_config(config), archid=self.get_archid(config), metadata={"config": config}
)
@overrides
def crossover(self, arch_list: List[ArchaiModel]) -> ArchaiModel:
c0 = deepcopy(arch_list[0].metadata["config"])
c1 = arch_list[1].metadata["config"]
c0["n_layer"] = self.rng.choice([c0["n_layer"], c1["n_layer"]])
for param, opts in self.options.items():
if opts["share"]:
c0[param] = self.rng.choice([c0[param], c1[param]])
else:
assert len(c0[param]) == len(c1[param]) == self.max_layers
for layer in range(self.max_layers):
c0[param][layer] = self.rng.choice([c0[param][layer], c1[param][layer]])
return ArchaiModel(arch=self._load_model_from_config(c0), archid=self.get_archid(c0), metadata={"config": c0})
@overrides
def encode(self, model: ArchaiModel) -> List[float]:
config = model.metadata["config"]
n_layer = config["n_layer"]
gene = [n_layer]
for param, opts in self.options.items():
if opts["share"]:
gene.append(config[param])
else:
gene += config[param][:n_layer]
gene += [0] * (self.max_layers - n_layer)
return gene
|
archai/archai/discrete_search/search_spaces/nlp/transformer_flex/search_space.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/nlp/transformer_flex/search_space.py",
"repo_id": "archai",
"token_count": 5025
}
| 362 |
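A minimal usage sketch of the search space above (the hyperparameter choices are illustrative assumptions, not defaults):

space = TransformerFlexSearchSpace(
    arch_type="gpt2",
    min_layers=2,
    max_layers=6,
    d_model_options=[256, 512],
    n_head_options=[4, 8],
)

parent = space.random_sample()  # ArchaiModel wrapping a freshly sampled config
child = space.mutate(parent)    # perturbs hyperparameters with probability `mutation_prob`
gene = space.encode(child)      # flat encoding usable by Bayesian optimization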
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from overrides import overrides
from archai.supergraph.algos.darts.bilevel_arch_trainer import BilevelArchTrainer
from archai.supergraph.algos.darts.darts_model_desc_builder import DartsModelDescBuilder
from archai.supergraph.nas.arch_trainer import TArchTrainer
from archai.supergraph.nas.exp_runner import ExperimentRunner
class DartsExperimentRunner(ExperimentRunner):
@overrides
def model_desc_builder(self)->DartsModelDescBuilder:
return DartsModelDescBuilder()
@overrides
def trainer_class(self)->TArchTrainer:
return BilevelArchTrainer
|
archai/archai/supergraph/algos/darts/darts_exp_runner.py/0
|
{
"file_path": "archai/archai/supergraph/algos/darts/darts_exp_runner.py",
"repo_id": "archai",
"token_count": 214
}
| 363 |
"""Builds the Pytorch computational graph.
Tensors flowing into a single vertex are added together for all vertices
except the output, which is concatenated instead. Tensors flowing out of input
are always added.
If interior edge channels don't match, drop the extra channels (channels are
guaranteed non-decreasing). Tensors flowing out of the input are always
projected instead.
"""
from __future__ import absolute_import, division, print_function
import logging
import math
import numpy as np
import torch
import torch.nn as nn
from archai.supergraph.algos.nasbench101.base_ops import *
class Network(nn.Module):
def __init__(self, spec, stem_out_channels, num_stacks, num_modules_per_stack, num_labels):
super(Network, self).__init__()
logging.info(f'model matrix: {spec.matrix}')
logging.info(f'model ops: {spec.ops}')
self.layers = nn.ModuleList([])
in_channels = 3
out_channels = stem_out_channels # out channels for the model stem
# initial stem convolution
stem_conv = ConvBnRelu(in_channels, out_channels, 3, 1, 1)
self.layers.append(stem_conv)
in_channels = out_channels
for stack_num in range(num_stacks):
if stack_num > 0:
                # downsampling by maxpool doesn't change the channel count
downsample = nn.MaxPool2d(kernel_size=2, stride=2)
self.layers.append(downsample)
out_channels *= 2
for module_num in range(num_modules_per_stack):
logging.debug(f'stack={stack_num}, cell={module_num}, in_channels={in_channels}, out_channels={out_channels}')
cell = Cell(spec, in_channels, out_channels)
self.layers.append(cell)
in_channels = out_channels
self.classifier = nn.Linear(out_channels, num_labels)
self._initialize_weights()
def forward(self, x):
for _, layer in enumerate(self.layers):
x = layer(x)
out = torch.mean(x, (2, 3))
out = self.classifier(out)
return out
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2.0 / n))
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.Linear):
n = m.weight.size(1)
m.weight.data.normal_(0, 0.01)
m.bias.data.zero_()
class Cell(nn.Module):
"""
    Builds the cell using the specified adjacency matrix and op labels. The
    `out_channels` argument controls the module's output channel count, while
    interior channels are determined by splitting the channel count equally
    whenever tensors are concatenated.
"""
def __init__(self, spec, in_channels, out_channels):
super(Cell, self).__init__()
self.spec = spec
self.num_vertices = np.shape(self.spec.matrix)[0]
# vertex_channels[i] = number of output channels of vertex i
self.vertex_channels = ComputeVertexChannels(in_channels, out_channels, self.spec.matrix)
#self.vertex_channels = [in_channels] + [out_channels] * (self.num_vertices - 1)
# operation for each node
self.vertex_op = nn.ModuleList([None])
for t in range(1, self.num_vertices-1):
op = OP_MAP[spec.ops[t]](self.vertex_channels[t], self.vertex_channels[t])
self.vertex_op.append(op)
        # operation for the input on each vertex:
        # if an edge comes from the input, we always apply a 1x1 projection to equalize channels
self.input_op = nn.ModuleList([None])
for t in range(1, self.num_vertices):
if self.spec.matrix[0, t]:
self.input_op.append(Projection(in_channels, self.vertex_channels[t]))
else:
self.input_op.append(None)
def forward(self, x):
tensors = [x]
out_concat = []
for t in range(1, self.num_vertices-1): # except input
# gather input tensors for this vertex, excluding input
fan_in = [Truncate(tensors[src], self.vertex_channels[t]) for src in range(1, t) if self.spec.matrix[src, t]]
# add input tensor but without truncation
if self.spec.matrix[0, t]:
fan_in.append(self.input_op[t](x))
# First sum all input tensors
#vertex_input = torch.stack(fan_in, dim=0).sum(dim=0)
vertex_input = sum(fan_in)
            # compute vertex output by applying vertex op
#vertex_input = sum(fan_in) / len(fan_in)
vertex_output = self.vertex_op[t](vertex_input)
tensors.append(vertex_output)
# if vertex is connected to output, add in outputs
if self.spec.matrix[t, self.num_vertices-1]:
out_concat.append(tensors[t])
if not out_concat:
assert self.spec.matrix[0, self.num_vertices-1]
outputs = self.input_op[self.num_vertices-1](tensors[0])
else:
if len(out_concat) == 1: # perf optimization
outputs = out_concat[0]
else:
outputs = torch.cat(out_concat, 1)
            # if the input is also connected to the output, then apply the output
            # vertex operation and sum it with the concatenated tensor
if self.spec.matrix[0, self.num_vertices-1]:
outputs += self.input_op[self.num_vertices-1](tensors[0])
#if self.spec.matrix[0, self.num_vertices-1]:
# out_concat.append(self.input_op[self.num_vertices-1](tensors[0]))
#outputs = sum(out_concat) / len(out_concat)
return outputs
def Projection(in_channels, out_channels):
"""1x1 projection (as in ResNet) followed by batch normalization and ReLU."""
return ConvBnRelu(in_channels, out_channels, 1)
def Truncate(inputs, channels):
"""Slice the inputs to channels if necessary."""
input_channels = inputs.size()[1]
if input_channels < channels:
raise ValueError('input channel < output channels for truncate')
elif input_channels == channels:
return inputs # No truncation necessary
else:
# Truncation should only be necessary when channel division leads to
# vertices with +1 channels. The input vertex should always be projected to
# the minimum channel count.
assert input_channels - channels == 1
return inputs[:, :channels, :, :]
def ComputeVertexChannels(in_channels, out_channels, matrix):
"""Computes the number of channels at every vertex.
Given the input channels and output channels, this calculates the number of
    channels at each interior vertex. Each interior vertex has the same number of
    channels as the max of the channels of the vertices it feeds into. The output
    channels are divided amongst the vertices that are directly connected to the output.
When the division is not even, some vertices may receive an extra channel to
compensate.
Returns:
list of channel counts, in order of the vertices.
"""
num_vertices = np.shape(matrix)[0]
vertex_channels = [0] * num_vertices
    vertex_channels[0] = in_channels  # the input vertex carries the input channel count
    vertex_channels[num_vertices - 1] = out_channels  # the output vertex carries the requested output channels
if num_vertices == 2:
# Edge case where module only has input and output vertices
return vertex_channels
# Compute the in-degree ignoring input, axis 0 is the src vertex and axis 1 is
# the dst vertex. Summing over 0 gives the in-degree count of each vertex.
in_degree = np.sum(matrix[1:], axis=0)
interior_channels = out_channels // in_degree[num_vertices - 1]
correction = out_channels % in_degree[num_vertices - 1] # Remainder to add
# Set channels of vertices that flow directly to output
for v in range(1, num_vertices - 1):
if matrix[v, num_vertices - 1]:
vertex_channels[v] = interior_channels
if correction:
vertex_channels[v] += 1
correction -= 1
# Set channels for all other vertices to the max of the out edges, going
# backwards. (num_vertices - 2) index skipped because it only connects to
# output.
for v in range(num_vertices - 3, 0, -1):
if not matrix[v, num_vertices - 1]:
for dst in range(v + 1, num_vertices - 1):
if matrix[v, dst]:
vertex_channels[v] = max(vertex_channels[v], vertex_channels[dst])
assert vertex_channels[v] > 0
# Sanity check, verify that channels never increase and final channels add up.
final_fan_in = 0
for v in range(1, num_vertices - 1):
if matrix[v, num_vertices - 1]:
final_fan_in += vertex_channels[v]
for dst in range(v + 1, num_vertices - 1):
if matrix[v, dst]:
assert vertex_channels[v] >= vertex_channels[dst]
assert final_fan_in == out_channels or num_vertices == 2
# num_vertices == 2 means only input/output nodes, so 0 fan-in
return vertex_channels
|
archai/archai/supergraph/algos/nasbench101/model.py/0
|
{
"file_path": "archai/archai/supergraph/algos/nasbench101/model.py",
"repo_id": "archai",
"token_count": 4009
}
| 364 |
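To make the channel-splitting rule in `ComputeVertexChannels` concrete, here is a small worked example (the adjacency matrix is hypothetical):

import numpy as np

# Vertex 0 (input) feeds two parallel interior vertices, both of which feed
# vertex 3 (output), so the output in-degree is 2.
matrix = np.array([[0, 1, 1, 0],
                   [0, 0, 0, 1],
                   [0, 0, 0, 1],
                   [0, 0, 0, 0]])

# With in_channels=8 and out_channels=15 split across an in-degree of 2, each
# interior vertex gets 15 // 2 = 7 channels and the remainder goes to the
# first one:
# ComputeVertexChannels(8, 15, matrix) -> [8, 8, 7, 15]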
import os
import torch
import torch.nn as nn
__all__ = ['AlexNet','alexnet']
class AlexNet(nn.Module):
    def __init__(self, num_classes=1000, init_weights=True):
        super(AlexNet, self).__init__()
self.features=nn.Sequential(
nn.Conv2d(3, 96, kernel_size=(11,11), stride=(4,4), padding=2),
nn.ReLU(),
nn.LocalResponseNorm(size=5, alpha=0.0001, beta=0.75, k=2),
nn.MaxPool2d(kernel_size=3, stride=2),
nn.Conv2d(96, 256, kernel_size=(5,5), padding=2),
nn.ReLU(),
nn.LocalResponseNorm(size=5, alpha=0.0001, beta=0.75, k=2),
nn.MaxPool2d(kernel_size=3, stride=2),
nn.Conv2d(256, 384, kernel_size=(3,3), padding=1),
nn.ReLU(),
nn.Conv2d(384, 384, kernel_size=(3,3), padding=1),
nn.ReLU(),
nn.Conv2d(384, 256, kernel_size=(3,3), padding=1),
nn.ReLU(),
nn.MaxPool2d(kernel_size=3, stride=2)
)
self.avgpool=nn.AdaptiveAvgPool2d((6, 6))
self.classifier=nn.Sequential(
nn.Dropout(0.5),
nn.Linear(256 * 6 * 6, 4096),
nn.ReLU(),
nn.Dropout(0.5),
nn.Linear(4096, 4096),
nn.ReLU(),
nn.Linear(4096, num_classes)
)
self.softmax=nn.Softmax(dim=1)
        if init_weights:
            self.init_weights()
def init_weights(self):
for layer in self.features:
if isinstance(layer, nn.Conv2d):
nn.init.normal_(layer.weight, mean=0, std=0.01)
nn.init.constant_(layer.bias, 1)
for layer in self.classifier:
if isinstance(layer, nn.Linear):
nn.init.normal_(layer.weight, mean=0, std=0.01)
nn.init.constant_(layer.bias, 1)
        # per the AlexNet paper, the first and third conv layers use zero biases
        nn.init.constant_(self.features[0].bias, 0)
        nn.init.constant_(self.features[8].bias, 0)
def forward(self,x):
x=self.features(x)
x=self.avgpool(x)
x = torch.flatten(x, 1)
x=self.classifier(x)
x=self.softmax(x)
return x
def alexnet(pretrained=False, progress=True, device='cpu', **kwargs):
"""
AlexNet architecture implemented from the paper
`"ImageNet Classification with Deep Convolutional Neural Networks" <https://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks>`
Args:
pretrained (bool): If True, returns a pre-trained model. In that case, the 'init_weights' argument of 'AlexNet' class is set to False
progress (bool): If True, displays a progress bar of the download to stderr
device: default is 'cpu'
"""
if pretrained:
kwargs['init_weights'] = False
model = AlexNet(**kwargs)
if pretrained:
script_dir = os.path.dirname(__file__)
state_dict = torch.load(script_dir + '/state_dicts/alexnet.pt', map_location=device)
model.load_state_dict(state_dict)
return model
|
archai/archai/supergraph/models/alexnet.py/0
|
{
"file_path": "archai/archai/supergraph/models/alexnet.py",
"repo_id": "archai",
"token_count": 1529
}
| 365 |
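A quick usage sketch of the factory above (the class count and input size are illustrative):

import torch

model = alexnet(pretrained=False, num_classes=10)
probs = model(torch.randn(1, 3, 224, 224))  # shape (1, 10); `forward` applies a
                                            # softmax, so these are class probabilities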
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import copy
from typing import Dict, Optional, Tuple
from overrides import EnforceOverrides
from archai.common.config import Config
from archai.common.ordered_dict_logger import get_global_logger
from archai.supergraph.datasets import data
from archai.supergraph.nas.arch_trainer import TArchTrainer
from archai.supergraph.nas.finalizers import Finalizers
from archai.supergraph.nas.model import Model
from archai.supergraph.nas.model_desc import ModelDesc
from archai.supergraph.nas.model_desc_builder import ModelDescBuilder
from archai.supergraph.utils.metrics import Metrics
from archai.supergraph.utils.trainer import Trainer
logger = get_global_logger()
class ModelMetrics:
def __init__(self, model:Model, metrics:Metrics) -> None:
self.model = model
self.metrics = metrics
class SearchResult:
def __init__(self, model_desc:Optional[ModelDesc],
search_metrics:Optional[Metrics],
train_metrics:Optional[Metrics]) -> None:
self.model_desc = model_desc
self.search_metrics = search_metrics
self.train_metrics = train_metrics
class Searcher(EnforceOverrides):
def search(self, conf_search:Config, model_desc_builder:Optional[ModelDescBuilder],
trainer_class:TArchTrainer, finalizers:Finalizers)->SearchResult:
# region config vars
conf_model_desc = conf_search['model_desc']
conf_post_train = conf_search['post_train']
cells = conf_model_desc['n_cells']
reductions = conf_model_desc['n_reductions']
nodes = conf_model_desc['cell']['n_nodes']
# endregion
assert model_desc_builder is not None, 'Default search implementation requires model_desc_builder'
# build model description that we will search on
model_desc = self.build_model_desc(model_desc_builder, conf_model_desc,
reductions, cells, nodes)
# perform search on model description
model_desc, search_metrics = self.search_model_desc(conf_search, model_desc,
trainer_class, finalizers)
# train searched model for few epochs to get some perf metrics
model_metrics = self.train_model_desc(model_desc, conf_post_train)
search_result = SearchResult(model_desc, search_metrics,
model_metrics.metrics if model_metrics is not None else None)
self.clean_log_result(conf_search, search_result)
return search_result
def clean_log_result(self, conf_search:Config, search_result:SearchResult)->None:
final_desc_filename = conf_search['final_desc_filename']
        # remove weights info from model_desc so it's more readable
search_result.model_desc.clear_trainables()
# if file name was specified then save the model desc
if final_desc_filename:
search_result.model_desc.save(final_desc_filename)
if search_result.search_metrics is not None:
logger.info({'search_top1_val':
search_result.search_metrics.best_val_top1()})
if search_result.train_metrics is not None:
logger.info({'train_top1_val':
search_result.train_metrics.best_val_top1()})
def build_model_desc(self, model_desc_builder:ModelDescBuilder,
conf_model_desc:Config,
reductions:int, cells:int, nodes:int)->ModelDesc:
# reset macro params in copy of config
conf_model_desc = copy.deepcopy(conf_model_desc)
conf_model_desc['n_reductions'] = reductions
conf_model_desc['n_cells'] = cells
        # create the model desc for search using the model config;
        # we will build the model without a call to model_desc_builder for pre-training
model_desc = model_desc_builder.build(conf_model_desc, template=None)
return model_desc
def get_data(self, conf_loader:Config)->data.DataLoaders:
# this dict caches the dataset objects per dataset config so we don't have to reload
        # a dynamic attribute is used so that any dependent methods
        # can still be wrapped with ray.remote
if not hasattr(self, '_data_cache'):
self._data_cache:Dict[int, data.DataLoaders] = {}
# first get from cache
if id(conf_loader) in self._data_cache:
data_loaders = self._data_cache[id(conf_loader)]
else:
data_loaders = data.get_data(conf_loader)
self._data_cache[id(conf_loader)] = data_loaders
return data_loaders
def finalize_model(self, model:Model, finalizers:Finalizers)->ModelDesc:
return finalizers.finalize_model(model, restore_device=False)
def search_model_desc(self, conf_search:Config, model_desc:ModelDesc,
trainer_class:TArchTrainer, finalizers:Finalizers)\
->Tuple[ModelDesc, Optional[Metrics]]:
# if trainer is not specified for algos like random search we return same desc
if trainer_class is None:
return model_desc, None
logger.pushd('arch_search')
conf_trainer = conf_search['trainer']
conf_loader = conf_search['loader']
model = Model(model_desc, droppath=False, affine=False)
# get data
data_loaders = self.get_data(conf_loader)
# search arch
arch_trainer = trainer_class(conf_trainer, model, checkpoint=None)
search_metrics = arch_trainer.fit(data_loaders)
# finalize
found_desc = self.finalize_model(model, finalizers)
logger.popd()
return found_desc, search_metrics
def train_model_desc(self, model_desc:ModelDesc, conf_train:Config)\
->Optional[ModelMetrics]:
"""Train given description"""
# region conf vars
conf_trainer = conf_train['trainer']
conf_loader = conf_train['loader']
trainer_title = conf_trainer['title']
epochs = conf_trainer['epochs']
drop_path_prob = conf_trainer['drop_path_prob']
# endregion
        # if epochs == 0 then there is nothing to train, so save time
if epochs <= 0:
return None
logger.pushd(trainer_title)
model = Model(model_desc, droppath=drop_path_prob>0.0, affine=True)
# get data
        data_loaders = self.get_data(conf_loader)
trainer = Trainer(conf_trainer, model, checkpoint=None)
train_metrics = trainer.fit(data_loaders)
logger.popd()
return ModelMetrics(model, train_metrics)
|
archai/archai/supergraph/nas/searcher.py/0
|
{
"file_path": "archai/archai/supergraph/nas/searcher.py",
"repo_id": "archai",
"token_count": 2713
}
| 366 |
# Copyright (c) 2020 abhuse.
# Licensed under the MIT license.
# https://github.com/ildoonet/pytorch-gradual-warmup-lr
from typing import Any, Dict, List, Optional
from torch.optim.lr_scheduler import ReduceLROnPlateau, _LRScheduler
from torch.optim.optimizer import Optimizer
class GradualWarmupScheduler(_LRScheduler):
"""Gradually warm-up (increasing) learning rate in optimizer.
It has been proposed in `Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour`.
"""
def __init__(
self, optimizer: Optimizer, multiplier: float, total_epoch: int, after_scheduler: Optional[_LRScheduler] = None
) -> None:
"""Initialize the scheduler.
Args:
optimizer: Wrapped optimizer.
multiplier: Target learning rate = base lr * multiplier if multiplier > 1.0.
If multiplier = 1.0, lr starts from 0 and ends up with the base_lr.
total_epoch: Target learning rate is reached gradually at total_epoch.
after_scheduler: After target_epoch, use this scheduler.
"""
self.multiplier = multiplier
if self.multiplier < 1.0:
raise ValueError("Multiplier should be >= 1.")
self.total_epoch = total_epoch
self.after_scheduler = after_scheduler
self.finished = False
super(GradualWarmupScheduler, self).__init__(optimizer)
def get_lr(self) -> List[float]:
if self.last_epoch > self.total_epoch:
if self.after_scheduler:
if not self.finished:
self.after_scheduler.base_lrs = [base_lr * self.multiplier for base_lr in self.base_lrs]
self.finished = True
return self.after_scheduler.get_lr()
return [base_lr * self.multiplier for base_lr in self.base_lrs]
if self.multiplier == 1.0:
return [base_lr * (float(self.last_epoch) / self.total_epoch) for base_lr in self.base_lrs]
else:
return [
base_lr * ((self.multiplier - 1.0) * self.last_epoch / self.total_epoch + 1.0)
for base_lr in self.base_lrs
]
def _step_reduce_lr(self, epoch: int, metrics: Dict[str, Any]) -> None:
if epoch is None:
epoch = self.last_epoch + 1
self.last_epoch = epoch if epoch != 0 else 1
if self.last_epoch <= self.total_epoch:
warmup_lr = [
base_lr * ((self.multiplier - 1.0) * self.last_epoch / self.total_epoch + 1.0)
for base_lr in self.base_lrs
]
for param_group, lr in zip(self.optimizer.param_groups, warmup_lr):
param_group["lr"] = lr
        else:
            # `ReduceLROnPlateau.step()` expects the metric value first; `epoch`
            # was normalized above, so it is never None at this point
            self.after_scheduler.step(metrics, epoch - self.total_epoch)
def step(self, epoch: Optional[int] = None, metrics: Optional[Dict[str, Any]] = None) -> None:
        if not isinstance(self.after_scheduler, ReduceLROnPlateau):
if self.finished and self.after_scheduler:
if epoch is None:
self.after_scheduler.step(None)
else:
self.after_scheduler.step(epoch - self.total_epoch)
self._last_lr = self.after_scheduler.get_last_lr()
else:
return super(GradualWarmupScheduler, self).step(epoch)
else:
self._step_reduce_lr(epoch, metrics)
|
archai/archai/trainers/gradual_warmup_scheduler.py/0
|
{
"file_path": "archai/archai/trainers/gradual_warmup_scheduler.py",
"repo_id": "archai",
"token_count": 1645
}
| 367 |
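A minimal usage sketch, mirroring the warmup settings in the augmentation configs below (multiplier 4 over 5 epochs, then cosine decay; the model and epoch counts are placeholders):

import torch
from torch.optim.lr_scheduler import CosineAnnealingLR

model = torch.nn.Linear(10, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

cosine = CosineAnnealingLR(optimizer, T_max=195)
scheduler = GradualWarmupScheduler(optimizer, multiplier=4, total_epoch=5, after_scheduler=cosine)

for epoch in range(200):
    # ... one epoch of training here ...
    scheduler.step(epoch)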
# in toy mode, load the config for the algo and then override it with common settings for toy mode
# any additional algo specific toy mode settings will go in this file
__include__: ['divnas.yaml', 'toy_common.yaml']
# disable seed train and post train by setting the epochs to 0
nas:
search:
seed_train:
trainer:
epochs: 0 # number of epochs model will be trained before search
post_train:
trainer:
epochs: 0 # number of epochs model will be trained after search
|
archai/confs/algos/divnas_toy.yaml/0
|
{
"file_path": "archai/confs/algos/divnas_toy.yaml",
"repo_id": "archai",
"token_count": 159
}
| 368 |
common:
checkpoint:
freq: 20
dataset:
max_batches: -1
autoaug:
loader:
epochs: 200
batch: 512
lr_schedule:
type: 'cosine'
warmup:
multiplier: 4
epochs: 5
optimizer:
lr: 0.1
type: 'sgd'
nesterov: True
decay: 0.0002
model:
type: 'resnet50'
|
archai/confs/aug/aug_cifar_sgd_resnet50.yaml/0
|
{
"file_path": "archai/confs/aug/aug_cifar_sgd_resnet50.yaml",
"repo_id": "archai",
"token_count": 221
}
| 369 |
__include__: './size_224x224_base.yaml' # inherit settings for 224x224 dataset
dataset_eval: # search dataset default is cifar10, we override eval dataset to sport8
name: 'sport8'
n_classes: 8
channels: 3 # number of channels in image
max_batches: -1 # if >= 0 then only these many batches are generated (useful for debugging)
storage_name: 'sport8' # name of folder or tar file to copy from cloud storage
|
archai/confs/datasets/sport8.yaml/0
|
{
"file_path": "archai/confs/datasets/sport8.yaml",
"repo_id": "archai",
"token_count": 126
}
| 370 |
import argparse
import uuid
import json
import os
from ast import literal_eval
from archai.common.store import ArchaiStore
from commands import make_train_model_command
from azure.ai.ml import Input, MLClient
from azure.ai.ml.identity import AzureMLOnBehalfOfCredential
from azure.identity import DefaultAzureCredential
from archai.discrete_search.search_spaces.config import ArchConfig
from azure.ai.ml import dsl
from utils import copy_code_folder
def start_training_pipeline(description, ml_client, store, model_architectures,
compute_cluster_name, datastore_uri, results_uri, output_folder,
experiment_name, environment_name, training_epochs, save_models):
""" Creates a new Azure ML Pipeline for training a set of models, updating the status of
these jobs in a given Azure Storage Table. This command does not wait for those jobs to
finish. For that use the monitor.py script which monitors the same Azure Storage Table
to find out when the jobs have all finished. The train.py script will update the table
when each training job completes. """
print(f"Training models: {model_architectures}")
print(f"Cluster: {compute_cluster_name}")
print(f"Dataset: {datastore_uri}")
print(f"Output: {results_uri}")
print(f"Env: {environment_name}")
print(f"Epochs: {training_epochs}")
code_dir = copy_code_folder()
model_names = []
for archid in model_architectures:
model_id = 'id_' + str(uuid.uuid4()).replace('-', '_')
model_names += [model_id]
root_uri = results_uri
i = root_uri.rfind('/')
if i > 0:
root_uri = root_uri[:i]
# create new status rows and models.json for these new jobs.
models = []
for i, archid in enumerate(model_architectures):
model_id = model_names[i]
print(f'Launching training job for model {model_id}')
e = store.get_status(model_id)
        # archid is a tuple literal like "(3, 5, 64)"; parse it safely without eval
        nb_layers, kernel_size, hidden_dim = literal_eval(archid)
e["nb_layers"] = nb_layers
e["kernel_size"] = kernel_size
e["hidden_dim"] = hidden_dim
e['experiment'] = experiment_name
e['status'] = 'preparing'
e['epochs'] = training_epochs
store.update_status_entity(e)
models += [{
'id': model_id,
'status': 'training',
'nb_layers': nb_layers,
'kernel_size': kernel_size,
'hidden_dim': hidden_dim,
'epochs': training_epochs,
'val_acc': e['val_acc'] if 'val_acc' in e else 0.0
}]
results = {
'models': models
}
@dsl.pipeline(
compute=compute_cluster_name,
description=description,
)
def parallel_training_pipeline(
data_input
):
outputs = {}
for i, archid in enumerate(model_architectures):
model_id = model_names[i]
output_path = f'{root_uri}/{model_id}'
train_job = make_train_model_command(
output_path, code_dir, environment_name, model_id,
store.storage_account_name, store.storage_account_key,
archid, training_epochs, save_models)(
data=data_input
)
outputs[model_id] = train_job.outputs.results
return outputs
training_pipeline = parallel_training_pipeline(
data_input=Input(type="uri_folder", path=datastore_uri)
)
# submit the pipeline job
pipeline_job = ml_client.jobs.create_or_update(
training_pipeline,
experiment_name=experiment_name,
)
# Write the new list of pending models so that the make_monitor_command
# knows what to wait for.
print("Writing pending.json: ")
print(json.dumps(results, indent=2))
results_path = f'{output_folder}/pending.json'
with open(results_path, 'w') as f:
f.write(json.dumps(results, indent=2))
return (pipeline_job, model_names)
def main():
# input and output arguments
parser = argparse.ArgumentParser()
parser.add_argument("--config", type=str, help="optional bin hex encoded config.json file ")
parser.add_argument("--description", type=str, help="the pipeline description")
parser.add_argument("--models_path", help="Location of our pareto.json file.")
parser.add_argument("--compute_cluster_name", help="name of compute cluster to use")
parser.add_argument("--datastore_uri", help="location of dataset datastore")
parser.add_argument("--results_uri", help="location to store the trained models")
parser.add_argument("--output_path", help="location to store the list of pending models (pending.json)")
parser.add_argument("--experiment_name", help="name of AML experiment")
parser.add_argument("--environment_name", help="AML conda environment to use")
parser.add_argument('--epochs', type=float, help='number of epochs to train', default=0.001)
parser.add_argument("--save_models", help="AML conda environment to use", action="store_true")
args = parser.parse_args()
path = args.models_path
print(f"Reading pareto.json from {path}")
pareto_file = os.path.join(path, 'pareto.json')
with open(pareto_file) as f:
pareto_models = json.load(f)
model_architectures = []
for a in pareto_models:
if type(a) is dict and 'nb_layers' in a:
config = ArchConfig(a)
nb_layers = config.pick("nb_layers")
kernel_size = config.pick("kernel_size")
hidden_dim = config.pick("hidden_dim")
archid = f'({nb_layers}, {kernel_size}, {hidden_dim})'
model_architectures += [archid]
identity = AzureMLOnBehalfOfCredential()
if args.config:
print("Using AzureMLOnBehalfOfCredential...")
workspace_config = str(bytes.fromhex(args.config), encoding='utf-8')
print(f"Config: {workspace_config}")
config = json.loads(workspace_config)
else:
print("Using DefaultAzureCredential...")
config_file = "../.azureml/config.json"
print(f"Config: {config_file}")
config = json.load(open(config_file, 'r'))
identity = DefaultAzureCredential()
subscription = config['subscription_id']
resource_group = config['resource_group']
workspace_name = config['workspace_name']
storage_account_key = config['storage_account_key']
storage_account_name = config['storage_account_name']
ml_client = MLClient(
identity,
subscription,
resource_group,
workspace_name
)
store = ArchaiStore(storage_account_name, storage_account_key)
start_training_pipeline(args.description, ml_client, store, model_architectures,
args.compute_cluster_name, args.datastore_uri, args.results_uri, args.output_path,
args.experiment_name, args.environment_name, args.epochs, args.save_models)
if __name__ == "__main__":
main()
|
archai/docs/advanced_guide/cloud/azure/notebooks/multi_node_search/scripts/training_pipeline.py/0
|
{
"file_path": "archai/docs/advanced_guide/cloud/azure/notebooks/multi_node_search/scripts/training_pipeline.py",
"repo_id": "archai",
"token_count": 2850
}
| 371 |
<jupyter_start><jupyter_text>Implementing a Custom TrainerAbstract base classes (ABCs) define a blueprint for a class, specifying its methods and attributes, but not its implementation. They are important in implementing a consistent interface, as they enforce a set of requirements on implementing classes and make it easier to write code that can work with multiple implementations.First, we define a boilerplate for the `TrainerBase` class, which is the same implemented in `archai.api.trainer_base` module.<jupyter_code>from abc import abstractmethod
from overrides import EnforceOverrides
class TrainerBase(EnforceOverrides):
def __init__(self) -> None:
pass
@abstractmethod
def train(self) -> None:
pass
@abstractmethod
def evaluate(self) -> None:
pass
@abstractmethod
def predict(self) -> None:
pass<jupyter_output><empty_output><jupyter_text>PyTorch-based TrainerIn the context of a custom trainer, using ABCs can help ensure that the provider implements the required methods and provides a consistent interface for training, evaluating and predicting. In this example, we will implement a PyTorch-based trainer, as follows:<jupyter_code>from typing import Optional
import torch
from overrides import overrides
from torch.utils.data import Dataset
class PyTorchTrainer(TrainerBase):
def __init__(
self,
model: torch.nn.Module,
train_dataset: Optional[Dataset] = None,
eval_dataset: Optional[Dataset] = None,
) -> None:
super().__init__()
self.model = model
self.train_dataset = train_dataset
self.eval_dataset = eval_dataset
# Setup the trainer
self._setup()
def _setup(self) -> None:
self.loss_fn = torch.nn.CrossEntropyLoss()
self.optimizer = torch.optim.Adam(self.model.parameters(), lr=1e-3)
    def _train_step(self, inputs: torch.Tensor, labels: torch.Tensor) -> float:
self.optimizer.zero_grad()
outputs = self.model(inputs)
loss = self.loss_fn(outputs, labels)
loss.backward()
self.optimizer.step()
return loss.item()
@overrides
def train(self) -> None:
total_loss = 0.0
train_loader = torch.utils.data.DataLoader(self.train_dataset, batch_size=64, shuffle=True)
self.model.train()
for idx, (inputs, labels) in enumerate(train_loader):
inputs = inputs.view(inputs.size(0), -1)
total_loss += self._train_step(inputs, labels)
if idx % 10 == 0:
print(f"Batch {idx} loss: {total_loss / (idx + 1)}")
    def _eval_step(self, inputs: torch.Tensor, labels: torch.Tensor) -> float:
with torch.no_grad():
outputs = self.model(inputs)
loss = self.loss_fn(outputs, labels)
return loss.item()
@overrides
def evaluate(self, eval_dataset: Optional[Dataset] = None) -> None:
eval_dataset = eval_dataset if eval_dataset else self.eval_dataset
assert eval_dataset is not None, "`eval_dataset` has not been provided."
eval_loader = torch.utils.data.DataLoader(eval_dataset, batch_size=64, shuffle=False)
eval_loss = 0.0
self.model.eval()
for idx, (inputs, labels) in enumerate(eval_loader):
inputs = inputs.view(inputs.size(0), -1)
loss = self._eval_step(inputs, labels)
eval_loss += loss
self.model.train()
        eval_loss /= idx + 1  # average over the number of batches
return eval_loss
@overrides
def predict(self, inputs: torch.Tensor) -> None:
self.model.eval()
preds = self.model(inputs)
self.model.train()
return preds<jupyter_output><empty_output><jupyter_text>Defining the ModelOnce the data is loaded, we can define any CV-based model. In this example, we will create a simple linear model using PyTorch:<jupyter_code>from torch import nn
class Model(nn.Module):
def __init__(self) -> None:
super().__init__()
self.linear = nn.Linear(28 * 28, 10)
def forward(self, x: torch.Tensor) -> torch.Tensor:
return self.linear(x)
model = Model()<jupyter_output><empty_output><jupyter_text>Creating and Training with the TrainerAfter loading the data and creating the data, we need to plug these instances into the `PyTorchTrainer` and start the training, as follows:<jupyter_code>from archai.datasets.cv.mnist_dataset_provider import MnistDatasetProvider
dataset_provider = MnistDatasetProvider()
train_dataset = dataset_provider.get_train_dataset()
trainer = PyTorchTrainer(model, train_dataset=train_dataset)
trainer.train()<jupyter_output>Batch 0 loss: 2.3435773849487305
Batch 10 loss: 2.164100560274991
Batch 20 loss: 2.0146874416442144
Batch 30 loss: 1.875573092891324
Batch 40 loss: 1.755056075933503
Batch 50 loss: 1.65761978486005
Batch 60 loss: 1.5680492149024714
Batch 70 loss: 1.482287242378987
Batch 80 loss: 1.4176807028275948
Batch 90 loss: 1.3575652700204115
Batch 100 loss: 1.3116845883945427
Batch 110 loss: 1.264954976133398
Batch 120 loss: 1.2235281644773877
Batch 130 loss: 1.1893346013913628
Batch 140 loss: 1.1595103922465169
Batch 150 loss: 1.1271054373671676
Batch 160 loss: 1.098986664173766
Batch 170 loss: 1.0724144109159883
Batch 180 loss: 1.0449848247496463
Batch 190 loss: 1.0206239084610764
Batch 200 loss: 1.0005531422237852
Batch 210 loss: 0.9785312015863391
Batch 220 loss: 0.9595723239814534
Batch 230 loss: 0.9406399880394791
Batch 240 loss: 0.9242911396926864
Batch 250 loss: 0.9074264486947382
Batch 260 loss: 0.8933870223746903
Batch 270 loss: 0.8793117023482094
Batch 280 loss: 0.8656814331685945
Batc[...]<jupyter_text>Evaluating and Predicting with the TrainerFinally, we evaluate our pre-trained model with the validation set and create a set of random-based inputs to calculate the model's predictions:<jupyter_code>val_dataset = dataset_provider.get_val_dataset()
eval_loss = trainer.evaluate(eval_dataset=val_dataset)
print(f"Eval loss: {eval_loss}")
inputs = torch.zeros(1, 28 * 28)
preds = trainer.predict(inputs)
print(f"Predictions: {preds}")<jupyter_output>Eval loss: 0.3360353711610421
Predictions: tensor([[-0.1244, 0.2467, -0.0254, -0.0535, 0.0533, 0.1786, -0.0015, 0.1122,
-0.2270, -0.0415]], grad_fn=<AddmmBackward0>)
|
archai/docs/getting_started/notebooks/api/trainer_base.ipynb/0
|
{
"file_path": "archai/docs/getting_started/notebooks/api/trainer_base.ipynb",
"repo_id": "archai",
"token_count": 2531
}
| 372 |
<jupyter_start><jupyter_text>Creating Memory Mapped NLP-based DataIn this notebook, we will use a fast dataset provider-based abstraction that interfaces with Hugging Face's `datasets` (and has been created by HazyResearch). The key advantage of this approach is the use of either shared memory or memory maps in Python to accelerate the caching process. Furthermore, the dataset is cached as a contiguous numpy array, enabling manipulation of data with any sequence length. This feature eliminates the need for re-encoding data for multiple lengths, streamlining the data processing pipeline. Instantiating the ProviderThe first step is to instantiate the `FastHfDatasetProvider.from_hub()`, which loads and encodes the dataset. A set of arguments can be passed to its class method according to the user's needs:* `dataset_name`: Name of the dataset.* `dataset_config_name`: Name of the dataset configuration.* `data_dir`: Path to the data directory.* `tokenizer`: Instance of tokenizer to use.* `tokenizer_name`: Name of the tokenizer, if `tokenizer` has not been passed.* `mapping_column_name`: The columns in `dataset` that should be tokenized.* `validation_split`: Fraction of the dataset to use for validation.* `seed`: Random seed.* `num_workers`: Number of workers to use for encoding.* `use_eos_token`: Whether to use EOS token to separate sequences.* `use_shared_memory`: Whether to use shared memory for caching.* `cache_dir`: Path to the cache directory.<jupyter_code>from archai.datasets.nlp.fast_hf_dataset_provider import FastHfDatasetProvider
# The provider will automatically download the dataset and tokenizer, encode
# the dataset and cache it for future use
dataset_provider = FastHfDatasetProvider.from_hub(
"glue",
dataset_config_name="sst2",
tokenizer_name="gpt2",
mapping_column_name=["sentence"],
use_shared_memory=False,
cache_dir="cache/glue-sst2-gpt2"
)
# (inputs, labels) can be retrieved with any sequence length
train_dataset = dataset_provider.get_train_dataset(seq_len=512)
val_dataset = dataset_provider.get_val_dataset(seq_len=512)
print(train_dataset[0], val_dataset[0])<jupyter_output>2023-03-21 15:07:57,990 - archai.datasets.nlp.fast_hf_dataset_provider β WARNING β Shared memory is not available in Python < 3.8.
2023-03-21 15:08:00,865 - archai.datasets.nlp.fast_hf_dataset_provider β INFO β Downloading dataset ...<jupyter_text>Loading from CacheAfter loading and encoding the dataset for the first time, a cache will be created with a unique fingerprint (identifier) based on its configuration. The cached is composed by the following files:* `config.json`: Dataset provider configuration (used to re-create the object when loaded from cache).* `tokenizer.pkl`: Tokenizer used to encode the data (also re-created when loaded from cache).* `train.npy`: Training tokens (inputs and labels).* `validation.npy`: Validation tokens (inputs and labels).* `test.npy`: Testing tokens (inputs and labels).The `FastHfDatasetProvider` class provides a `from_cache` method which can be used to re-instantiate the cached dataset provider, in case the user wants to re-use in different places.<jupyter_code># The caching mechanism automatically saves `config.json` and `tokenizer.pkl`,
# which are used to recreate the provider when calling `from_cache` method
dataset_provider = FastHfDatasetProvider.from_cache("cache/glue-sst2-gpt2")
train_dataset = dataset_provider.get_train_dataset(seq_len=512)
val_dataset = dataset_provider.get_val_dataset(seq_len=512)
print(train_dataset[0], val_dataset[0])<jupyter_output>2023-03-21 15:08:07,007 - archai.datasets.nlp.fast_hf_dataset_provider β INFO β Loading dataset from: cache/glue-sst2-gpt2
(tensor([24717, 649, 3200, 507, 422, 262, 21694, 4991, 220, 50256,
3642, 1299, 645, 20868, 837, 691, 2248, 1850, 308, 3775,
220, 50256, 5562, 10408, 663, 3435, 290, 48556, 1223, 2138,
4950, 546, 1692, 3450, 220, 50256, 2787, 1299, 15950, 11378,
284, 3520, 262, 976, 3690, 220, 50256, 261, 262, 5290,
15827, 12, 1659, 12, 1169, 12, 1008, 9310, 35478, 20954,
262, 28303, 714, 47478, 469, 510, 220, 50256, 5562, 705,
82, 1290, 1165, 15444, 284, 17004, 884, 31194, 3513, 220,
50256, 26567, 2536, 689, 326, 262, 3437, 286, 884, 289,
31777, 2512, 30181, 355, 29408, 1830, 460, 991, 1210, 503,
257, 1402, 837, 2614, 2646, 351, 281, 7016, 3355, 404,
764, [...]
|
archai/docs/getting_started/notebooks/nlp/fast_hf_dataset_provider.ipynb/0
|
{
"file_path": "archai/docs/getting_started/notebooks/nlp/fast_hf_dataset_provider.ipynb",
"repo_id": "archai",
"token_count": 1667
}
| 373 |
Computer Vision
===============
.. toctree::
:maxdepth: 2
archai.datasets.cv.transforms
FGVC Aircraft Dataset Provider
------------------------------
.. automodule:: archai.datasets.cv.aircraft_dataset_provider
:members:
:undoc-members:
:show-inheritance:
Caltech-Based Dataset Provider
------------------------------
.. automodule:: archai.datasets.cv.caltech_dataset_provider
:members:
:undoc-members:
:show-inheritance:
CIFAR-Based Dataset Provider
----------------------------
.. automodule:: archai.datasets.cv.cifar_dataset_provider
:members:
:undoc-members:
:show-inheritance:
Cityscapes Dataset Provider
---------------------------
.. automodule:: archai.datasets.cv.cityscapes_dataset_provider
:members:
:undoc-members:
:show-inheritance:
COCO-Based Dataset Provider
---------------------------
.. automodule:: archai.datasets.cv.coco_dataset_provider
:members:
:undoc-members:
:show-inheritance:
Oxford 102 Flower Dataset Provider
----------------------------------
.. automodule:: archai.datasets.cv.flowers102_dataset_provider
:members:
:undoc-members:
:show-inheritance:
Food-101 Dataset Provider
-------------------------
.. automodule:: archai.datasets.cv.food101_dataset_provider
:members:
:undoc-members:
:show-inheritance:
Image Folder Dataset Provider
-----------------------------
.. automodule:: archai.datasets.cv.image_folder_dataset_provider
:members:
:undoc-members:
:show-inheritance:
ImageNet Dataset Provider
-------------------------
.. automodule:: archai.datasets.cv.imagenet_dataset_provider
:members:
:undoc-members:
:show-inheritance:
MNIST-Based Dataset Provider
----------------------------
.. automodule:: archai.datasets.cv.mnist_dataset_provider
:members:
:undoc-members:
:show-inheritance:
Stanford Cars Dataset Provider
------------------------------
.. automodule:: archai.datasets.cv.stanford_cars_dataset_provider
:members:
:undoc-members:
:show-inheritance:
SVHN Dataset Provider
---------------------
.. automodule:: archai.datasets.cv.svhn_dataset_provider
:members:
:undoc-members:
:show-inheritance:
TensorPack LMDB Dataset Provider
--------------------------------
.. automodule:: archai.datasets.cv.tensorpack_lmdb_dataset_provider
:members:
:undoc-members:
:show-inheritance:
TensorPack LMDB Dataset Provider (Utilities)
--------------------------------------------
.. automodule:: archai.datasets.cv.tensorpack_lmdb_dataset_provider_utils
:members:
:undoc-members:
:show-inheritance:
USPS Dataset Provider
---------------------
.. automodule:: archai.datasets.cv.usps_dataset_provider
:members:
:undoc-members:
:show-inheritance:
|
archai/docs/reference/api/archai.datasets.cv.rst/0
|
{
"file_path": "archai/docs/reference/api/archai.datasets.cv.rst",
"repo_id": "archai",
"token_count": 996
}
| 374 |
Segmentation DAG
================
Model
-----
.. automodule:: archai.discrete_search.search_spaces.cv.segmentation_dag.model
:members:
:undoc-members:
Operators
---------
.. automodule:: archai.discrete_search.search_spaces.cv.segmentation_dag.ops
:members:
:undoc-members:
Search Space
------------
.. automodule:: archai.discrete_search.search_spaces.cv.segmentation_dag.search_space
:members:
:undoc-members:
|
archai/docs/reference/api/archai.discrete_search.search_spaces.cv.segmentation_dag.rst/0
|
{
"file_path": "archai/docs/reference/api/archai.discrete_search.search_spaces.cv.segmentation_dag.rst",
"repo_id": "archai",
"token_count": 161
}
| 375 |
NasBench-101
============
Base Operators
--------------
.. automodule:: archai.supergraph.algos.nasbench101.base_ops
:members:
:undoc-members:
Operators
---------
.. automodule:: archai.supergraph.algos.nasbench101.nasbench101_op
:members:
:undoc-members:
Configuration
-------------
.. automodule:: archai.supergraph.algos.nasbench101.config
:members:
:undoc-members:
Graph (Utilities)
-----------------
.. automodule:: archai.supergraph.algos.nasbench101.graph_util
:members:
:undoc-members:
Model Builder
-------------
.. automodule:: archai.supergraph.algos.nasbench101.model_builder
:members:
:undoc-members:
Model Description Builder
-------------------------
.. automodule:: archai.supergraph.algos.nasbench101.nasbench101_model_desc_builder
:members:
:undoc-members:
Model Matrix
------------
.. automodule:: archai.supergraph.algos.nasbench101.model_matrix
:members:
:undoc-members:
Model Metrics (PB-2)
--------------------
.. automodule:: archai.supergraph.algos.nasbench101.model_metrics_pb2
:members:
:undoc-members:
Model Specification
-------------------
.. automodule:: archai.supergraph.algos.nasbench101.model_spec
:members:
:undoc-members:
Model
-----
.. automodule:: archai.supergraph.algos.nasbench101.model
:members:
:undoc-members:
Experiment Runner
-----------------
.. automodule:: archai.supergraph.algos.nasbench101.nasbench101_exp_runner
:members:
:undoc-members:
|
archai/docs/reference/api/archai.supergraph.algos.nasbench101.rst/0
|
{
"file_path": "archai/docs/reference/api/archai.supergraph.algos.nasbench101.rst",
"repo_id": "archai",
"token_count": 520
}
| 376 |
Roadmap
=======
This section gives users a sense of what to expect from Archai in the coming months and years, and provides insight into the direction and focus of future work.
The roadmap is organized into broad categories or themes, each representing a key area of development. Within each category, the roadmap lists the specific goals the Archai team is working towards, along with an estimated timeline for their completion.
|
archai/docs/reference/roadmap.rst/0
|
{
"file_path": "archai/docs/reference/roadmap.rst",
"repo_id": "archai",
"token_count": 95
}
| 377 |
# Copyright (c) EleutherAI.
# Licensed under the MIT license.
# https://github.com/EleutherAI/lm-evaluation-harness/blob/master/main.py
import fnmatch
from typing import Iterator, List
def pattern_match(patterns: List[str], source_list: List[str]) -> List[str]:
task_names = set()
for pattern in patterns:
for matching in fnmatch.filter(source_list, pattern):
task_names.add(matching)
return list(task_names)
class MultiChoice:
def __init__(self, choices: List[str]):
self.choices = choices
    def __contains__(self, values: str) -> bool:
        # `values` is a comma-separated string of patterns, e.g. "taskA,taskB*"
for value in values.split(","):
if len(fnmatch.filter(self.choices, value)) == 0:
return False
return True
    def __iter__(self) -> Iterator[str]:
for choice in self.choices:
yield choice
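# Illustrative usage (the task names below are just examples, not part of the harness):
#   pattern_match(["lambada*"], ["lambada", "lambada_cloze", "piqa"])
#   -> ["lambada", "lambada_cloze"]  (order unspecified, since a set is used)
# MultiChoice is intended for argparse's `choices=`, so a comma-separated --tasks
# argument can validate each value against the available task globs.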
|
archai/research/lm_eval_harness/lm_eval_harness/utils/regex.py/0
|
{
"file_path": "archai/research/lm_eval_harness/lm_eval_harness/utils/regex.py",
"repo_id": "archai",
"token_count": 335
}
| 378 |
#!/bin/bash
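# Usage: ./dist_main.sh <args> -- every argument is forwarded verbatim to
# scripts/main.py on each process spawned by torch.distributed.launch.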
#fail if any errors
set -e
nvidia-smi --list-gpus
gpu_count=$(nvidia-smi --query-gpu=name --format=csv,noheader | wc -l)
echo "*****************************************"
echo "Using $gpu_count GPUS"
echo "*****************************************"
set -e -o xtrace
python -m torch.distributed.launch --nproc_per_node=$gpu_count scripts/main.py "$@"
|
archai/scripts/supergraph/dist_main.sh/0
|
{
"file_path": "archai/scripts/supergraph/dist_main.sh",
"repo_id": "archai",
"token_count": 123
}
| 379 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
# Copyright 2019 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runnable example, as shown in the README.md."""
from __future__ import absolute_import, division, print_function
from absl import app
from nasbench import api
# Replace this string with the path to the downloaded nasbench.tfrecord before
# executing.
NASBENCH_TFRECORD = r"D:\datasets\nasbench_ds\nasbench_full.tfrecord"
INPUT = "input"
OUTPUT = "output"
CONV1X1 = "conv1x1-bn-relu"
CONV3X3 = "conv3x3-bn-relu"
MAXPOOL3X3 = "maxpool3x3"
def main(argv):
del argv # Unused
# Load the data from file (this will take some time)
nasbench = api.NASBench(NASBENCH_TFRECORD)
# Create an Inception-like module (5x5 convolution replaced with two 3x3
# convolutions).
model_spec = api.ModelSpec(
# Adjacency matrix of the module
matrix=[
[0, 1, 1, 1, 0, 1, 0], # input layer
[0, 0, 0, 0, 0, 0, 1], # 1x1 conv
[0, 0, 0, 0, 0, 0, 1], # 3x3 conv
[0, 0, 0, 0, 1, 0, 0], # 5x5 conv (replaced by two 3x3's)
[0, 0, 0, 0, 0, 0, 1], # 5x5 conv (replaced by two 3x3's)
[0, 0, 0, 0, 0, 0, 1], # 3x3 max-pool
            [0, 0, 0, 0, 0, 0, 0],  # output layer
        ],
# Operations at the vertices of the module, matches order of matrix
ops=[INPUT, CONV1X1, CONV3X3, CONV3X3, CONV3X3, MAXPOOL3X3, OUTPUT],
)
# Query this model from dataset, returns a dictionary containing the metrics
# associated with this model.
print("Querying an Inception-like model.")
data = nasbench.query(model_spec)
print(data)
print(nasbench.get_budget_counters()) # prints (total time, total epochs)
# Get all metrics (all epoch lengths, all repeats) associated with this
# model_spec. This should be used for dataset analysis and NOT for
# benchmarking algorithms (does not increment budget counters).
print("\nGetting all metrics for the same Inception-like model.")
fixed_metrics, computed_metrics = nasbench.get_metrics_from_spec(model_spec)
print(fixed_metrics)
for epochs in nasbench.valid_epochs:
for repeat_index in range(len(computed_metrics[epochs])):
data_point = computed_metrics[epochs][repeat_index]
print("Epochs trained %d, repeat number: %d" % (epochs, repeat_index + 1))
print(data_point)
    # Iterate through unique models in the dataset. Models are uniquely identified
# by a hash.
print("\nIterating over unique models in the dataset.")
for unique_hash in nasbench.hash_iterator():
fixed_metrics, computed_metrics = nasbench.get_metrics_from_hash(unique_hash)
print(fixed_metrics)
# For demo purposes, break here instead of iterating through whole set.
break
# If you are passing command line flags to modify the default config values, you
# must use app.run(main)
if __name__ == "__main__":
app.run(main)
|
archai/scripts/supergraph/nasbench101/archai_train.py/0
|
{
"file_path": "archai/scripts/supergraph/nasbench101/archai_train.py",
"repo_id": "archai",
"token_count": 1327
}
| 380 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import argparse
import os
import pathlib
import re
from collections import OrderedDict
from inspect import getsourcefile
import subprocess
import sys
from typing import Dict, Iterator, List, Tuple
import matplotlib
import yaml
try:
from runstats import Statistics
except ImportError:
subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'runstats'])
from runstats import Statistics
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import numpy as np
from archai.common import utils
def main():
parser = argparse.ArgumentParser(description="Report creator")
parser.add_argument(
"--results-dir",
"-d",
type=str,
default=r"~/logdir/proxynas_test_0001", # r'~/logdir/report_test'
help="folder with experiment results from pt",
)
parser.add_argument("--out-dir", "-o", type=str, default=r"~/logdir/reports", help="folder to output reports")
args, extra_args = parser.parse_known_args()
# root dir where all results are stored
results_dir = pathlib.Path(utils.full_path(args.results_dir))
print(f"results_dir: {results_dir}")
# extract experiment name which is top level directory
exp_name = results_dir.parts[-1]
# create results dir for experiment
out_dir = utils.full_path(os.path.join(args.out_dir, exp_name))
print(f"out_dir: {out_dir}")
os.makedirs(out_dir, exist_ok=True)
# get list of all structured logs for each job
logs = {}
job_count = 0
for job_dir in results_dir.iterdir():
job_count += 1
for subdir in job_dir.iterdir():
if not subdir.is_dir():
continue
# currently we expect that each job was ExperimentRunner job which should have
# _search or _eval folders
if subdir.stem.endswith("_search"):
sub_job = "search"
elif subdir.stem.endswith("_eval"):
sub_job = "eval"
else:
raise RuntimeError(
f'Sub directory "{subdir}" in job "{job_dir}" must '
"end with either _search or _eval which "
"should be the case if ExperimentRunner was used."
)
logs_filepath = os.path.join(str(subdir), "log.yaml")
if os.path.isfile(logs_filepath):
fix_yaml(logs_filepath)
with open(logs_filepath, "r") as f:
key = job_dir.name + ":" + sub_job
logs[key] = yaml.load(f, Loader=yaml.Loader)
# create list of epoch nodes having same path in the logs
grouped_logs = group_multi_runs(logs)
collated_grouped_logs = collect_epoch_nodes(grouped_logs)
summary_text, details_text = "", ""
for log_key, grouped_logs in collated_grouped_logs.items():
# for each path for epochs nodes, compute stats
for node_path, logs_epochs_nodes in grouped_logs.items():
collated_epoch_stats = get_epoch_stats(node_path, logs_epochs_nodes)
summary_text += get_summary_text(log_key, out_dir, node_path, collated_epoch_stats, len(logs_epochs_nodes))
details_text += get_details_text(log_key, out_dir, node_path, collated_epoch_stats, len(logs_epochs_nodes))
write_report("summary.md", **vars())
write_report("details.md", **vars())
def epoch_nodes(node: OrderedDict, path=[]) -> Iterator[Tuple[List[str], OrderedDict]]:
"""Search nodes recursively for nodes named 'epochs' and return them along with their paths"""
for k, v in node.items():
if k == "epochs" and isinstance(v, OrderedDict) and len(v) and "0" in v:
yield path, v
elif isinstance(v, OrderedDict): # make recursive call
for p, en in epoch_nodes(v, path=path + [k]):
yield p, en
def fix_yaml(filepath: str):
# fix yaml construction recursion error because of bad lines
yaml = pathlib.Path(filepath).read_text()
bad_lines = [r"get: !!python/object/apply:builtins.getattr", r"- *id001", r" - get"]
    # form the pattern by joining the literals with a whitespace matcher \s+;
    # each literal must be escaped first since it contains regex metacharacters like '*'
    pattern = r"\s+".join([re.escape(l) for l in bad_lines])
fixed_yaml = re.sub(pattern, "", yaml)
if yaml != fixed_yaml:
backup = pathlib.Path(filepath + ".original.yaml")
assert not backup.exists(), f"Backup file {backup} should not exist"
backup.write_text(yaml)
pathlib.Path(filepath).write_text(fixed_yaml)
print(f"Yaml at {filepath} was fixed")
def remove_seed_part(log_key: str) -> str:
# regex identifies seed123, seed123.4, seed_123, seed_123.4
# pattern is 'seed' followed by optional '_' followed by int or float number
pat = r"seed\_?([0-9]*[.])?[0-9]+"
return re.sub(pat, "", log_key)
def group_multi_runs(logs: Dict[str, OrderedDict]) -> Dict[str, List[OrderedDict]]:
result: Dict[str, List[OrderedDict]] = {}
for log_key, log in logs.items():
seed_less_key = remove_seed_part(log_key)
if seed_less_key in result:
result[seed_less_key].append(log)
else:
result[seed_less_key] = [log]
return result
def collect_epoch_nodes(grouped_logs: Dict[str, List[OrderedDict]]) -> Dict[str, Dict[str, List[OrderedDict]]]:
"""Make list of epoch nodes in same path in each of the logs if collate=True else
its just list of epoch nodes with jobdir and path as the key."""
collated: Dict[str, Dict[str, List[OrderedDict]]] = {}
for log_key, logs in grouped_logs.items():
collated_logs: Dict[str, List[OrderedDict]] = {}
for log in logs:
for path, epoch_node in epoch_nodes(log):
# for each path get the list where we can put epoch node
path_key = "/".join(path)
if path_key not in collated_logs:
collated_logs[path_key] = []
v = collated_logs[path_key]
v.append(epoch_node)
collated[log_key] = collated_logs
return collated
class EpochStats:
def __init__(self) -> None:
self.start_lr = Statistics()
self.end_lr = Statistics()
self.train_fold = FoldStats()
self.val_fold = FoldStats()
def update(self, epoch_node: OrderedDict) -> None:
self.start_lr.push(epoch_node["start_lr"])
if "train" in epoch_node:
self.end_lr.push(epoch_node["train"]["end_lr"])
self.train_fold.update(epoch_node["train"])
if "val" in epoch_node:
self.val_fold.update(epoch_node["val"])
class FoldStats:
def __init__(self) -> None:
self.top1 = Statistics()
self.top5 = Statistics()
self.duration = Statistics()
self.step_time = Statistics()
def update(self, fold_node: OrderedDict) -> None:
self.top1.push(fold_node["top1"])
self.top5.push(fold_node["top5"])
if "duration" in fold_node:
self.duration.push(fold_node["duration"])
if "step_time" in fold_node:
self.step_time.push(fold_node["step_time"])
def stat2str(stat: Statistics) -> str:
if len(stat) == 0:
return "-"
s = f"{stat.mean():.4f}"
if len(stat) > 1:
s += f"<sup> ± {stat.stddev():.4f}</sup>"
return s
def get_epoch_stats(node_path: str, logs_epochs_nodes: List[OrderedDict]) -> List[EpochStats]:
epoch_stats = []
for epochs_node in logs_epochs_nodes:
for epoch_num, epoch_node in epochs_node.items():
if not str.isnumeric(epoch_num): # each epoch key must be numeric
continue
epoch_num = int(epoch_num)
if epoch_num >= len(epoch_stats):
epoch_stats.append(EpochStats())
epoch_stat = epoch_stats[epoch_num]
epoch_stat.update(epoch_node)
return epoch_stats
def get_valid_filename(s):
s = str(s).strip().replace(" ", "-")
return re.sub(r"(?u)[^-\w.]", "-", s)
def get_summary_text(log_key: str, out_dir: str, node_path: str, epoch_stats: List[EpochStats], seed_runs: int) -> str:
lines = ["", ""]
lines.append(f"## Run: {log_key}\n")
lines.append(f"### Metric Type: {node_path}\n")
lines.append(f"Number of epochs: {len(epoch_stats)}\n")
lines.append(f"Number of seeds: {seed_runs}\n")
lines.append("\n")
plot_filename = get_valid_filename(log_key + ":" + node_path) + ".png"
plot_filepath = os.path.join(out_dir, plot_filename)
plot_epochs(epoch_stats, plot_filepath)
lines.append("")
train_duration = Statistics()
for epoch_stat in epoch_stats:
train_duration += epoch_stat.train_fold.duration
lines.append(f"")
lines.append(f"Train epoch time: {stat2str(train_duration)}")
lines.append("")
    milestones = [0, 5, 30, 100, 200, 600, 1500]
    for milestone in milestones:
        # skip milestone 0: epoch_stats[-1] would wrongly report the last epoch as epoch 0
        if milestone > 0 and len(epoch_stats) >= milestone and len(epoch_stats[milestone - 1].val_fold.top1) > 0:
            lines.append(f"{stat2str(epoch_stats[milestone - 1].val_fold.top1)} val top1 @ {milestone} epochs\n")
    # last epoch
    if len(epoch_stats) not in milestones:
# find last epoch with valid stats
last_epoch = len(epoch_stats) - 1
while last_epoch >= 0 and len(epoch_stats[last_epoch].val_fold.top1) == 0:
last_epoch -= 1
if last_epoch >= 0:
lines.append(
f"{stat2str(epoch_stats[last_epoch].val_fold.top1)} val top1 @ {len(epoch_stats)} epochs [Last]\n"
)
else:
lines.append("[Last] No epoch with valid val stats found!")
return "\n".join(lines)
def get_details_text(log_key: str, out_dir: str, node_path: str, epoch_stats: List[EpochStats], seed_runs: int) -> str:
lines = ["", ""]
lines.append(f"## Run: {log_key}\n")
lines.append(f"### Metric Type: {node_path}\n")
lines.append(f"Number of seeds: {seed_runs}\n")
lines.append(
"|Epoch |Val Top1 |Val Top5 |Train Top1 |Train Top5 |Train Duration |Val Duration |Train Step Time |Val Step Time |StartLR |EndLR |"
)
lines.append("|---|---|---|---|---|---|---|---|---|---|---|")
for i, epoch_stat in enumerate(epoch_stats):
line = "|"
line += str(i) + "|"
line += stat2str(epoch_stat.val_fold.top1) + "|"
line += stat2str(epoch_stat.val_fold.top5) + "|"
line += stat2str(epoch_stat.train_fold.top1) + "|"
line += stat2str(epoch_stat.train_fold.top5) + "|"
line += stat2str(epoch_stat.train_fold.duration) + "|"
line += stat2str(epoch_stat.val_fold.duration) + "|"
line += stat2str(epoch_stat.train_fold.step_time) + "|"
line += stat2str(epoch_stat.val_fold.step_time) + "|"
line += stat2str(epoch_stat.start_lr) + "|"
line += stat2str(epoch_stat.end_lr) + "|"
lines.append(line)
return "\n".join(lines)
def plot_epochs(epoch_stats: List[EpochStats], filepath: str):
plt.ioff()
plt.clf()
fig, ax = plt.subplots()
    # "husl" is a seaborn palette; use matplotlib's default color cycle instead
    clrs = plt.rcParams["axes.prop_cycle"].by_key()["color"][:5]
    # note: on matplotlib >= 3.6 this style is named "seaborn-v0_8-darkgrid"
    with plt.style.context("seaborn-darkgrid"):
metrics = []
val_top1_means = [es.val_fold.top1.mean() if len(es.val_fold.top1) > 0 else np.nan for es in epoch_stats]
val_top1_std = [es.val_fold.top1.stddev() if len(es.val_fold.top1) > 1 else np.nan for es in epoch_stats]
val_top1_min = [es.val_fold.top1.minimum() if len(es.val_fold.top1) > 0 else np.nan for es in epoch_stats]
val_top1_max = [es.val_fold.top1.maximum() if len(es.val_fold.top1) > 0 else np.nan for es in epoch_stats]
metrics.append((val_top1_means, val_top1_std, "val_top1", val_top1_min, val_top1_max))
val_top5_means = [es.val_fold.top5.mean() if len(es.val_fold.top5) > 0 else np.nan for es in epoch_stats]
val_top5_std = [es.val_fold.top5.stddev() if len(es.val_fold.top5) > 1 else np.nan for es in epoch_stats]
val_top5_min = [es.val_fold.top5.minimum() if len(es.val_fold.top5) > 0 else np.nan for es in epoch_stats]
val_top5_max = [es.val_fold.top5.maximum() if len(es.val_fold.top5) > 0 else np.nan for es in epoch_stats]
metrics.append((val_top5_means, val_top5_std, "val_top5", val_top5_min, val_top5_max))
train_top1_means = [es.train_fold.top1.mean() if len(es.train_fold.top1) > 0 else np.nan for es in epoch_stats]
train_top1_std = [es.train_fold.top1.stddev() if len(es.train_fold.top1) > 1 else np.nan for es in epoch_stats]
train_top1_min = [es.train_fold.top1.minimum() if len(es.train_fold.top1) > 0 else np.nan for es in epoch_stats]
train_top1_max = [es.train_fold.top1.maximum() if len(es.train_fold.top1) > 0 else np.nan for es in epoch_stats]
metrics.append((train_top1_means, train_top1_std, "train_top1", train_top1_min, train_top1_max))
train_top5_means = [es.train_fold.top5.mean() if len(es.train_fold.top5) > 0 else np.nan for es in epoch_stats]
train_top5_std = [es.train_fold.top5.stddev() if len(es.train_fold.top5) > 1 else np.nan for es in epoch_stats]
        train_top5_min = [es.train_fold.top5.minimum() if len(es.train_fold.top5) > 0 else np.nan for es in epoch_stats]
        train_top5_max = [es.train_fold.top5.maximum() if len(es.train_fold.top5) > 0 else np.nan for es in epoch_stats]
metrics.append((train_top5_means, train_top5_std, "train_top5", train_top5_min, train_top5_max))
for i, metric in enumerate(metrics):
ax.plot(range(len(metric[0])), metric[0], label=metric[2], c=clrs[i])
ax.fill_between(
range(len(metric[0])),
np.subtract(metric[0], metric[1]),
np.add(metric[0], metric[1]),
alpha=0.5,
facecolor=clrs[i],
)
ax.fill_between(range(len(metric[0])), metric[3], metric[4], alpha=0.1, facecolor=clrs[i])
ax.set_xlabel("Epoch")
ax.set_ylabel("Accuracy")
ax.set_title("Accuracy Metrics")
ax.legend()
ax.grid("on")
# add more ticks
# ax.set_xticks(np.arange(max([len(m) for m in metrics])))
# remove tick marks
# ax.xaxis.set_tick_params(size=0)
# ax.yaxis.set_tick_params(size=0)
# change the color of the top and right spines to opaque gray
# ax.spines['right'].set_color((.8,.8,.8))
# ax.spines['top'].set_color((.8,.8,.8))
# tweak the axis labels
xlab = ax.xaxis.get_label()
ylab = ax.yaxis.get_label()
xlab.set_style("italic")
xlab.set_size(10)
ylab.set_style("italic")
ylab.set_size(10)
# tweak the title
ttl = ax.title
ttl.set_weight("bold")
plt.savefig(filepath)
plt.close()
def write_report(template_filename: str, **kwargs) -> None:
source_file = getsourcefile(lambda: 0)
script_dir = os.path.dirname(os.path.abspath(source_file))
template = pathlib.Path(os.path.join(script_dir, template_filename)).read_text()
report = template.format(**kwargs)
outfilepath = os.path.join(kwargs["out_dir"], template_filename)
with open(outfilepath, "w", encoding="utf-8") as f:
f.write(report)
print(f"report written to: {outfilepath}")
if __name__ == "__main__":
main()
|
archai/scripts/supergraph/reports/exprep.py/0
|
{
"file_path": "archai/scripts/supergraph/reports/exprep.py",
"repo_id": "archai",
"token_count": 6983
}
| 381 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from argparse import ArgumentParser
from pathlib import Path
import os
import sys
import yaml
from typing import Optional, Dict
from azure.identity import DefaultAzureCredential
from azure.ai.ml.entities import UserIdentityConfiguration
from azure.ai.ml import MLClient
from azure.ai.ml import command, Input, Output, dsl
from archai.common.config import Config
import archai.common.azureml_helper as aml_helper
from archai.common.store import ArchaiStore
from archai.common.file_utils import TemporaryFiles
from shutil import copyfile, rmtree
from aml.util.setup import register_datastore, configure_store, create_cluster, copy_code_folder
confs_path = Path(__file__).absolute().parent / 'confs'
def data_prep_component(environment_name, datastore_path):
return command(
name="data_prep",
display_name="Data preparation for training",
description="Downloads the remote dataset to our blob store.",
inputs={
"name": Input(type='string')
},
outputs={
"data": Output(type="uri_folder", path=datastore_path, mode="rw_mount")
},
# The source folder of the component
code='data_prep',
command="""python3 prep_data_store.py \
--path ${{outputs.data}} \
""",
environment=environment_name,
)
def search_component(config, environment_name, seed, modelstore_path, output_path: Path):
    # gather only the code this component needs into a scripts folder, rather than shipping the whole repo
scripts_path = output_path / 'scripts'
os.makedirs(str(scripts_path), exist_ok=True)
config_dir = scripts_path / 'confs'
os.makedirs(str(config_dir), exist_ok=True)
copyfile('search.py', str(scripts_path / 'search.py'))
copyfile('train.py', str(scripts_path / 'train.py'))
copy_code_folder('search_space', str(scripts_path / 'search_space'))
copy_code_folder('training', str(scripts_path / 'training'))
copy_code_folder(os.path.join('aml', 'training'), str(scripts_path / 'aml' / 'training'))
copy_code_folder(os.path.join('aml', 'util'), str(scripts_path / 'aml' / 'util'))
config.save(str(config_dir / 'aml_search.yaml'))
aml_config = config['aml']
timeout = int(aml_config.get('timeout', 3600))
con_str = aml_config['connection_str']
fixed_args = f'--seed {seed} --timeout {timeout} --search_config confs/aml_search.yaml'
return command(
name="search",
display_name="Archai search job",
description="Searches for the best face segmentation model.",
is_deterministic=False,
inputs={
"data": Input(type="uri_folder")
},
outputs={
"results": Output(type="uri_folder", path=modelstore_path, mode="rw_mount")
},
identity=UserIdentityConfiguration(),
# The source folder of the component
code=str(scripts_path),
environment_variables={'MODEL_STORAGE_CONNECTION_STRING': con_str},
command="""python3 search.py \
--dataset_dir ${{inputs.data}} \
--output_dir ${{outputs.results}} \
""" + fixed_args,
environment=environment_name,
)
def main(output_dir: Path, experiment_name: str, seed: int, data_prep_only: bool):
if output_dir.exists():
rmtree(str(output_dir))
output_dir.mkdir(parents=True)
    # Load the search configuration; environment variables (e.g. the connection string) are resolved inline
config_file = str(confs_path / 'aml_search.yaml')
config = Config(config_file, resolve_env_vars=True)
aml_config = config['aml']
con_str = aml_config.get('connection_str', '$')
if '$' in con_str:
print("Please set environment variable MODEL_STORAGE_CONNECTION_STRING containing the Azure" +
"storage account connection string for the Azure storage account you want to use to " +
"control this experiment.")
return 1
workspace_name = aml_config['workspace_name']
subscription_id = aml_config['subscription_id']
resource_group_name = aml_config['resource_group']
# extract conda.yaml.
with open('conda.yaml', 'w') as f:
yaml.dump(aml_config['environment'].to_dict(), f)
storage_account_name, storage_account_key = ArchaiStore.parse_connection_string(con_str)
print(f'Using storage account: {storage_account_name}')
ml_client = MLClient(
credential=DefaultAzureCredential(),
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name
)
print(f'Using workspace "{ml_client.workspace_name}" in resource group "{ml_client.resource_group_name}"')
# Create aml computer clusters
cpu_compute_name = create_cluster(ml_client, aml_config, 'search_cluster')
create_cluster(ml_client, aml_config, 'training_cluster')
archai_job_env = aml_helper.create_environment_from_file(
ml_client,
image="mcr.microsoft.com/azureml/openmpi3.1.2-ubuntu18.04:latest",
conda_file="conda.yaml",
version='1.0.26')
environment_name = f"{archai_job_env.name}:{archai_job_env.version}"
# Register the datastore with AML
data_store_name = 'datasets'
data_container_name = 'datasets'
model_store_name = 'models'
model_container_name = aml_config.get('blob_container_name', 'models')
# register our azure datastores
results_path = register_datastore(ml_client, model_store_name, model_container_name, storage_account_name, storage_account_key, experiment_name)
datastore_path = register_datastore(ml_client, data_store_name, data_container_name, storage_account_name, storage_account_key, experiment_name)
# save this in the output folder so it can be found by pipeline components.
aml_config['experiment_name'] = experiment_name
aml_config['environment_name'] = environment_name
aml_config['datastore_path'] = datastore_path
aml_config['results_path'] = results_path
# make sure the datasets container exists
store = configure_store(aml_config, data_container_name)
# make sure the models container exists
store = configure_store(aml_config, model_container_name)
with TemporaryFiles() as tmp_files:
filename = tmp_files.get_temp_file()
aml_config['connection_str'] = "${MODEL_STORAGE_CONNECTION_STRING}"
config.save(filename)
aml_config['connection_str'] = con_str
store.upload_blob(f"{experiment_name}/config", filename, 'aml_search.yaml')
@dsl.pipeline(
compute=cpu_compute_name,
description="FaceSynthetics Archai search pipeline",
)
def archai_search_pipeline():
data_prep_job = data_prep_component(environment_name, datastore_path)(
name=experiment_name
)
if data_prep_only:
return {
"results": data_prep_job.outputs.data
}
else:
search_job = search_component(config, environment_name, seed, results_path, output_dir)(
data=data_prep_job.outputs.data
)
return {
"results": search_job.outputs.results
}
pipeline_job = ml_client.jobs.create_or_update(
archai_search_pipeline(),
# Project's name
experiment_name=experiment_name,
)
import webbrowser
webbrowser.open(pipeline_job.services["Studio"].endpoint)
job_name = pipeline_job.name
print(f'Started pipeline: {job_name}')
return 0
if __name__ == '__main__':
parser = ArgumentParser("""This script runs the search in an Azure ML workspace.""")
parser.add_argument('--output_dir', type=Path, help='Output directory for downloading results.', default='output')
parser.add_argument('--experiment_name', default='facesynthetics')
parser.add_argument('--seed', type=int, help='Random seed', default=42)
parser.add_argument('--test', help='Run only the data_prep step to test environment is working', action="store_true")
args = parser.parse_args()
rc = main(args.output_dir, args.experiment_name, args.seed, args.test)
sys.exit(rc)
|
archai/tasks/face_segmentation/aml.py/0
|
{
"file_path": "archai/tasks/face_segmentation/aml.py",
"repo_id": "archai",
"token_count": 3186
}
| 382 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import argparse
import os
import sys
from archai.common.store import ArchaiStore
CONNECTION_NAME = 'MODEL_STORAGE_CONNECTION_STRING'
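# Illustrative usage (assumes MODEL_STORAGE_CONNECTION_STRING is set):
#   python unlock.py               # unlock all jobs
#   python unlock.py --node node1  # unlock only the jobs locked by one node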
def unlock(con_str, experiment_name):
parser = argparse.ArgumentParser(
description='Unlock all jobs for given node or unlock all jobs.')
parser.add_argument('--node', help='Optional node name (default None).')
args = parser.parse_args()
storage_account_name, storage_account_key = ArchaiStore.parse_connection_string(con_str)
store = ArchaiStore(storage_account_name, storage_account_key, table_name=experiment_name)
store.unlock_all(args.node)
if __name__ == '__main__':
experiment_name = os.getenv("EXPERIMENT_NAME", "facesynthetics")
con_str = os.getenv(CONNECTION_NAME)
if not con_str:
print(f"Please specify your {CONNECTION_NAME} environment variable.")
sys.exit(1)
unlock(con_str, experiment_name)
|
archai/tasks/face_segmentation/aml/azure/unlock.py/0
|
{
"file_path": "archai/tasks/face_segmentation/aml/azure/unlock.py",
"repo_id": "archai",
"token_count": 341
}
| 383 |
<?xml version="1.0" encoding="utf-8"?>
<DirectedGraph GraphDirection="TopToBottom" Layout="Sugiyama" Offset="-1829.8148940022802,-883.0494917160034" ZoomLevel="1" xmlns="http://schemas.microsoft.com/vs/2009/dgml">
<Nodes>
<Node Id="..." Bounds="-891.259155273438,-398.550804903068,50,25.96" UseManualLocation="True" />
<Node Id="...1" Bounds="-996.17041015625,-122.697003871574,50,25.96" Label="..." UseManualLocation="True" />
<Node Id="AzureBlobStore" Category="storage" Bounds="-1193.71833333333,-533.830712207031,106.03,73.6213311767578" Label="azure blob store" UseManualLocation="True" />
<Node Id="AzureStatusTable" Category="storage" Bounds="-1192.64078544617,-430.209281030274,112.973333333333,73.6213311767578" Label="azure status table" UseManualLocation="True" />
<Node Id="JupyterNotebook" Category="script" Bounds="-1374.80858579,-377.363886517334,115.163333333333,62.964467010498" Label="Jupyter Notebook" UseManualLocation="True" />
<Node Id="KubernetesCluster" Category="cluster" Bounds="-1195.17041666667,-264.359319506836,119.78,62.9644670104981" Label="Kubernetes Cluster" UseManualLocation="True" />
<Node Id="QualcommDevice" Category="device" Bounds="-767.554234593709,-540.257548474121,115.2,62.96" Label="Qualcomm device" UseManualLocation="True" />
<Node Id="QualcommDevice1" Category="device" Bounds="-767.259849828084,-447.297448474121,115.2,62.96" Label="Qualcomm device" UseManualLocation="True" />
<Node Id="Runner.py" Category="script" Bounds="-905.306400171916,-584.479938924064,70.4533333333334,62.9644670104981" Label="runner.py" UseManualLocation="True" />
<Node Id="Runner.py1" Category="script" Bounds="-905.012036539714,-491.515371913566,70.4533333333334,62.9644670104981" Label="runner.py" UseManualLocation="True" />
<Node Id="Runner.py2" Category="script" Bounds="-901.180981852214,-342.590704903068,70.4533333333334,62.9644670104981" Label="runner.py" UseManualLocation="True" />
<Node Id="Runner.py3" Category="script" Bounds="-903.170422770182,-249.62613789257,70.4533333333334,62.9644670104981" Label="runner.py" UseManualLocation="True" />
<Node Id="Runner.py4" Category="script" Bounds="-901.449353434245,-156.661570882072,70.4533333333334,62.9644670104981" Label="runner.py" UseManualLocation="True" />
<Node Id="SnpeQuantiation" Category="quantize" Bounds="-760.452083333333,-255.710307686227,109.126666666667,62.964467010498" Label="snpe quantiation" UseManualLocation="True" />
<Node Id="SnpeQuantization" Category="quantize" Bounds="-763.723191502889,-348.674874696725,114.553333333333,62.9644670104981" Label="snpe quantization" UseManualLocation="True" />
<Node Id="Test_onnx" Category="test" Bounds="-761.731038004557,-162.745740675729,70.5999999999999,47.96" Label="test_onnx" UseManualLocation="True" />
<Node Id="Upload" Category="script" Bounds="-1358,-486,56.9533333333333,62.964467010498" Label="upload" UseManualLocation="True" />
</Nodes>
<Links>
<Link Source="AzureBlobStore" Target="Runner.py" Bounds="-1087.68836914063,-543.888146126435,173.568538449327,35.9021208699053" />
<Link Source="AzureBlobStore" Target="Runner.py1" Bounds="-1087.68836914063,-489.782197817309,173.759053025964,23.7223370430618" />
<Link Source="AzureBlobStore" Target="Runner.py2" Bounds="-1088.21890281343,-461.505920916907,179.584023885674,121.517239678322" />
<Link Source="AzureStatusTable" Target="JupyterNotebook" Bounds="-1250.94000054214,-378.575417826448,58.2992201059451,15.298820648026" />
<Link Source="AzureStatusTable" Target="Runner.py" Bounds="-1079.66748046875,-523.59619140625,166.653381347656,105.640502929688" />
<Link Source="AzureStatusTable" Target="Runner.py1" Bounds="-1079.66748046875,-451.611999511719,165.948486328125,41.5036010742188" />
<Link Source="AzureStatusTable" Target="Runner.py2" Bounds="-1079.66748046875,-379.401794433594,169.876037597656,51.8286743164063" />
<Link Source="KubernetesCluster" Target="...1" Bounds="-1093.33052927498,-201.394849319458,97.6659896150068,73.2956685371913" />
<Link Source="KubernetesCluster" Target="Runner.py2" Bounds="-1075.39039550781,-298.365678712837,165.566640952845,48.0922930284574" />
<Link Source="KubernetesCluster" Target="Runner.py3" Bounds="-1075.39039550781,-229.576487801222,163.233609160429,8.99595989414743" />
<Link Source="KubernetesCluster" Target="Runner.py4" Bounds="-1075.39039550781,-208.904464362079,165.585551579934,66.2801678235037" />
<Link Source="Runner.py" Target="AzureStatusTable" Bounds="-1072.13012695313,-535.250061035156,166.82373046875,105.237548828125" />
<Link Source="Runner.py" Target="QualcommDevice" Bounds="-834.853066838583,-543.269532104768,58.6235405861986,16.1894387709768" />
<Link Source="Runner.py1" Target="AzureStatusTable" Bounds="-1070.91955566406,-447.686828613281,165.907531738281,41.6351928710938" />
<Link Source="Runner.py1" Target="QualcommDevice1" Bounds="-834.55870320638,-450.305950587842,58.6235177587191,16.1877924281616" />
<Link Source="Runner.py2" Target="AzureStatusTable" Bounds="-1071.05505371094,-370.519165039063,169.874084472656,51.8364562988281" />
<Link Source="Runner.py2" Target="SnpeQuantization" Bounds="-830.72764851888,-314.664864642865,58.0109886197575,2.21273140591086" />
<Link Source="Runner.py3" Target="SnpeQuantiation" Bounds="-832.717089436849,-221.841899441892,63.2713423409264,2.37545015813271" />
<Link Source="Runner.py4" Target="Test_onnx" Bounds="-830.996020100911,-134.464308505795,60.3071978911451,5.86127855164511" />
<Link Source="Upload" Target="AzureBlobStore" Bounds="-1301.04666666667,-483.110272187314,98.5479867833703,22.1825747724036" />
<Link Source="Upload" Target="AzureStatusTable" Bounds="-1301.04666666667,-445.517004029011,99.824346137553,31.5519803774536" />
</Links>
<Categories>
<Category Id="cluster" />
<Category Id="device" />
<Category Id="quantize" />
<Category Id="script" />
<Category Id="storage" />
<Category Id="test" />
</Categories>
<Properties>
<Property Id="Bounds" DataType="System.Windows.Rect" />
<Property Id="Expression" DataType="System.String" />
<Property Id="GraphDirection" DataType="Microsoft.VisualStudio.Diagrams.Layout.LayoutOrientation" />
<Property Id="GroupLabel" DataType="System.String" />
<Property Id="IsEnabled" DataType="System.Boolean" />
<Property Id="Label" Label="Label" Description="Displayable label of an Annotatable object" DataType="System.String" />
<Property Id="Layout" DataType="System.String" />
<Property Id="Offset" DataType="System.String" />
<Property Id="TargetType" DataType="System.Type" />
<Property Id="UseManualLocation" DataType="System.Boolean" />
<Property Id="Value" DataType="System.String" />
<Property Id="ValueLabel" DataType="System.String" />
<Property Id="ZoomLevel" DataType="System.String" />
</Properties>
<Styles>
<Style TargetType="Node" GroupLabel="test" ValueLabel="True">
<Condition Expression="HasCategory('test')" />
<Setter Property="Icon" Value="CodeMap_TestProject" />
</Style>
<Style TargetType="Node" GroupLabel="storage" ValueLabel="True">
<Condition Expression="HasCategory('storage')" />
<Setter Property="Icon" Value="pack://application:,,,/Microsoft.VisualStudio.Progression.GraphControl;component/Icons/Table.png" />
</Style>
<Style TargetType="Node" GroupLabel="script" ValueLabel="True">
<Condition Expression="HasCategory('script')" />
<Setter Property="Icon" Value="pack://application:,,,/Microsoft.VisualStudio.Progression.GraphControl;component/Icons/Script.png" />
</Style>
<Style TargetType="Node" GroupLabel="quantize" ValueLabel="True">
<Condition Expression="HasCategory('quantize')" />
<Setter Property="Icon" Value="pack://application:,,,/Microsoft.VisualStudio.Progression.GraphControl;component/Icons/Gears.png" />
</Style>
<Style TargetType="Node" GroupLabel="device" ValueLabel="True">
<Condition Expression="HasCategory('device')" />
<Setter Property="Icon" Value="pack://application:,,,/Microsoft.VisualStudio.Progression.GraphControl;component/Icons/Device.png" />
</Style>
<Style TargetType="Node" GroupLabel="cluster" ValueLabel="True">
<Condition Expression="HasCategory('cluster')" />
<Setter Property="Icon" Value="pack://application:,,,/Microsoft.VisualStudio.Progression.GraphControl;component/Icons/Network.png" />
</Style>
<Style TargetType="Node">
<Setter Property="HorizontalAlignment" Value="Center" />
<Setter Property="IconPlacement" Value="Top" />
</Style>
</Styles>
</DirectedGraph>
|
archai/tasks/face_segmentation/aml/images/snpe.dgml/0
|
{
"file_path": "archai/tasks/face_segmentation/aml/images/snpe.dgml",
"repo_id": "archai",
"token_count": 3387
}
| 384 |
## Readme
This folder contains code for running models using the Qualcomm SNPE Neural Processing SDK,
including quantizing those models and running them on the Qualcomm DSP.
This folder uses http://github.com/microsoft/olive to do the actual SNPE work.
1. **Snapdragon 888 Dev Kit** - get one of these [Snapdragon 888 HDK](https://developer.qualcomm.com/hardware/snapdragon-888-hdk) boards.
1. **Download dataset**. Get the dataset from https://github.com/microsoft/FaceSynthetics.
The best way is using `azcopy`. You could put them in a datasets folder,
for example: `d:\datasets\FaceSynthetics`. Then set your `INPUT_DATASET` environment
variable pointing to this folder.
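For example (the path below is purely illustrative; point it at your own copy):
```
export INPUT_DATASET=~/datasets/FaceSynthetics
```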
1. **Install Android NDK**. You need a working version of `adb` in your PATH.
Just download the zip file from [https://developer.android.com/ndk/downloads/](https://developer.android.com/ndk/downloads/)
and unzip it then you can set your `ANDROID_NDK_ROOT` environment variable pointing to the folder containing the
unzipped bits.
1. **Check Device USB**. Check you can run `adb shell` to connect to your Snapdragon over USB.
You may need to run `sudo usermod -aG plugdev $LOGNAME`.
1. **Install SNPE SDK on Ubuntu 18.04**.
See [SNPE Setup](https://developer.qualcomm.com/sites/default/files/docs/snpe/setup.html).
See [Neural Processing SDK Download](https://developer.qualcomm.com/downloads/qualcomm-neural-processing-sdk-ai-v1600?referrer=node/34505).
You can skip the Caffe setup, but use the `requirements.txt` pip install list; the one posted in the Qualcomm setup page
has conflicting versions. Then set your `SNPE_ROOT` environment variable pointing to the folder containing the unzipped
bits. If you plan to use Qualcomm hardware devices then set `SNPE_ANDROID_ROOT` to the same place as `SNPE_ROOT`.
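For example (the SDK folder name below is illustrative and depends on the version you downloaded):
```
export SNPE_ROOT=~/snpe-sdk
export SNPE_ANDROID_ROOT=$SNPE_ROOT
```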
1. **Install Archai**. In your Python 3.8 Conda environment run:
```
git clone https://github.com/microsoft/archai.git
cd archai
pip install -e .[dev]
```
1. **Install required packages including Olive**
```
pushd tasks/face_segmentation/aml
pip install -r requirements.txt
```
1. **Let Olive configure SNPE**
```
python -m olive.snpe.configure
```
**If you run into a protobuf inconsistency with Python 3.8, you can work around
it by setting the following environment variable:**
```
export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
```
1. **Create experiment folder**. The subsequent scripts all assume you are in a folder for running your experiment.
```
mkdir ~/experiment1
cd ~/experiment1
```
In this folder we will build the following files:
- data/test - the test image set for the device
- data/quant - the image dataset for quantizing the model
- snpe_models/model.quant.dlc - the quantized model
1. **Prepare data**. Run `python create_data.py --help`, this scripts creates data for both
quantization and test and puts it in your local experiment folder under `data/test` and
`data/quant`. For example:
```
python create_data.py --input ~/datasets/FaceSynthetics --count 1000 --dim 256
```
1. **Convert and quantize model**. You can use `test_snpe.py` to convert a .onnx model to .dlc and
quantize it. For example:
```
python test_snpe.py --quantize --model model.onnx
```
This can take about 10 minutes depending on the size of your quantization data set and the size of
your model.
1. **Run test images on device**. You can use `test_snpe.py` to test your quantized model on a
Qualcomm 888 dev board. You can find the device id using `adb devices`:
```
python test_snpe.py --device e6dc0375 --images ./data/test --model model.onnx --dlc ./snpe_models/model.quant.dlc
```
1. **Performance benchmark SNPE model**.
```
python test_snpe.py --device e6dc0375 --benchmark --images ./data/test --model model.onnx --dlc ./snpe_models/model.quant.dlc
```
|
archai/tasks/face_segmentation/aml/snpe/readme.md/0
|
{
"file_path": "archai/tasks/face_segmentation/aml/snpe/readme.md",
"repo_id": "archai",
"token_count": 1270
}
| 385 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
"""Adapted from torchvision https://github.com/pytorch/vision/blob/main/references/classification/train.py"""
import copy
import datetime
import os
import time
import warnings
import torch
import torch.utils.data
import torchvision
import transforms
import utils
from torch import nn
from torch.utils.data.dataloader import default_collate
from torchvision.transforms.functional import InterpolationMode
from torchvision.models.quantization.mobilenetv2 import _replace_relu
from torchinfo import summary
from dataset import FaceLandmarkDataset
from search_space import create_model_from_search_results
def average_error(target, output):
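    # target and output have shape (B, K, 2): B images, K landmarks, (x, y) coords;
    # the result is the mean Euclidean distance over all landmarks in the batch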
errors = target - output # shape (B, K, 2)
norm = torch.norm(errors, dim=-1) # shape (B, K)
return norm.mean()
def train_one_epoch(model, criterion, optimizer, data_loader, device, epoch, args, model_ema=None, scaler=None):
model.train()
metric_logger = utils.MetricLogger(delimiter=" ")
metric_logger.add_meter("lr", utils.SmoothedValue(window_size=1, fmt="{value}"))
metric_logger.add_meter("img/s", utils.SmoothedValue(window_size=10, fmt="{value}"))
header = f"Epoch: [{epoch}]"
for i, (image, target) in enumerate(metric_logger.log_every(data_loader, args.print_freq, header)):
start_time = time.time()
image, target = image.to(device), target.to(device)
target = torch.squeeze(target)
with torch.cuda.amp.autocast(enabled=scaler is not None):
output = model(image)
output = torch.reshape(output, target.shape)
loss = criterion(output, target)
optimizer.zero_grad()
if scaler is not None:
scaler.scale(loss).backward()
if args.clip_grad_norm is not None:
# we should unscale the gradients of optimizer's assigned params if do gradient clipping
scaler.unscale_(optimizer)
nn.utils.clip_grad_norm_(model.parameters(), args.clip_grad_norm)
scaler.step(optimizer)
scaler.update()
else:
loss.backward()
if args.clip_grad_norm is not None:
nn.utils.clip_grad_norm_(model.parameters(), args.clip_grad_norm)
optimizer.step()
if model_ema and i % args.model_ema_steps == 0:
model_ema.update_parameters(model)
if epoch < args.lr_warmup_epochs:
# Reset ema buffer to keep copying weights during warmup period
model_ema.n_averaged.fill_(0)
error = average_error(target, output)
batch_size = image.shape[0]
metric_logger.update(loss=loss.item(), lr=optimizer.param_groups[0]["lr"])
metric_logger.meters["error"].update(error, n=batch_size)
metric_logger.meters["img/s"].update(batch_size / (time.time() - start_time))
def evaluate(model, criterion, data_loader, epoch=None, device=None, print_freq=100, log_suffix=""):
    # epoch and device are optional so the test-only and EMA evaluation calls can omit epoch
model.eval()
metric_logger = utils.MetricLogger(delimiter=" ")
header = f"Test: {log_suffix}"
num_processed_samples = 0
with torch.inference_mode():
for image, target in metric_logger.log_every(data_loader, print_freq, header):
image = image.to(device, non_blocking=True)
target = target.to(device, non_blocking=True)
output = model(image)
output = torch.reshape(output, target.shape)
loss = criterion(output, target)
error = average_error(target, output)
# FIXME need to take into account that the datasets
# could have been padded in distributed setup
batch_size = image.shape[0]
metric_logger.update(loss=loss.item())
metric_logger.meters["error"].update(error, n=batch_size)
num_processed_samples += batch_size
# gather the stats from all processes
num_processed_samples = utils.reduce_across_processes(num_processed_samples)
if (
hasattr(data_loader.dataset, "__len__")
and len(data_loader.dataset) != num_processed_samples
and torch.distributed.get_rank() == 0
):
# See FIXME above
warnings.warn(
f"It looks like the dataset has {len(data_loader.dataset)} samples, but {num_processed_samples} "
"samples were used for the validation, which might bias the results. "
"Try adjusting the batch size and / or the world size. "
"Setting the world size to 1 is always a safe bet."
)
metric_logger.synchronize_between_processes()
# print(f"{header} Acc@1 {metric_logger.acc1.global_avg:.3f} Acc@5 {metric_logger.acc5.global_avg:.3f}")
print(f"{header} Error {metric_logger.error.global_avg:.4f}")
return float(metric_logger.error.global_avg)
def load_data(traindir, args):
# Data loading code
print("Loading data")
_, val_crop_size, train_crop_size = args.val_resize_size, args.val_crop_size, args.train_crop_size
print("Loading training data")
st = time.time()
assert val_crop_size == train_crop_size
dataset = FaceLandmarkDataset(traindir, limit=args.max_num_images, crop_size=train_crop_size)
print("Took", time.time() - st)
validation_dataset_size = int(len(dataset) * 0.1)
dataset_train, dataset_test = torch.utils.data.random_split(
dataset, [len(dataset) - validation_dataset_size, validation_dataset_size]
)
dataset = dataset_train
print("Creating data loaders")
if args.distributed:
train_sampler = torch.utils.data.distributed.DistributedSampler(dataset)
test_sampler = torch.utils.data.distributed.DistributedSampler(dataset_test, shuffle=False)
else:
train_sampler = torch.utils.data.RandomSampler(dataset)
test_sampler = torch.utils.data.SequentialSampler(dataset_test)
return dataset, dataset_test, train_sampler, test_sampler
def setup_qat(model: nn.Module) -> None:
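    # Eager-mode QAT prep: _replace_relu swaps ReLU6/in-place ReLU for plain ReLU,
    # fuse_model merges conv/bn/relu modules, and prepare_qat inserts fake-quant
    # observers. The final int8 conversion (torch.ao.quantization.convert) happens
    # later, at checkpoint time in train().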
assert model is not None
_replace_relu(model)
model.eval()
# Modify quantization engine as appropriate for the target platform
model.setup_qconfig('fbgemm')
model.fuse_model()
torch.ao.quantization.prepare_qat(model.train(), inplace=True)
def train(args, model: nn.Module = None):
if args.output_dir:
utils.mkdir(args.output_dir)
utils.init_distributed_mode(args)
print(args)
device = torch.device(args.device)
if args.use_deterministic_algorithms:
torch.backends.cudnn.benchmark = False
torch.use_deterministic_algorithms(True)
else:
torch.backends.cudnn.benchmark = True
dataset, dataset_test, train_sampler, test_sampler = load_data(args.data_path, args)
num_classes = dataset.dataset.num_landmarks
data_loader = torch.utils.data.DataLoader(
dataset,
batch_size=args.batch_size,
sampler=train_sampler,
num_workers=args.workers,
pin_memory=True,
)
data_loader_test = torch.utils.data.DataLoader(
dataset_test, batch_size=args.batch_size, sampler=test_sampler, num_workers=args.workers, pin_memory=True
)
print("Creating model")
if model is None:
if args.search_result_archid:
model = create_model_from_search_results(
args.search_result_archid,
args.search_result_csv,
num_classes=num_classes,
qat=args.qat,
qat_skip_layers=args.qat_skip_layers)
            if args.qat:
print('Preparing for QAT')
setup_qat(model)
else:
model = torchvision.models.__dict__[args.model](weights=args.weights, num_classes=num_classes)
model.to(device)
if args.distributed and args.sync_bn:
model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
print(summary(model, input_size=(1, 3, 192, 192)))
criterion = nn.MSELoss()
if args.norm_weight_decay is None:
parameters = model.parameters()
else:
param_groups = torchvision.ops._utils.split_normalization_params(model)
wd_groups = [args.norm_weight_decay, args.weight_decay]
parameters = [{"params": p, "weight_decay": w} for p, w in zip(param_groups, wd_groups) if p]
opt_name = args.opt.lower()
if opt_name.startswith("sgd"):
optimizer = torch.optim.SGD(
parameters,
lr=args.lr,
momentum=args.momentum,
weight_decay=args.weight_decay,
nesterov="nesterov" in opt_name,
)
elif opt_name == "rmsprop":
optimizer = torch.optim.RMSprop(
parameters, lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay, eps=0.0316, alpha=0.9
)
elif opt_name == "adamw":
optimizer = torch.optim.AdamW(parameters, lr=args.lr, weight_decay=args.weight_decay)
else:
raise RuntimeError(f"Invalid optimizer {args.opt}. Only SGD, RMSprop and AdamW are supported.")
scaler = torch.cuda.amp.GradScaler() if args.amp else None
args.lr_scheduler = args.lr_scheduler.lower()
if args.lr_scheduler == "steplr":
main_lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=args.lr_step_size, gamma=args.lr_gamma)
elif args.lr_scheduler == "cosineannealinglr":
main_lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
optimizer, T_max=args.epochs - args.lr_warmup_epochs
)
elif args.lr_scheduler == "exponentiallr":
main_lr_scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma=args.lr_gamma)
else:
raise RuntimeError(
f"Invalid lr scheduler '{args.lr_scheduler}'. Only StepLR, CosineAnnealingLR and ExponentialLR "
"are supported."
)
if args.lr_warmup_epochs > 0:
if args.lr_warmup_method == "linear":
warmup_lr_scheduler = torch.optim.lr_scheduler.LinearLR(
optimizer, start_factor=args.lr_warmup_decay, total_iters=args.lr_warmup_epochs
)
elif args.lr_warmup_method == "constant":
warmup_lr_scheduler = torch.optim.lr_scheduler.ConstantLR(
optimizer, factor=args.lr_warmup_decay, total_iters=args.lr_warmup_epochs
)
else:
raise RuntimeError(
f"Invalid warmup lr method '{args.lr_warmup_method}'. Only linear and constant are supported."
)
lr_scheduler = torch.optim.lr_scheduler.SequentialLR(
optimizer, schedulers=[warmup_lr_scheduler, main_lr_scheduler], milestones=[args.lr_warmup_epochs]
)
else:
lr_scheduler = main_lr_scheduler
model_without_ddp = model
if args.distributed:
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu])
model_without_ddp = model.module
model_ema = None
if args.model_ema:
# Decay adjustment that aims to keep the decay independent from other hyper-parameters originally proposed at:
# https://github.com/facebookresearch/pycls/blob/f8cd9627/pycls/core/net.py#L123
#
# total_ema_updates = (Dataset_size / n_GPUs) * epochs / (batch_size_per_gpu * EMA_steps)
        # We consider constant = Dataset_size for a given dataset/setup and omit it. Thus:
# adjust = 1 / total_ema_updates ~= n_GPUs * batch_size_per_gpu * EMA_steps / epochs
adjust = args.world_size * args.batch_size * args.model_ema_steps / args.epochs
alpha = 1.0 - args.model_ema_decay
alpha = min(1.0, alpha * adjust)
model_ema = utils.ExponentialMovingAverage(model_without_ddp, device=device, decay=1.0 - alpha)
if args.resume:
checkpoint = torch.load(args.resume, map_location="cpu")
model_without_ddp.load_state_dict(checkpoint["model"])
if not args.test_only:
optimizer.load_state_dict(checkpoint["optimizer"])
lr_scheduler.load_state_dict(checkpoint["lr_scheduler"])
args.start_epoch = checkpoint["epoch"] + 1
if model_ema:
model_ema.load_state_dict(checkpoint["model_ema"])
if scaler:
scaler.load_state_dict(checkpoint["scaler"])
if args.test_only:
# We disable the cudnn benchmarking because it can noticeably affect the accuracy
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True
if model_ema:
evaluate(model_ema, criterion, data_loader_test, device=device, log_suffix="EMA")
else:
evaluate(model, criterion, data_loader_test, device=device)
return
print("Start training")
start_time = time.time()
for epoch in range(args.start_epoch, args.epochs):
if args.distributed:
train_sampler.set_epoch(epoch)
train_one_epoch(model, criterion, optimizer, data_loader, device, epoch, args, model_ema, scaler)
lr_scheduler.step()
val_error = evaluate(model, criterion, data_loader_test, epoch, device=device)
if model_ema:
val_error = evaluate(model_ema, criterion, data_loader_test, device=device, log_suffix="EMA")
if args.output_dir:
model_to_save = model_without_ddp
            if args.qat:
model_to_save = copy.deepcopy(model_without_ddp)
model_to_save.eval()
model_to_save.to(torch.device("cpu"))
torch.ao.quantization.convert(model_to_save, inplace=True)
checkpoint = {
"model": model_to_save.state_dict(),
"optimizer": optimizer.state_dict(),
"lr_scheduler": lr_scheduler.state_dict(),
"epoch": epoch,
"args": args,
}
if model_ema:
checkpoint["model_ema"] = model_ema.state_dict()
if scaler:
checkpoint["scaler"] = scaler.state_dict()
utils.save_on_master(checkpoint, os.path.join(args.output_dir, f"model_{epoch}.pth"))
utils.save_on_master(checkpoint, os.path.join(args.output_dir, "checkpoint.pth"))
total_time = time.time() - start_time
total_time_str = str(datetime.timedelta(seconds=int(total_time)))
print(f"Training time {total_time_str}")
return val_error
def get_args_parser(add_help=True):
import argparse
parser = argparse.ArgumentParser(description="PyTorch Classification Training", add_help=add_help)
parser.add_argument("--data_path", "--data-path", default=None, type=str, help="dataset path")
parser.add_argument(
"--max_num_images",
"--max-num-images",
default=None,
type=int,
help="limit to number of images to use in dataset",
)
parser.add_argument(
"--search_result_archid", "--search-result-archid", default=None, type=str, help="nas search arch id to use"
)
parser.add_argument(
"--search_result_csv", "--search_result-csv", default=None, type=str, help="nas search result csv to use"
)
parser.add_argument("--model", default="resnet18", type=str, help="model name")
parser.add_argument("--device", default="cuda", type=str, help="device (Use cuda or cpu Default: cuda)")
parser.add_argument(
"-b",
"--batch_size",
"--batch-size",
default=32,
type=int,
help="images per gpu, the total batch size is $NGPU x batch_size",
)
parser.add_argument("--epochs", default=90, type=int, metavar="N", help="number of total epochs to run")
parser.add_argument(
"-j", "--workers", default=16, type=int, metavar="N", help="number of data loading workers (default: 16)"
)
parser.add_argument("--opt", default="sgd", type=str, help="optimizer")
parser.add_argument("--lr", default=0.1, type=float, help="initial learning rate")
parser.add_argument("--momentum", default=0.9, type=float, metavar="M", help="momentum")
parser.add_argument(
"--wd",
"--weight-decay",
default=1e-4,
type=float,
metavar="W",
help="weight decay (default: 1e-4)",
dest="weight_decay",
)
parser.add_argument(
"--norm-weight-decay",
default=None,
type=float,
help="weight decay for Normalization layers (default: None, same value as --wd)",
)
parser.add_argument("--mixup-alpha", default=0.0, type=float, help="mixup alpha (default: 0.0)")
parser.add_argument("--cutmix-alpha", default=0.0, type=float, help="cutmix alpha (default: 0.0)")
parser.add_argument(
"--lr_scheduler", "--lr-scheduler", default="steplr", type=str, help="the lr scheduler (default: steplr)"
)
parser.add_argument("--lr-warmup-epochs", default=0, type=int, help="the number of epochs to warmup (default: 0)")
parser.add_argument(
"--lr-warmup-method", default="constant", type=str, help="the warmup method (default: constant)"
)
parser.add_argument("--lr-warmup-decay", default=0.01, type=float, help="the decay for lr")
parser.add_argument(
"--lr_step_size", "--lr-step-size", default=30, type=int, help="decrease lr every step-size epochs"
)
parser.add_argument("--lr_gamma", "--lr-gamma", default=0.1, type=float, help="decrease lr by a factor of lr-gamma")
parser.add_argument("--print-freq", default=10, type=int, help="print frequency")
parser.add_argument("--output_dir", "--output-dir", default=".", type=str, help="path to save outputs")
parser.add_argument("--resume", default="", type=str, help="path of checkpoint")
parser.add_argument("--start-epoch", default=0, type=int, metavar="N", help="start epoch")
parser.add_argument(
"--sync-bn",
dest="sync_bn",
help="Use sync batch norm",
action="store_true",
)
parser.add_argument(
"--test-only",
dest="test_only",
help="Only test the model",
action="store_true",
)
# Mixed precision training parameters
parser.add_argument("--amp", action="store_true", help="Use torch.cuda.amp for mixed precision training")
# distributed training parameters
parser.add_argument("--world-size", default=1, type=int, help="number of distributed processes")
parser.add_argument("--dist-url", default="env://", type=str, help="url used to set up distributed training")
parser.add_argument(
"--model-ema", action="store_true", help="enable tracking Exponential Moving Average of model parameters"
)
parser.add_argument(
"--model-ema-steps",
type=int,
default=32,
help="the number of iterations that controls how often to update the EMA model (default: 32)",
)
parser.add_argument(
"--model-ema-decay",
type=float,
default=0.99998,
help="decay factor for Exponential Moving Average of model parameters (default: 0.99998)",
)
parser.add_argument(
"--use-deterministic-algorithms", action="store_true", help="Forces the use of deterministic algorithms only."
)
parser.add_argument(
"--val-resize-size", default=128, type=int, help="the resize size used for validation (default: 128)"
)
parser.add_argument(
"--val-crop-size", default=128, type=int, help="the central crop size used for validation (default: 128)"
)
parser.add_argument(
"--train_crop_size",
"--train-crop-size",
default=128,
type=int,
help="the random crop size used for training (default: 128)",
)
parser.add_argument("--clip-grad-norm", default=None, type=float, help="the maximum gradient norm (default None)")
parser.add_argument("--qat", help="Performs quantization aware training", action="store_true")
parser.add_argument(
"--qat_skip_layers", default=0, type=int, help="Number of layers to be skipped from quantization when performing QAT"
)
parser.add_argument("--weights", default=None, type=str, help="the weights enum name to load")
return parser
if __name__ == "__main__":
args, _ = get_args_parser().parse_known_args()
train(args)
|
archai/tasks/facial_landmark_detection/train.py/0
|
{
"file_path": "archai/tasks/facial_landmark_detection/train.py",
"repo_id": "archai",
"token_count": 8576
}
| 386 |
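A note on the dual-spelling flags in the parser above: argparse derives the destination attribute from the first long option, so both the underscore and hyphen spellings write to the same field. A minimal sketch, assuming the get_args_parser defined above; the flag values are illustrative only:

# Both alias spellings resolve to the dest taken from the first long option.
args, _ = get_args_parser().parse_known_args(
    ["--lr-step-size", "20", "--search-result-archid", "abc123"]
)
assert args.lr_step_size == 20                # dest from "--lr_step_size"
assert args.search_result_archid == "abc123"  # dest from "--search_result_archid"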
{
"arch_type": "gpt2",
"d_inner": 960,
"d_model": 704,
"dropatt": 0.0,
"max_sequence_length": 1024,
"n_head": 2,
"n_layer": 9,
"vocab_size": 50257
}
|
archai/tasks/text_generation/models/gpt2_46e7c68a025417e20a7e13bd4c1ee71438d28069/0
|
{
"file_path": "archai/tasks/text_generation/models/gpt2_46e7c68a025417e20a7e13bd4c1ee71438d28069",
"repo_id": "archai",
"token_count": 83
}
| 387 |
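The record above is a plain JSON architecture description, so consuming it needs nothing beyond the standard library. A minimal sketch; the local file name is hypothetical, while the field names match the record:

import json

# Illustrative only: field names match the JSON record above.
with open("gpt2_46e7c68a025417e20a7e13bd4c1ee71438d28069") as f:
    arch = json.load(f)
print(f"{arch['arch_type']}: {arch['n_layer']} layers, {arch['n_head']} heads, "
      f"d_model={arch['d_model']}, d_inner={arch['d_inner']}")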
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from unittest.mock import MagicMock
from overrides import overrides
from archai.api.trainer_base import TrainerBase
class MyTrainer(TrainerBase):
def __init__(self) -> None:
super().__init__()
@overrides
def train(self) -> None:
return MagicMock()
@overrides
def evaluate(self) -> None:
return MagicMock()
@overrides
def predict(self) -> None:
return MagicMock()
def test_trainer():
trainer = MyTrainer()
# Assert that mocked methods run
assert trainer.train()
assert trainer.evaluate()
assert trainer.predict()
|
archai/tests/api/test_trainer_base.py/0
|
{
"file_path": "archai/tests/api/test_trainer_base.py",
"repo_id": "archai",
"token_count": 248
}
| 388 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import pytest
from archai.datasets.nlp.tokenizer_utils.token_config import (
SpecialTokenEnum,
TokenConfig,
)
@pytest.fixture
def token_config():
return TokenConfig(
bos_token="<bos>",
eos_token="<eos>",
unk_token="<unk>",
pad_token="<pad>",
add_prefix_space=False,
add_prefix_new_line=True,
lower_case=True,
)
def test_special_token_enum():
# Assert that the correct values are assigned to the special tokens enumerator
assert SpecialTokenEnum.UNK.value == 0
assert SpecialTokenEnum.BOS.value == 1
assert SpecialTokenEnum.EOS.value == 2
assert SpecialTokenEnum.PAD.value == 3
assert SpecialTokenEnum.MASK.value == 4
def test_token_config(token_config):
# Assert that the correct values are assigned to the special tokens
assert token_config.bos_token == "<bos>"
assert token_config.eos_token == "<eos>"
assert token_config.unk_token == "<unk>"
assert token_config.pad_token == "<pad>"
assert token_config.add_prefix_space is False
assert token_config.add_prefix_new_line is True
assert token_config.lower_case is True
# Assert that the special tokens are added to the special token list
special_tokens = token_config.get_special_tokens()
assert special_tokens == ["<unk>", "<bos>", "<eos>", "<pad>"]
# Assert that the special tokens names are returned correctly
assert token_config.special_token_name(SpecialTokenEnum.BOS) == "<bos>"
assert token_config.special_token_name(SpecialTokenEnum.EOS) == "<eos>"
assert token_config.special_token_name(SpecialTokenEnum.UNK) == "<unk>"
assert token_config.special_token_name(SpecialTokenEnum.PAD) == "<pad>"
assert token_config.special_token_name("invalid") is None
|
archai/tests/datasets/nlp/tokenizer_utils/test_token_config.py/0
|
{
"file_path": "archai/tests/datasets/nlp/tokenizer_utils/test_token_config.py",
"repo_id": "archai",
"token_count": 672
}
| 389 |
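For reference, the list asserted in test_token_config pins down the order get_special_tokens must return. A minimal sketch of a function that satisfies it; archai's real method may differ in detail (e.g. handling of a mask token):

# Hedged sketch: yields special tokens in the UNK, BOS, EOS, PAD order the
# test above asserts, skipping any token that is unset.
def get_special_tokens(config):
    candidates = (config.unk_token, config.bos_token, config.eos_token, config.pad_token)
    return [token for token in candidates if token is not None]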
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import List
from unittest.mock import MagicMock
import numpy as np
from overrides import overrides
from archai.discrete_search.api.archai_model import ArchaiModel
from archai.discrete_search.api.search_space import (
BayesOptSearchSpace,
DiscreteSearchSpace,
EvolutionarySearchSpace,
)
class MyDiscreteSearchSpace(DiscreteSearchSpace):
def __init__(self) -> None:
super().__init__()
@overrides
def save_arch(self, arch: ArchaiModel, file_path: str) -> None:
return MagicMock()
@overrides
def load_arch(self, file_path: str) -> ArchaiModel:
return MagicMock()
@overrides
def save_model_weights(self, arch: ArchaiModel, file_path: str) -> None:
return MagicMock()
@overrides
def load_model_weights(self, arch: ArchaiModel, file_path: str) -> None:
return MagicMock()
@overrides
def random_sample(self) -> ArchaiModel:
return MagicMock()
class MyEvolutionarySearchSpace(MyDiscreteSearchSpace, EvolutionarySearchSpace):
def __init__(self) -> None:
super().__init__()
@overrides
def mutate(self, arch: ArchaiModel) -> ArchaiModel:
return MagicMock()
@overrides
def crossover(self, arch_list: List[ArchaiModel]) -> ArchaiModel:
return MagicMock()
class MyBayesOptSearchSpace(MyDiscreteSearchSpace, BayesOptSearchSpace):
def __init__(self) -> None:
super().__init__()
@overrides
def encode(self, arch: ArchaiModel) -> np.ndarray:
return MagicMock()
def test_discrete_search_space():
search_space = MyDiscreteSearchSpace()
# Assert that overridden methods run
assert search_space.save_arch(MagicMock(), "test")
assert search_space.load_arch("test")
assert search_space.save_model_weights(MagicMock(), "test")
assert search_space.load_model_weights(MagicMock(), "test")
assert search_space.random_sample()
def test_evolutionary_search_space():
search_space = MyEvolutionarySearchSpace()
# Assert that overridden methods run
assert search_space.mutate(MagicMock())
assert search_space.crossover([MagicMock(), MagicMock()])
def test_bayes_opt_search_space():
search_space = MyBayesOptSearchSpace()
# Assert that overridden methods run
assert search_space.encode(MagicMock())
|
archai/tests/discrete_search/api/test_search_space.py/0
|
{
"file_path": "archai/tests/discrete_search/api/test_search_space.py",
"repo_id": "archai",
"token_count": 876
}
| 390 |
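The abstract classes above only fix the contract: mutate takes one architecture, crossover takes a list of them. A minimal sketch of that contract on plain dicts, since ArchaiModel's constructor is not shown here; the field names are hypothetical:

import random

def mutate(arch: dict) -> dict:
    # Perturb one dimension of the architecture; clamp to a valid range.
    child = dict(arch)
    child["n_layer"] = max(1, child["n_layer"] + random.choice([-1, 1]))
    return child

def crossover(parents: list) -> dict:
    # Uniform crossover: pick each field from a random parent.
    a, b = parents[0], parents[1]
    return {key: random.choice([a[key], b[key]]) for key in a}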
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import pytest
from transformers import PretrainedConfig
from archai.onnx.config_utils.codegen_onnx_config import CodeGenOnnxConfig
@pytest.fixture
def dummy_config_codegen():
class DummyConfig(PretrainedConfig):
max_position_embeddings = 16
hidden_size = 32
n_layer = 3
num_attention_heads = 4
return DummyConfig()
def test_codegen_onnx_config(dummy_config_codegen):
# Assert that default values are set correctly
codegen_onnx_config = CodeGenOnnxConfig(dummy_config_codegen)
assert codegen_onnx_config.num_layers == 3
assert codegen_onnx_config.is_ort_graph_optimizable is False
assert codegen_onnx_config.ort_graph_optimizer_args == (4, 32)
|
archai/tests/onnx/config_utils/test_codegen_onnx_config.py/0
|
{
"file_path": "archai/tests/onnx/config_utils/test_codegen_onnx_config.py",
"repo_id": "archai",
"token_count": 293
}
| 391 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import pytest
from archai.quantization.quantization_utils import rgetattr, rsetattr
@pytest.fixture
def obj():
class DummyInnerObject:
def __init__(self):
self.attr = "some inner value"
class DummyObject:
def __init__(self):
self.attr1 = DummyInnerObject()
self.attr1.attr2 = DummyInnerObject()
self.attr3 = "some value"
return DummyObject()
def test_rgetattr(obj):
# Assert normal attribute retrieval
assert rgetattr(obj, "attr3") == "some value"
# Assert recursive attribute retrieval
assert rgetattr(obj, "attr1.attr") == "some inner value"
assert rgetattr(obj, "attr1.attr2.attr") == "some inner value"
def test_rsetattr(obj):
# Assert normal attribute setting
rsetattr(obj, "attr3", "new value")
assert obj.attr3 == "new value"
# Assert recursive attribute setting
rsetattr(obj, "attr1.attr", "some value")
assert obj.attr1.attr == "some value"
rsetattr(obj, "attr1.attr2.attr", "some value")
assert obj.attr1.attr2.attr == "some value"
|
archai/tests/quantization/test_quantization_utils.py/0
|
{
"file_path": "archai/tests/quantization/test_quantization_utils.py",
"repo_id": "archai",
"token_count": 446
}
| 392 |
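The tests above fully pin down the behavior of rgetattr and rsetattr. A minimal sketch of helpers that satisfies them, using the common functools.reduce pattern; archai's actual implementation may differ:

import functools

def rgetattr(obj, attr, *default):
    # Walk dotted attribute paths, e.g. "attr1.attr2.attr".
    return functools.reduce(lambda o, name: getattr(o, name, *default),
                            attr.split("."), obj)

def rsetattr(obj, attr, value):
    # Set the final attribute on the object reached by the dotted prefix.
    prefix, _, last = attr.rpartition(".")
    target = rgetattr(obj, prefix) if prefix else obj
    setattr(target, last, value)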
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import numpy as np
import pytest
import torch
from torch.optim import SGD
from archai.trainers.gradual_warmup_scheduler import GradualWarmupScheduler
@pytest.fixture
def optimizer():
return SGD([torch.randn(2, 2, requires_grad=True)], 0.1)
@pytest.fixture
def scheduler(optimizer):
return GradualWarmupScheduler(optimizer, 2.0, 5)
def test_gradual_warmup_scheduler(scheduler):
# Assert that attributes have been defined correctly
assert scheduler.multiplier == 2.0
assert scheduler.total_epoch == 5
assert scheduler.after_scheduler is None
assert scheduler.finished is False
    # Assert that it produces correct values for last_epoch <= total_epoch
scheduler.last_epoch = 3
scheduler.base_lrs = [0.1, 0.2, 0.3]
result = scheduler.get_lr()
result = [np.round(lr, 2) for lr in result]
assert result == [0.16, 0.32, 0.48]
    # Assert that it produces correct values for last_epoch > total_epoch
scheduler.last_epoch = 7
result = scheduler.get_lr()
assert result == [0.2, 0.4, 0.6]
    # Assert that it produces correct values for last_epoch <= total_epoch
scheduler.last_epoch = 3
scheduler.base_lrs = [0.1, 0.2, 0.3]
scheduler.step()
result = scheduler.optimizer.param_groups[0]["lr"]
assert np.round(result, 2) == 0.18
    # Assert that it produces correct values for last_epoch > total_epoch
scheduler.last_epoch = 7
scheduler.step()
result = scheduler.optimizer.param_groups[0]["lr"]
assert result == 0.2
|
archai/tests/trainers/test_gradual_warmup_scheduler.py/0
|
{
"file_path": "archai/tests/trainers/test_gradual_warmup_scheduler.py",
"repo_id": "archai",
"token_count": 601
}
| 393 |
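The numbers asserted above follow from the standard gradual-warmup rule. A worked sketch, assuming archai's scheduler matches the widely used formulation (linear ramp to multiplier * base_lr over total_epoch, constant afterwards when no after_scheduler is set):

def warmup_lr(base_lr, multiplier, total_epoch, last_epoch):
    if last_epoch > total_epoch:
        return base_lr * multiplier  # warmup finished, no after_scheduler
    return base_lr * ((multiplier - 1.0) * last_epoch / total_epoch + 1.0)

assert round(warmup_lr(0.1, 2.0, 5, 3), 2) == 0.16  # matches the test above
assert warmup_lr(0.2, 2.0, 5, 7) == 0.4             # base_lr * multiplier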
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from ...v7_0.graph.models import *
from .graph_client import GraphClient
__all__ = [
'Avatar',
'GraphCachePolicies',
'GraphDescriptorResult',
'GraphFederatedProviderData',
'GraphGroup',
'GraphGroupCreationContext',
'GraphMember',
'GraphMembership',
'GraphMembershipState',
'GraphMembershipTraversal',
'GraphProviderInfo',
'GraphScope',
'GraphScopeCreationContext',
'GraphStorageKeyResult',
'GraphSubject',
'GraphSubjectBase',
'GraphSubjectLookup',
'GraphSubjectLookupKey',
'GraphSubjectQuery',
'GraphUser',
'GraphUserCreationContext',
'GraphUserUpdateContext',
'JsonPatchOperation',
'PagedGraphGroups',
'PagedGraphUsers',
'ReferenceLinks',
'GraphClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/released/graph/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/released/graph/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 371
}
| 394 |
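Generated __init__ modules like the one above simply re-export the released client and its models. A minimal sketch of obtaining that client; the organization URL and token are placeholders, and the factory method name is assumed from the package's client-factory convention:

from azure.devops.connection import Connection
from msrest.authentication import BasicAuthentication

connection = Connection(
    base_url="https://dev.azure.com/your-org",                    # placeholder
    creds=BasicAuthentication("", "your-personal-access-token"),  # placeholder PAT
)
graph_client = connection.clients.get_graph_client()  # assumed accessor name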
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from ...v7_0.policy.models import *
from .policy_client import PolicyClient
__all__ = [
'GraphSubjectBase',
'IdentityRef',
'PolicyConfiguration',
'PolicyConfigurationRef',
'PolicyEvaluationRecord',
'PolicyType',
'PolicyTypeRef',
'ReferenceLinks',
'VersionedPolicyConfigurationRef',
'PolicyClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/released/policy/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/released/policy/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 200
}
| 395 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from ...v7_0.symbol.models import *
from .symbol_client import SymbolClient
__all__ = [
'DebugEntry',
'DebugEntryCreateBatch',
'IDomainId',
'JsonBlobBlockHash',
'JsonBlobIdentifier',
'JsonBlobIdentifierWithBlocks',
'Request',
'ResourceBase',
'SymbolClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/released/symbol/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/released/symbol/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 203
}
| 396 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from ...v7_0.wiki.models import *
from .wiki_client import WikiClient
__all__ = [
'Comment',
'CommentAttachment',
'CommentCreateParameters',
'CommentList',
'CommentMention',
'CommentReaction',
'CommentResourceReference',
'CommentUpdateParameters',
'GitRepository',
'GitRepositoryRef',
'GitVersionDescriptor',
'GraphSubjectBase',
'IdentityRef',
'ReferenceLinks',
'TeamProjectCollectionReference',
'TeamProjectReference',
'WikiAttachment',
'WikiAttachmentResponse',
'WikiCreateBaseParameters',
'WikiCreateParametersV2',
'WikiPage',
'WikiPageCreateOrUpdateParameters',
'WikiPageDetail',
'WikiPageMove',
'WikiPageMoveParameters',
'WikiPageMoveResponse',
'WikiPageResponse',
'WikiPagesBatchRequest',
'WikiPageStat',
'WikiPageViewStats',
'WikiUpdateParameters',
'WikiV2',
'WikiClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/released/wiki/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/released/wiki/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 421
}
| 397 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class AuditActionInfo(Model):
"""
    :param action_id: The action id for the event, i.e. Git.CreateRepo, Project.RenameProject
:type action_id: str
:param area: Area of Azure DevOps the action occurred
:type area: str
:param category: Type of action executed
:type category: object
"""
_attribute_map = {
'action_id': {'key': 'actionId', 'type': 'str'},
'area': {'key': 'area', 'type': 'str'},
'category': {'key': 'category', 'type': 'object'}
}
def __init__(self, action_id=None, area=None, category=None):
super(AuditActionInfo, self).__init__()
self.action_id = action_id
self.area = area
self.category = category
class AuditLogEntry(Model):
"""
    :param action_id: The action id for the event, i.e. Git.CreateRepo, Project.RenameProject
:type action_id: str
:param activity_id: ActivityId
:type activity_id: str
:param actor_cUID: The Actor's CUID
:type actor_cUID: str
:param actor_uPN: The Actor's UPN
:type actor_uPN: str
:param actor_user_id: The Actor's User Id
:type actor_user_id: str
    :param authentication_mechanism: Type of authentication used by the actor
:type authentication_mechanism: str
:param correlation_id: This allows us to group things together, like one user action that caused a cascade of event entries (project creation).
:type correlation_id: str
:param data: External data such as CUIDs, item names, etc.
:type data: dict
:param id: EventId, should be unique
:type id: str
:param ip_address: IP Address where the event was originated
:type ip_address: str
:param project_id: When specified, the id of the project this event is associated to
:type project_id: str
:param scope_id: The organization Id (Organization is the only scope currently supported)
:type scope_id: str
:param scope_type: The type of the scope (Organization is only scope currently supported)
:type scope_type: object
:param timestamp: The time when the event occurred in UTC
:type timestamp: datetime
:param user_agent: The user agent from the request
:type user_agent: str
"""
_attribute_map = {
'action_id': {'key': 'actionId', 'type': 'str'},
'activity_id': {'key': 'activityId', 'type': 'str'},
'actor_cUID': {'key': 'actorCUID', 'type': 'str'},
'actor_uPN': {'key': 'actorUPN', 'type': 'str'},
'actor_user_id': {'key': 'actorUserId', 'type': 'str'},
'authentication_mechanism': {'key': 'authenticationMechanism', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'data': {'key': 'data', 'type': '{object}'},
'id': {'key': 'id', 'type': 'str'},
'ip_address': {'key': 'ipAddress', 'type': 'str'},
'project_id': {'key': 'projectId', 'type': 'str'},
'scope_id': {'key': 'scopeId', 'type': 'str'},
'scope_type': {'key': 'scopeType', 'type': 'object'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'user_agent': {'key': 'userAgent', 'type': 'str'}
}
def __init__(self, action_id=None, activity_id=None, actor_cUID=None, actor_uPN=None, actor_user_id=None, authentication_mechanism=None, correlation_id=None, data=None, id=None, ip_address=None, project_id=None, scope_id=None, scope_type=None, timestamp=None, user_agent=None):
super(AuditLogEntry, self).__init__()
self.action_id = action_id
self.activity_id = activity_id
self.actor_cUID = actor_cUID
self.actor_uPN = actor_uPN
self.actor_user_id = actor_user_id
self.authentication_mechanism = authentication_mechanism
self.correlation_id = correlation_id
self.data = data
self.id = id
self.ip_address = ip_address
self.project_id = project_id
self.scope_id = scope_id
self.scope_type = scope_type
self.timestamp = timestamp
self.user_agent = user_agent
class AuditLogQueryResult(Model):
"""
The object returned when the audit log is queried. It contains the log and the information needed to query more audit entries.
:param continuation_token: The continuation token to pass to get the next set of results
:type continuation_token: str
:param decorated_audit_log_entries: The list of audit log entries
:type decorated_audit_log_entries: list of :class:`DecoratedAuditLogEntry <azure.devops.v7_0.audit.models.DecoratedAuditLogEntry>`
:param has_more: True when there are more matching results to be fetched, false otherwise.
:type has_more: bool
"""
_attribute_map = {
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'decorated_audit_log_entries': {'key': 'decoratedAuditLogEntries', 'type': '[DecoratedAuditLogEntry]'},
'has_more': {'key': 'hasMore', 'type': 'bool'}
}
def __init__(self, continuation_token=None, decorated_audit_log_entries=None, has_more=None):
super(AuditLogQueryResult, self).__init__()
self.continuation_token = continuation_token
self.decorated_audit_log_entries = decorated_audit_log_entries
self.has_more = has_more
class AuditStream(Model):
"""
This class represents an audit stream
:param consumer_inputs: Inputs used to communicate with external service. Inputs could be url, a connection string, a token, etc.
:type consumer_inputs: dict
:param consumer_type: Type of the consumer, i.e. splunk, azureEventHub, etc.
:type consumer_type: str
:param created_time: The time when the stream was created
:type created_time: datetime
:param display_name: Used to identify individual streams
:type display_name: str
:param id: Unique stream identifier
:type id: int
:param status: Status of the stream, Enabled, Disabled
:type status: object
:param status_reason: Reason for the current stream status, i.e. Disabled by the system, Invalid credentials, etc.
:type status_reason: str
:param updated_time: The time when the stream was last updated
:type updated_time: datetime
"""
_attribute_map = {
'consumer_inputs': {'key': 'consumerInputs', 'type': '{str}'},
'consumer_type': {'key': 'consumerType', 'type': 'str'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'display_name': {'key': 'displayName', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'status': {'key': 'status', 'type': 'object'},
'status_reason': {'key': 'statusReason', 'type': 'str'},
'updated_time': {'key': 'updatedTime', 'type': 'iso-8601'}
}
def __init__(self, consumer_inputs=None, consumer_type=None, created_time=None, display_name=None, id=None, status=None, status_reason=None, updated_time=None):
super(AuditStream, self).__init__()
self.consumer_inputs = consumer_inputs
self.consumer_type = consumer_type
self.created_time = created_time
self.display_name = display_name
self.id = id
self.status = status
self.status_reason = status_reason
self.updated_time = updated_time
class DecoratedAuditLogEntry(Model):
"""
    :param action_id: The action id for the event, i.e. Git.CreateRepo, Project.RenameProject
:type action_id: str
:param activity_id: ActivityId
:type activity_id: str
:param actor_cUID: The Actor's CUID
:type actor_cUID: str
:param actor_display_name: DisplayName of the user who initiated the action
:type actor_display_name: str
:param actor_image_url: URL of Actor's Profile image
:type actor_image_url: str
:param actor_uPN: The Actor's UPN
:type actor_uPN: str
:param actor_user_id: The Actor's User Id
:type actor_user_id: str
:param area: Area of Azure DevOps the action occurred
:type area: str
:param authentication_mechanism: Type of authentication used by the actor
:type authentication_mechanism: str
:param category: Type of action executed
:type category: object
:param category_display_name: DisplayName of the category
:type category_display_name: str
:param correlation_id: This allows related audit entries to be grouped together. Generally this occurs when a single action causes a cascade of audit entries. For example, project creation.
:type correlation_id: str
:param data: External data such as CUIDs, item names, etc.
:type data: dict
:param details: Decorated details
:type details: str
:param id: EventId - Needs to be unique per service
:type id: str
:param ip_address: IP Address where the event was originated
:type ip_address: str
:param project_id: When specified, the id of the project this event is associated to
:type project_id: str
:param project_name: When specified, the name of the project this event is associated to
:type project_name: str
:param scope_display_name: DisplayName of the scope
:type scope_display_name: str
:param scope_id: The organization Id (Organization is the only scope currently supported)
:type scope_id: str
:param scope_type: The type of the scope (Organization is only scope currently supported)
:type scope_type: object
:param timestamp: The time when the event occurred in UTC
:type timestamp: datetime
:param user_agent: The user agent from the request
:type user_agent: str
"""
_attribute_map = {
'action_id': {'key': 'actionId', 'type': 'str'},
'activity_id': {'key': 'activityId', 'type': 'str'},
'actor_cUID': {'key': 'actorCUID', 'type': 'str'},
'actor_display_name': {'key': 'actorDisplayName', 'type': 'str'},
'actor_image_url': {'key': 'actorImageUrl', 'type': 'str'},
'actor_uPN': {'key': 'actorUPN', 'type': 'str'},
'actor_user_id': {'key': 'actorUserId', 'type': 'str'},
'area': {'key': 'area', 'type': 'str'},
'authentication_mechanism': {'key': 'authenticationMechanism', 'type': 'str'},
'category': {'key': 'category', 'type': 'object'},
'category_display_name': {'key': 'categoryDisplayName', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'data': {'key': 'data', 'type': '{object}'},
'details': {'key': 'details', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'ip_address': {'key': 'ipAddress', 'type': 'str'},
'project_id': {'key': 'projectId', 'type': 'str'},
'project_name': {'key': 'projectName', 'type': 'str'},
'scope_display_name': {'key': 'scopeDisplayName', 'type': 'str'},
'scope_id': {'key': 'scopeId', 'type': 'str'},
'scope_type': {'key': 'scopeType', 'type': 'object'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'user_agent': {'key': 'userAgent', 'type': 'str'}
}
def __init__(self, action_id=None, activity_id=None, actor_cUID=None, actor_display_name=None, actor_image_url=None, actor_uPN=None, actor_user_id=None, area=None, authentication_mechanism=None, category=None, category_display_name=None, correlation_id=None, data=None, details=None, id=None, ip_address=None, project_id=None, project_name=None, scope_display_name=None, scope_id=None, scope_type=None, timestamp=None, user_agent=None):
super(DecoratedAuditLogEntry, self).__init__()
self.action_id = action_id
self.activity_id = activity_id
self.actor_cUID = actor_cUID
self.actor_display_name = actor_display_name
self.actor_image_url = actor_image_url
self.actor_uPN = actor_uPN
self.actor_user_id = actor_user_id
self.area = area
self.authentication_mechanism = authentication_mechanism
self.category = category
self.category_display_name = category_display_name
self.correlation_id = correlation_id
self.data = data
self.details = details
self.id = id
self.ip_address = ip_address
self.project_id = project_id
self.project_name = project_name
self.scope_display_name = scope_display_name
self.scope_id = scope_id
self.scope_type = scope_type
self.timestamp = timestamp
self.user_agent = user_agent
__all__ = [
'AuditActionInfo',
'AuditLogEntry',
'AuditLogQueryResult',
'AuditStream',
'DecoratedAuditLogEntry',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/audit/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/audit/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 4894
}
| 398 |
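Every model above maps Python attribute names to wire keys through _attribute_map, which msrest consumes. A minimal sketch of serializing one of them by hand, assuming the AuditStream class defined above; client code normally never calls the serializer directly:

from msrest import Serializer

models = {"AuditStream": AuditStream}
stream = AuditStream(display_name="my stream", consumer_type="splunk")
wire = Serializer(models).body(stream, "AuditStream")  # camelCase dict
assert wire["displayName"] == "my stream"              # key from _attribute_map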
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class GraphSubjectBase(Model):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None):
super(GraphSubjectBase, self).__init__()
self._links = _links
self.descriptor = descriptor
self.display_name = display_name
self.url = url
class IdentityBase(Model):
"""
    Base Identity class to allow "trimmed" identity class in the GetConnectionData API. Makes sure that on-the-wire representations of the derived classes are compatible with each other (e.g. Server responds with PublicIdentity object while client deserializes it as Identity object). Derived classes should not have additional [DataMember] properties
:param custom_display_name: The custom display name for the identity (if any). Setting this property to an empty string will clear the existing custom display name. Setting this property to null will not affect the existing persisted value (since null values do not get sent over the wire or to the database)
:type custom_display_name: str
:param descriptor:
:type descriptor: :class:`str <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.str>`
:param id: Identity Identifier. Also called Storage Key, or VSID
:type id: str
    :param is_active: True if the identity has a membership in any Azure DevOps group in the organization.
:type is_active: bool
:param is_container: True if the identity is a group.
:type is_container: bool
:param master_id:
:type master_id: str
:param member_ids: Id of the members of the identity (groups only).
:type member_ids: list of str
:param member_of:
:type member_of: list of :class:`str <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.str>`
:param members:
:type members: list of :class:`str <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.str>`
:param meta_type_id:
:type meta_type_id: int
:param properties:
:type properties: :class:`object <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.object>`
:param provider_display_name: The display name for the identity as specified by the source identity provider.
:type provider_display_name: str
:param resource_version:
:type resource_version: int
:param social_descriptor:
:type social_descriptor: :class:`str <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.str>`
:param subject_descriptor: Subject descriptor of a Graph entity.
:type subject_descriptor: :class:`str <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.str>`
:param unique_user_id:
:type unique_user_id: int
"""
_attribute_map = {
'custom_display_name': {'key': 'customDisplayName', 'type': 'str'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_active': {'key': 'isActive', 'type': 'bool'},
'is_container': {'key': 'isContainer', 'type': 'bool'},
'master_id': {'key': 'masterId', 'type': 'str'},
'member_ids': {'key': 'memberIds', 'type': '[str]'},
'member_of': {'key': 'memberOf', 'type': '[str]'},
'members': {'key': 'members', 'type': '[str]'},
'meta_type_id': {'key': 'metaTypeId', 'type': 'int'},
'properties': {'key': 'properties', 'type': 'object'},
'provider_display_name': {'key': 'providerDisplayName', 'type': 'str'},
'resource_version': {'key': 'resourceVersion', 'type': 'int'},
'social_descriptor': {'key': 'socialDescriptor', 'type': 'str'},
'subject_descriptor': {'key': 'subjectDescriptor', 'type': 'str'},
'unique_user_id': {'key': 'uniqueUserId', 'type': 'int'}
}
def __init__(self, custom_display_name=None, descriptor=None, id=None, is_active=None, is_container=None, master_id=None, member_ids=None, member_of=None, members=None, meta_type_id=None, properties=None, provider_display_name=None, resource_version=None, social_descriptor=None, subject_descriptor=None, unique_user_id=None):
super(IdentityBase, self).__init__()
self.custom_display_name = custom_display_name
self.descriptor = descriptor
self.id = id
self.is_active = is_active
self.is_container = is_container
self.master_id = master_id
self.member_ids = member_ids
self.member_of = member_of
self.members = members
self.meta_type_id = meta_type_id
self.properties = properties
self.provider_display_name = provider_display_name
self.resource_version = resource_version
self.social_descriptor = social_descriptor
self.subject_descriptor = subject_descriptor
self.unique_user_id = unique_user_id
class IdentityData(Model):
"""
:param identity_ids:
:type identity_ids: list of str
"""
_attribute_map = {
'identity_ids': {'key': 'identityIds', 'type': '[str]'}
}
def __init__(self, identity_ids=None):
super(IdentityData, self).__init__()
self.identity_ids = identity_ids
class IdentityRef(GraphSubjectBase):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param directory_alias: Deprecated - Can be retrieved by querying the Graph user referenced in the "self" entry of the IdentityRef "_links" dictionary
:type directory_alias: str
:param id:
:type id: str
:param image_url: Deprecated - Available in the "avatar" entry of the IdentityRef "_links" dictionary
:type image_url: str
:param inactive: Deprecated - Can be retrieved by querying the Graph membership state referenced in the "membershipState" entry of the GraphUser "_links" dictionary
:type inactive: bool
:param is_aad_identity: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsAadUserType/Descriptor.IsAadGroupType)
:type is_aad_identity: bool
:param is_container: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsGroupType)
:type is_container: bool
:param is_deleted_in_origin:
:type is_deleted_in_origin: bool
:param profile_url: Deprecated - not in use in most preexisting implementations of ToIdentityRef
:type profile_url: str
:param unique_name: Deprecated - use Domain+PrincipalName instead
:type unique_name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'directory_alias': {'key': 'directoryAlias', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'image_url': {'key': 'imageUrl', 'type': 'str'},
'inactive': {'key': 'inactive', 'type': 'bool'},
'is_aad_identity': {'key': 'isAadIdentity', 'type': 'bool'},
'is_container': {'key': 'isContainer', 'type': 'bool'},
'is_deleted_in_origin': {'key': 'isDeletedInOrigin', 'type': 'bool'},
'profile_url': {'key': 'profileUrl', 'type': 'str'},
'unique_name': {'key': 'uniqueName', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, directory_alias=None, id=None, image_url=None, inactive=None, is_aad_identity=None, is_container=None, is_deleted_in_origin=None, profile_url=None, unique_name=None):
super(IdentityRef, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url)
self.directory_alias = directory_alias
self.id = id
self.image_url = image_url
self.inactive = inactive
self.is_aad_identity = is_aad_identity
self.is_container = is_container
self.is_deleted_in_origin = is_deleted_in_origin
self.profile_url = profile_url
self.unique_name = unique_name
class JsonPatchOperation(Model):
"""
The JSON model for a JSON Patch operation
:param from_: The path to copy from for the Move/Copy operation.
:type from_: str
:param op: The patch operation
:type op: object
    :param path: The path for the operation. In the case of an array, a zero-based index can be used to specify the position in the array (e.g. /biscuits/0/name). The "-" character can be used instead of an index to insert at the end of the array (e.g. /biscuits/-).
:type path: str
:param value: The value for the operation. This is either a primitive or a JToken.
:type value: object
"""
_attribute_map = {
'from_': {'key': 'from', 'type': 'str'},
'op': {'key': 'op', 'type': 'object'},
'path': {'key': 'path', 'type': 'str'},
'value': {'key': 'value', 'type': 'object'}
}
def __init__(self, from_=None, op=None, path=None, value=None):
super(JsonPatchOperation, self).__init__()
self.from_ = from_
self.op = op
self.path = path
self.value = value
class OperationReference(Model):
"""
Reference for an async operation.
:param id: Unique identifier for the operation.
:type id: str
:param plugin_id: Unique identifier for the plugin.
:type plugin_id: str
:param status: The current status of the operation.
:type status: object
:param url: URL to get the full operation object.
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'plugin_id': {'key': 'pluginId', 'type': 'str'},
'status': {'key': 'status', 'type': 'object'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, plugin_id=None, status=None, url=None):
super(OperationReference, self).__init__()
self.id = id
self.plugin_id = plugin_id
self.status = status
self.url = url
class ProcessReference(Model):
"""
:param name:
:type name: str
:param url:
:type url: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, name=None, url=None):
super(ProcessReference, self).__init__()
self.name = name
self.url = url
class ProjectAvatar(Model):
"""
Contains the image data for project avatar.
:param image: The avatar image represented as a byte array.
:type image: str
"""
_attribute_map = {
'image': {'key': 'image', 'type': 'str'}
}
def __init__(self, image=None):
super(ProjectAvatar, self).__init__()
self.image = image
class ProjectInfo(Model):
"""
Contains information describing a project.
:param abbreviation: The abbreviated name of the project.
:type abbreviation: str
:param description: The description of the project.
:type description: str
:param id: The id of the project.
:type id: str
:param last_update_time: The time that this project was last updated.
:type last_update_time: datetime
:param name: The name of the project.
:type name: str
:param properties: A set of name-value pairs storing additional property data related to the project.
:type properties: list of :class:`ProjectProperty <azure.devops.v7_0.core.models.ProjectProperty>`
:param revision: The current revision of the project.
:type revision: long
:param state: The current state of the project.
:type state: object
:param uri: A Uri that can be used to refer to this project.
:type uri: str
:param version: The version number of the project.
:type version: long
:param visibility: Indicates whom the project is visible to.
:type visibility: object
"""
_attribute_map = {
'abbreviation': {'key': 'abbreviation', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'last_update_time': {'key': 'lastUpdateTime', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': '[ProjectProperty]'},
'revision': {'key': 'revision', 'type': 'long'},
'state': {'key': 'state', 'type': 'object'},
'uri': {'key': 'uri', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
'visibility': {'key': 'visibility', 'type': 'object'}
}
def __init__(self, abbreviation=None, description=None, id=None, last_update_time=None, name=None, properties=None, revision=None, state=None, uri=None, version=None, visibility=None):
super(ProjectInfo, self).__init__()
self.abbreviation = abbreviation
self.description = description
self.id = id
self.last_update_time = last_update_time
self.name = name
self.properties = properties
self.revision = revision
self.state = state
self.uri = uri
self.version = version
self.visibility = visibility
class ProjectProperties(Model):
"""
:param project_id: The team project Id
:type project_id: str
:param properties: The collection of team project properties
:type properties: list of :class:`ProjectProperty <azure.devops.v7_0.core.models.ProjectProperty>`
"""
_attribute_map = {
'project_id': {'key': 'projectId', 'type': 'str'},
'properties': {'key': 'properties', 'type': '[ProjectProperty]'}
}
def __init__(self, project_id=None, properties=None):
super(ProjectProperties, self).__init__()
self.project_id = project_id
self.properties = properties
class ProjectProperty(Model):
"""
A named value associated with a project.
:param name: The name of the property.
:type name: str
:param value: The value of the property.
:type value: object
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'object'}
}
def __init__(self, name=None, value=None):
super(ProjectProperty, self).__init__()
self.name = name
self.value = value
class Proxy(Model):
"""
:param authorization:
:type authorization: :class:`ProxyAuthorization <azure.devops.v7_0.core.models.ProxyAuthorization>`
:param description: This is a description string
:type description: str
:param friendly_name: The friendly name of the server
:type friendly_name: str
:param global_default:
:type global_default: bool
:param site: This is a string representation of the site that the proxy server is located in (e.g. "NA-WA-RED")
:type site: str
:param site_default:
:type site_default: bool
:param url: The URL of the proxy server
:type url: str
"""
_attribute_map = {
'authorization': {'key': 'authorization', 'type': 'ProxyAuthorization'},
'description': {'key': 'description', 'type': 'str'},
'friendly_name': {'key': 'friendlyName', 'type': 'str'},
'global_default': {'key': 'globalDefault', 'type': 'bool'},
'site': {'key': 'site', 'type': 'str'},
'site_default': {'key': 'siteDefault', 'type': 'bool'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, authorization=None, description=None, friendly_name=None, global_default=None, site=None, site_default=None, url=None):
super(Proxy, self).__init__()
self.authorization = authorization
self.description = description
self.friendly_name = friendly_name
self.global_default = global_default
self.site = site
self.site_default = site_default
self.url = url
class ProxyAuthorization(Model):
"""
:param authorization_url: Gets or sets the endpoint used to obtain access tokens from the configured token service.
:type authorization_url: str
:param client_id: Gets or sets the client identifier for this proxy.
:type client_id: str
:param identity: Gets or sets the user identity to authorize for on-prem.
:type identity: :class:`str <azure.devops.v7_0.core.models.str>`
:param public_key: Gets or sets the public key used to verify the identity of this proxy. Only specify on hosted.
:type public_key: :class:`PublicKey <azure.devops.v7_0.core.models.PublicKey>`
"""
_attribute_map = {
'authorization_url': {'key': 'authorizationUrl', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'str'},
'public_key': {'key': 'publicKey', 'type': 'PublicKey'}
}
def __init__(self, authorization_url=None, client_id=None, identity=None, public_key=None):
super(ProxyAuthorization, self).__init__()
self.authorization_url = authorization_url
self.client_id = client_id
self.identity = identity
self.public_key = public_key
class PublicKey(Model):
"""
Represents the public key portion of an RSA asymmetric key.
:param exponent: Gets or sets the exponent for the public key.
:type exponent: str
:param modulus: Gets or sets the modulus for the public key.
:type modulus: str
"""
_attribute_map = {
'exponent': {'key': 'exponent', 'type': 'str'},
'modulus': {'key': 'modulus', 'type': 'str'}
}
def __init__(self, exponent=None, modulus=None):
super(PublicKey, self).__init__()
self.exponent = exponent
self.modulus = modulus
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The readonly view of the links. Because Reference links are readonly, we only want to expose them as read only.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
class TeamMember(Model):
"""
:param identity:
:type identity: :class:`IdentityRef <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.IdentityRef>`
:param is_team_admin:
:type is_team_admin: bool
"""
_attribute_map = {
'identity': {'key': 'identity', 'type': 'IdentityRef'},
'is_team_admin': {'key': 'isTeamAdmin', 'type': 'bool'}
}
def __init__(self, identity=None, is_team_admin=None):
super(TeamMember, self).__init__()
self.identity = identity
self.is_team_admin = is_team_admin
class TeamProjectCollectionReference(Model):
"""
Reference object for a TeamProjectCollection.
:param id: Collection Id.
:type id: str
:param name: Collection Name.
:type name: str
:param url: Collection REST Url.
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, name=None, url=None):
super(TeamProjectCollectionReference, self).__init__()
self.id = id
self.name = name
self.url = url
class TeamProjectReference(Model):
"""
Represents a shallow reference to a TeamProject.
:param abbreviation: Project abbreviation.
:type abbreviation: str
:param default_team_image_url: Url to default team identity image.
:type default_team_image_url: str
:param description: The project's description (if any).
:type description: str
:param id: Project identifier.
:type id: str
:param last_update_time: Project last update time.
:type last_update_time: datetime
:param name: Project name.
:type name: str
:param revision: Project revision.
:type revision: long
:param state: Project state.
:type state: object
:param url: Url to the full version of the object.
:type url: str
:param visibility: Project visibility.
:type visibility: object
"""
_attribute_map = {
'abbreviation': {'key': 'abbreviation', 'type': 'str'},
'default_team_image_url': {'key': 'defaultTeamImageUrl', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'last_update_time': {'key': 'lastUpdateTime', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'revision': {'key': 'revision', 'type': 'long'},
'state': {'key': 'state', 'type': 'object'},
'url': {'key': 'url', 'type': 'str'},
'visibility': {'key': 'visibility', 'type': 'object'}
}
def __init__(self, abbreviation=None, default_team_image_url=None, description=None, id=None, last_update_time=None, name=None, revision=None, state=None, url=None, visibility=None):
super(TeamProjectReference, self).__init__()
self.abbreviation = abbreviation
self.default_team_image_url = default_team_image_url
self.description = description
self.id = id
self.last_update_time = last_update_time
self.name = name
self.revision = revision
self.state = state
self.url = url
self.visibility = visibility
class WebApiConnectedServiceRef(Model):
"""
:param id:
:type id: str
:param url:
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, url=None):
super(WebApiConnectedServiceRef, self).__init__()
self.id = id
self.url = url
class WebApiTeamRef(Model):
"""
:param id: Team (Identity) Guid. A Team Foundation ID.
:type id: str
:param name: Team name
:type name: str
:param url: Team REST API Url
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, name=None, url=None):
super(WebApiTeamRef, self).__init__()
self.id = id
self.name = name
self.url = url
class Identity(IdentityBase):
"""
:param custom_display_name: The custom display name for the identity (if any). Setting this property to an empty string will clear the existing custom display name. Setting this property to null will not affect the existing persisted value (since null values do not get sent over the wire or to the database)
:type custom_display_name: str
:param descriptor:
:type descriptor: :class:`str <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.str>`
:param id: Identity Identifier. Also called Storage Key, or VSID
:type id: str
    :param is_active: True if the identity has a membership in any Azure DevOps group in the organization.
:type is_active: bool
:param is_container: True if the identity is a group.
:type is_container: bool
:param master_id:
:type master_id: str
:param member_ids: Id of the members of the identity (groups only).
:type member_ids: list of str
:param member_of:
:type member_of: list of :class:`str <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.str>`
:param members:
:type members: list of :class:`str <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.str>`
:param meta_type_id:
:type meta_type_id: int
:param properties:
:type properties: :class:`object <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.object>`
:param provider_display_name: The display name for the identity as specified by the source identity provider.
:type provider_display_name: str
:param resource_version:
:type resource_version: int
:param social_descriptor:
:type social_descriptor: :class:`str <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.str>`
:param subject_descriptor: Subject descriptor of a Graph entity.
:type subject_descriptor: :class:`str <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.str>`
:param unique_user_id:
:type unique_user_id: int
"""
_attribute_map = {
'custom_display_name': {'key': 'customDisplayName', 'type': 'str'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_active': {'key': 'isActive', 'type': 'bool'},
'is_container': {'key': 'isContainer', 'type': 'bool'},
'master_id': {'key': 'masterId', 'type': 'str'},
'member_ids': {'key': 'memberIds', 'type': '[str]'},
'member_of': {'key': 'memberOf', 'type': '[str]'},
'members': {'key': 'members', 'type': '[str]'},
'meta_type_id': {'key': 'metaTypeId', 'type': 'int'},
'properties': {'key': 'properties', 'type': 'object'},
'provider_display_name': {'key': 'providerDisplayName', 'type': 'str'},
'resource_version': {'key': 'resourceVersion', 'type': 'int'},
'social_descriptor': {'key': 'socialDescriptor', 'type': 'str'},
'subject_descriptor': {'key': 'subjectDescriptor', 'type': 'str'},
'unique_user_id': {'key': 'uniqueUserId', 'type': 'int'},
}
def __init__(self, custom_display_name=None, descriptor=None, id=None, is_active=None, is_container=None, master_id=None, member_ids=None, member_of=None, members=None, meta_type_id=None, properties=None, provider_display_name=None, resource_version=None, social_descriptor=None, subject_descriptor=None, unique_user_id=None):
super(Identity, self).__init__(custom_display_name=custom_display_name, descriptor=descriptor, id=id, is_active=is_active, is_container=is_container, master_id=master_id, member_ids=member_ids, member_of=member_of, members=members, meta_type_id=meta_type_id, properties=properties, provider_display_name=provider_display_name, resource_version=resource_version, social_descriptor=social_descriptor, subject_descriptor=subject_descriptor, unique_user_id=unique_user_id)
class Process(ProcessReference):
"""
:param name:
:type name: str
:param url:
:type url: str
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.core.models.ReferenceLinks>`
:param description:
:type description: str
:param id:
:type id: str
:param is_default:
:type is_default: bool
:param type:
:type type: object
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'type': {'key': 'type', 'type': 'object'}
}
def __init__(self, name=None, url=None, _links=None, description=None, id=None, is_default=None, type=None):
super(Process, self).__init__(name=name, url=url)
self._links = _links
self.description = description
self.id = id
self.is_default = is_default
self.type = type
class TeamProject(TeamProjectReference):
"""
Represents a Team Project object.
:param abbreviation: Project abbreviation.
:type abbreviation: str
:param default_team_image_url: Url to default team identity image.
:type default_team_image_url: str
:param description: The project's description (if any).
:type description: str
:param id: Project identifier.
:type id: str
:param last_update_time: Project last update time.
:type last_update_time: datetime
:param name: Project name.
:type name: str
:param revision: Project revision.
:type revision: long
:param state: Project state.
:type state: object
:param url: Url to the full version of the object.
:type url: str
:param visibility: Project visibility.
:type visibility: object
:param _links: The links to other objects related to this object.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.core.models.ReferenceLinks>`
:param capabilities: Set of capabilities this project has (such as process template & version control).
:type capabilities: dict
:param default_team: The shallow ref to the default team.
:type default_team: :class:`WebApiTeamRef <azure.devops.v7_0.core.models.WebApiTeamRef>`
"""
_attribute_map = {
'abbreviation': {'key': 'abbreviation', 'type': 'str'},
'default_team_image_url': {'key': 'defaultTeamImageUrl', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'last_update_time': {'key': 'lastUpdateTime', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'revision': {'key': 'revision', 'type': 'long'},
'state': {'key': 'state', 'type': 'object'},
'url': {'key': 'url', 'type': 'str'},
'visibility': {'key': 'visibility', 'type': 'object'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'capabilities': {'key': 'capabilities', 'type': '{{str}}'},
'default_team': {'key': 'defaultTeam', 'type': 'WebApiTeamRef'}
}
def __init__(self, abbreviation=None, default_team_image_url=None, description=None, id=None, last_update_time=None, name=None, revision=None, state=None, url=None, visibility=None, _links=None, capabilities=None, default_team=None):
super(TeamProject, self).__init__(abbreviation=abbreviation, default_team_image_url=default_team_image_url, description=description, id=id, last_update_time=last_update_time, name=name, revision=revision, state=state, url=url, visibility=visibility)
self._links = _links
self.capabilities = capabilities
self.default_team = default_team
class TeamProjectCollection(TeamProjectCollectionReference):
"""
Data contract for a TeamProjectCollection.
:param id: Collection Id.
:type id: str
:param name: Collection Name.
:type name: str
:param url: Collection REST Url.
:type url: str
:param _links: The links to other objects related to this object.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.core.models.ReferenceLinks>`
:param description: Project collection description.
:type description: str
:param process_customization_type: Process customization type on this collection. It can be Xml or Inherited.
:type process_customization_type: object
:param state: Project collection state.
:type state: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'description': {'key': 'description', 'type': 'str'},
'process_customization_type': {'key': 'processCustomizationType', 'type': 'object'},
'state': {'key': 'state', 'type': 'str'}
}
def __init__(self, id=None, name=None, url=None, _links=None, description=None, process_customization_type=None, state=None):
super(TeamProjectCollection, self).__init__(id=id, name=name, url=url)
self._links = _links
self.description = description
self.process_customization_type = process_customization_type
self.state = state
class WebApiConnectedService(WebApiConnectedServiceRef):
"""
:param url:
:type url: str
:param authenticated_by: The user who did the OAuth authentication to created this service
:type authenticated_by: :class:`IdentityRef <azure.devops.v7_0.core.models.IdentityRef>`
:param description: Extra description on the service.
:type description: str
:param friendly_name: Friendly Name of service connection
:type friendly_name: str
:param id: Id/Name of the connection service. For example: the Subscription Id for an Azure connection
:type id: str
:param kind: The kind of service.
:type kind: str
:param project: The project associated with this service
:type project: :class:`TeamProjectReference <azure.devops.v7_0.core.models.TeamProjectReference>`
:param service_uri: Optional uri to connect directly to the service such as https://windows.azure.com
:type service_uri: str
"""
_attribute_map = {
'url': {'key': 'url', 'type': 'str'},
'authenticated_by': {'key': 'authenticatedBy', 'type': 'IdentityRef'},
'description': {'key': 'description', 'type': 'str'},
'friendly_name': {'key': 'friendlyName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'service_uri': {'key': 'serviceUri', 'type': 'str'}
}
def __init__(self, url=None, authenticated_by=None, description=None, friendly_name=None, id=None, kind=None, project=None, service_uri=None):
super(WebApiConnectedService, self).__init__(url=url)
self.authenticated_by = authenticated_by
self.description = description
self.friendly_name = friendly_name
self.id = id
self.kind = kind
self.project = project
self.service_uri = service_uri
class WebApiConnectedServiceDetails(WebApiConnectedServiceRef):
"""
:param id:
:type id: str
:param url:
:type url: str
:param connected_service_meta_data: Metadata for the service connection
:type connected_service_meta_data: :class:`WebApiConnectedService <azure.devops.v7_0.core.models.WebApiConnectedService>`
:param credentials_xml: Credential info
:type credentials_xml: str
:param end_point: Optional uri to connect directly to the service such as https://windows.azure.com
:type end_point: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'connected_service_meta_data': {'key': 'connectedServiceMetaData', 'type': 'WebApiConnectedService'},
'credentials_xml': {'key': 'credentialsXml', 'type': 'str'},
'end_point': {'key': 'endPoint', 'type': 'str'}
}
def __init__(self, id=None, url=None, connected_service_meta_data=None, credentials_xml=None, end_point=None):
super(WebApiConnectedServiceDetails, self).__init__(id=id, url=url)
self.connected_service_meta_data = connected_service_meta_data
self.credentials_xml = credentials_xml
self.end_point = end_point
class WebApiTeam(WebApiTeamRef):
"""
:param id: Team (Identity) Guid. A Team Foundation ID.
:type id: str
:param name: Team name
:type name: str
:param url: Team REST API Url
:type url: str
:param description: Team description
:type description: str
:param identity: Team identity.
:type identity: :class:`Identity <azure.devops.v7_0.core.models.Identity>`
:param identity_url: Identity REST API Url to this team
:type identity_url: str
:param project_id:
:type project_id: str
:param project_name:
:type project_name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'Identity'},
'identity_url': {'key': 'identityUrl', 'type': 'str'},
'project_id': {'key': 'projectId', 'type': 'str'},
'project_name': {'key': 'projectName', 'type': 'str'}
}
def __init__(self, id=None, name=None, url=None, description=None, identity=None, identity_url=None, project_id=None, project_name=None):
super(WebApiTeam, self).__init__(id=id, name=name, url=url)
self.description = description
self.identity = identity
self.identity_url = identity_url
self.project_id = project_id
self.project_name = project_name
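# Editorial example: the inverse direction, deserializing a camelCase REST
# payload back into a WebApiTeam via msrest. Deserializer instances are
# callable with (target type name, data); the sample payload is invented.
if __name__ == '__main__':
    from msrest import Deserializer
    _team_models = {
        'WebApiTeam': WebApiTeam,
        'WebApiTeamRef': WebApiTeamRef,
        'Identity': Identity,
    }
    _deserializer = Deserializer(_team_models)
    _raw = {
        'id': 'team-guid',
        'name': 'Fabrikam Team',
        'projectId': 'project-guid',
        'projectName': 'Fabrikam',
    }
    _team = _deserializer('WebApiTeam', _raw)
    print(_team.name, _team.project_name)  # -> Fabrikam Team Fabrikam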
__all__ = [
'GraphSubjectBase',
'IdentityBase',
'IdentityData',
'IdentityRef',
'JsonPatchOperation',
'OperationReference',
'ProcessReference',
'ProjectAvatar',
'ProjectInfo',
'ProjectProperties',
'ProjectProperty',
'Proxy',
'ProxyAuthorization',
'PublicKey',
'ReferenceLinks',
'TeamMember',
'TeamProjectCollectionReference',
'TeamProjectReference',
'WebApiConnectedServiceRef',
'WebApiTeamRef',
'Identity',
'Process',
'TeamProject',
'TeamProjectCollection',
'WebApiConnectedService',
'WebApiConnectedServiceDetails',
'WebApiTeam',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/core/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/core/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 14889
}
| 399 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .models import *
from .feature_management_client import FeatureManagementClient
__all__ = [
'ContributedFeature',
'ContributedFeatureHandlerSettings',
'ContributedFeatureListener',
'ContributedFeatureSettingScope',
'ContributedFeatureState',
'ContributedFeatureStateQuery',
'ContributedFeatureValueRule',
'ReferenceLinks',
'FeatureManagementClient'
]
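# Editorial example (a hedged sketch, not part of the generated file): how a
# caller typically obtains this client from an azure-devops Connection. The
# organization URL and personal access token below are placeholders. Shown in
# comment form because this package __init__ uses relative imports and cannot
# be run directly as a script.
#
#     from azure.devops.connection import Connection
#     from msrest.authentication import BasicAuthentication
#
#     credentials = BasicAuthentication('', 'personal-access-token')
#     connection = Connection(base_url='https://dev.azure.com/your-organization',
#                             creds=credentials)
#     client = connection.get_client('azure.devops.v7_0.feature_management.'
#                                    'feature_management_client.FeatureManagementClient')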
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/feature_management/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/feature_management/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 196
}
| 400 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class BaseSubscriptionFilter(Model):
"""
:param event_type:
:type event_type: str
:param type:
:type type: str
"""
_attribute_map = {
'event_type': {'key': 'eventType', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, event_type=None, type=None):
super(BaseSubscriptionFilter, self).__init__()
self.event_type = event_type
self.type = type
class BatchNotificationOperation(Model):
"""
:param notification_operation:
:type notification_operation: object
:param notification_query_conditions:
:type notification_query_conditions: list of :class:`NotificationQueryCondition <azure.devops.v7_0.notification.models.NotificationQueryCondition>`
"""
_attribute_map = {
'notification_operation': {'key': 'notificationOperation', 'type': 'object'},
'notification_query_conditions': {'key': 'notificationQueryConditions', 'type': '[NotificationQueryCondition]'}
}
def __init__(self, notification_operation=None, notification_query_conditions=None):
super(BatchNotificationOperation, self).__init__()
self.notification_operation = notification_operation
self.notification_query_conditions = notification_query_conditions
class EventActor(Model):
"""
Defines an "actor" for an event.
:param id: Required: This is the identity of the user for the specified role.
:type id: str
:param role: Required: The event specific name of a role.
:type role: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'role': {'key': 'role', 'type': 'str'}
}
def __init__(self, id=None, role=None):
super(EventActor, self).__init__()
self.id = id
self.role = role
class EventScope(Model):
"""
Defines a scope for an event.
:param id: Required: This is the identity of the scope for the type.
:type id: str
:param name: Optional: The display name of the scope
:type name: str
:param type: Required: The event specific type of a scope.
:type type: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, id=None, name=None, type=None):
super(EventScope, self).__init__()
self.id = id
self.name = name
self.type = type
class EventsEvaluationResult(Model):
"""
Encapsulates event evaluation result properties. It defines the total number of events used and the number of matched events.
:param count: Count of events evaluated.
:type count: int
:param matched_count: Count of matched events.
:type matched_count: int
"""
_attribute_map = {
'count': {'key': 'count', 'type': 'int'},
'matched_count': {'key': 'matchedCount', 'type': 'int'}
}
def __init__(self, count=None, matched_count=None):
super(EventsEvaluationResult, self).__init__()
self.count = count
self.matched_count = matched_count
class EventTransformRequest(Model):
"""
A transform request specifies the properties of a notification event to be transformed.
:param event_payload: Event payload.
:type event_payload: str
:param event_type: Event type.
:type event_type: str
:param system_inputs: System inputs.
:type system_inputs: dict
"""
_attribute_map = {
'event_payload': {'key': 'eventPayload', 'type': 'str'},
'event_type': {'key': 'eventType', 'type': 'str'},
'system_inputs': {'key': 'systemInputs', 'type': '{str}'}
}
def __init__(self, event_payload=None, event_type=None, system_inputs=None):
super(EventTransformRequest, self).__init__()
self.event_payload = event_payload
self.event_type = event_type
self.system_inputs = system_inputs
class EventTransformResult(Model):
"""
Result of transforming a notification event.
:param content: Transformed html content.
:type content: str
:param data: Calculated data.
:type data: object
:param system_inputs: Calculated system inputs.
:type system_inputs: dict
"""
_attribute_map = {
'content': {'key': 'content', 'type': 'str'},
'data': {'key': 'data', 'type': 'object'},
'system_inputs': {'key': 'systemInputs', 'type': '{str}'}
}
def __init__(self, content=None, data=None, system_inputs=None):
super(EventTransformResult, self).__init__()
self.content = content
self.data = data
self.system_inputs = system_inputs
class ExpressionFilterClause(Model):
"""
Subscription Filter Clause represents a single clause in a subscription filter. For example, the criteria "Project Name = [Current Project] AND Assigned To = [Me]" is represented as two filter clauses: Clause 1: Index = 1, Logical Operator = NULL, FieldName = 'Project Name', Operator = '=', Value = '[Current Project]'; Clause 2: Index = 2, Logical Operator = 'AND', FieldName = 'Assigned To', Operator = '=', Value = '[Me]'
:param field_name:
:type field_name: str
:param index: The order in which this clause appeared in the filter query
:type index: int
:param logical_operator: Logical Operator 'AND', 'OR' or NULL (only for the first clause in the filter)
:type logical_operator: str
:param operator:
:type operator: str
:param value:
:type value: str
"""
_attribute_map = {
'field_name': {'key': 'fieldName', 'type': 'str'},
'index': {'key': 'index', 'type': 'int'},
'logical_operator': {'key': 'logicalOperator', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, field_name=None, index=None, logical_operator=None, operator=None, value=None):
super(ExpressionFilterClause, self).__init__()
self.field_name = field_name
self.index = index
self.logical_operator = logical_operator
self.operator = operator
self.value = value
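# Editorial example: the two-clause filter described in the docstring above,
# built explicitly. Field names and values simply mirror that example.
if __name__ == '__main__':
    _clause1 = ExpressionFilterClause(index=1, field_name='Project Name',
                                      operator='=', value='[Current Project]')
    _clause2 = ExpressionFilterClause(index=2, logical_operator='AND',
                                      field_name='Assigned To', operator='=',
                                      value='[Me]')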
class ExpressionFilterGroup(Model):
"""
Represents a hierarchy of SubscriptionFilterClauses that have been grouped together, either by adding a group in the Web UI or by using parentheses in the subscription condition string
:param end: The index of the last FilterClause in this group
:type end: int
:param level: Level of the group, since groups can be nested for each nested group the level will increase by 1
:type level: int
:param start: The index of the first FilterClause in this group
:type start: int
"""
_attribute_map = {
'end': {'key': 'end', 'type': 'int'},
'level': {'key': 'level', 'type': 'int'},
'start': {'key': 'start', 'type': 'int'}
}
def __init__(self, end=None, level=None, start=None):
super(ExpressionFilterGroup, self).__init__()
self.end = end
self.level = level
self.start = start
class ExpressionFilterModel(Model):
"""
:param clauses: Flat list of clauses in this subscription
:type clauses: list of :class:`ExpressionFilterClause <azure.devops.v7_0.notification.models.ExpressionFilterClause>`
:param groups: Grouping of clauses in the subscription
:type groups: list of :class:`ExpressionFilterGroup <azure.devops.v7_0.notification.models.ExpressionFilterGroup>`
:param max_group_level: Max depth of the Subscription tree
:type max_group_level: int
"""
_attribute_map = {
'clauses': {'key': 'clauses', 'type': '[ExpressionFilterClause]'},
'groups': {'key': 'groups', 'type': '[ExpressionFilterGroup]'},
'max_group_level': {'key': 'maxGroupLevel', 'type': 'int'}
}
def __init__(self, clauses=None, groups=None, max_group_level=None):
super(ExpressionFilterModel, self).__init__()
self.clauses = clauses
self.groups = groups
self.max_group_level = max_group_level
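# Editorial example (hedged sketch): combining clauses into an
# ExpressionFilterModel. The single group marks clauses 1-2 as one level-1
# parenthesized unit, matching the start/end/level semantics documented on
# ExpressionFilterGroup.
if __name__ == '__main__':
    _filter_model = ExpressionFilterModel(
        clauses=[
            ExpressionFilterClause(index=1, field_name='Project Name',
                                   operator='=', value='[Current Project]'),
            ExpressionFilterClause(index=2, logical_operator='AND',
                                   field_name='Assigned To', operator='=',
                                   value='[Me]'),
        ],
        groups=[ExpressionFilterGroup(start=1, end=2, level=1)],
        max_group_level=1)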
class GraphSubjectBase(Model):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None):
super(GraphSubjectBase, self).__init__()
self._links = _links
self.descriptor = descriptor
self.display_name = display_name
self.url = url
class IdentityRef(GraphSubjectBase):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param directory_alias: Deprecated - Can be retrieved by querying the Graph user referenced in the "self" entry of the IdentityRef "_links" dictionary
:type directory_alias: str
:param id:
:type id: str
:param image_url: Deprecated - Available in the "avatar" entry of the IdentityRef "_links" dictionary
:type image_url: str
:param inactive: Deprecated - Can be retrieved by querying the Graph membership state referenced in the "membershipState" entry of the GraphUser "_links" dictionary
:type inactive: bool
:param is_aad_identity: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsAadUserType/Descriptor.IsAadGroupType)
:type is_aad_identity: bool
:param is_container: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsGroupType)
:type is_container: bool
:param is_deleted_in_origin:
:type is_deleted_in_origin: bool
:param profile_url: Deprecated - not in use in most preexisting implementations of ToIdentityRef
:type profile_url: str
:param unique_name: Deprecated - use Domain+PrincipalName instead
:type unique_name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'directory_alias': {'key': 'directoryAlias', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'image_url': {'key': 'imageUrl', 'type': 'str'},
'inactive': {'key': 'inactive', 'type': 'bool'},
'is_aad_identity': {'key': 'isAadIdentity', 'type': 'bool'},
'is_container': {'key': 'isContainer', 'type': 'bool'},
'is_deleted_in_origin': {'key': 'isDeletedInOrigin', 'type': 'bool'},
'profile_url': {'key': 'profileUrl', 'type': 'str'},
'unique_name': {'key': 'uniqueName', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, directory_alias=None, id=None, image_url=None, inactive=None, is_aad_identity=None, is_container=None, is_deleted_in_origin=None, profile_url=None, unique_name=None):
super(IdentityRef, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url)
self.directory_alias = directory_alias
self.id = id
self.image_url = image_url
self.inactive = inactive
self.is_aad_identity = is_aad_identity
self.is_container = is_container
self.is_deleted_in_origin = is_deleted_in_origin
self.profile_url = profile_url
self.unique_name = unique_name
class INotificationDiagnosticLog(Model):
"""
Abstraction interface for the diagnostic log. Primarily for deserialization.
:param activity_id: Identifier used for correlating to other diagnostics that may have been recorded elsewhere.
:type activity_id: str
:param description: Description of what subscription or notification job is being logged.
:type description: str
:param end_time: Time the log ended.
:type end_time: datetime
:param id: Unique instance identifier.
:type id: str
:param log_type: Type of information being logged.
:type log_type: str
:param messages: List of log messages.
:type messages: list of :class:`NotificationDiagnosticLogMessage <azure.devops.v7_0.notification.models.NotificationDiagnosticLogMessage>`
:param properties: Dictionary of log properties and settings for the job.
:type properties: dict
:param source: This identifier depends on the logType. For notification jobs, this will be the job Id. For subscription tracing, this will be a special root Guid with the subscription Id encoded.
:type source: str
:param start_time: Time the log started.
:type start_time: datetime
"""
_attribute_map = {
'activity_id': {'key': 'activityId', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'str'},
'log_type': {'key': 'logType', 'type': 'str'},
'messages': {'key': 'messages', 'type': '[NotificationDiagnosticLogMessage]'},
'properties': {'key': 'properties', 'type': '{str}'},
'source': {'key': 'source', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'}
}
def __init__(self, activity_id=None, description=None, end_time=None, id=None, log_type=None, messages=None, properties=None, source=None, start_time=None):
super(INotificationDiagnosticLog, self).__init__()
self.activity_id = activity_id
self.description = description
self.end_time = end_time
self.id = id
self.log_type = log_type
self.messages = messages
self.properties = properties
self.source = source
self.start_time = start_time
class InputValue(Model):
"""
Information about a single value for an input
:param data: Any other data about this input
:type data: dict
:param display_value: The text to show for the display of this value
:type display_value: str
:param value: The value to store for this input
:type value: str
"""
_attribute_map = {
'data': {'key': 'data', 'type': '{object}'},
'display_value': {'key': 'displayValue', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, data=None, display_value=None, value=None):
super(InputValue, self).__init__()
self.data = data
self.display_value = display_value
self.value = value
class InputValues(Model):
"""
Information about the possible/allowed values for a given subscription input
:param default_value: The default value to use for this input
:type default_value: str
:param error: Errors encountered while computing dynamic values.
:type error: :class:`InputValuesError <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.InputValuesError>`
:param input_id: The id of the input
:type input_id: str
:param is_disabled: Should this input be disabled
:type is_disabled: bool
:param is_limited_to_possible_values: Should the value be restricted to one of the values in the PossibleValues (True) or are the values in PossibleValues just a suggestion (False)
:type is_limited_to_possible_values: bool
:param is_read_only: Should this input be made read-only
:type is_read_only: bool
:param possible_values: Possible values that this input can take
:type possible_values: list of :class:`InputValue <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.InputValue>`
"""
_attribute_map = {
'default_value': {'key': 'defaultValue', 'type': 'str'},
'error': {'key': 'error', 'type': 'InputValuesError'},
'input_id': {'key': 'inputId', 'type': 'str'},
'is_disabled': {'key': 'isDisabled', 'type': 'bool'},
'is_limited_to_possible_values': {'key': 'isLimitedToPossibleValues', 'type': 'bool'},
'is_read_only': {'key': 'isReadOnly', 'type': 'bool'},
'possible_values': {'key': 'possibleValues', 'type': '[InputValue]'}
}
def __init__(self, default_value=None, error=None, input_id=None, is_disabled=None, is_limited_to_possible_values=None, is_read_only=None, possible_values=None):
super(InputValues, self).__init__()
self.default_value = default_value
self.error = error
self.input_id = input_id
self.is_disabled = is_disabled
self.is_limited_to_possible_values = is_limited_to_possible_values
self.is_read_only = is_read_only
self.possible_values = possible_values
class InputValuesError(Model):
"""
Error information related to a subscription input value.
:param message: The error message.
:type message: str
"""
_attribute_map = {
'message': {'key': 'message', 'type': 'str'}
}
def __init__(self, message=None):
super(InputValuesError, self).__init__()
self.message = message
class InputValuesQuery(Model):
"""
:param current_values:
:type current_values: dict
:param input_values: The input values to return on input, and the result from the consumer on output.
:type input_values: list of :class:`InputValues <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.InputValues>`
:param resource: Subscription containing information about the publisher/consumer and the current input values
:type resource: object
"""
_attribute_map = {
'current_values': {'key': 'currentValues', 'type': '{str}'},
'input_values': {'key': 'inputValues', 'type': '[InputValues]'},
'resource': {'key': 'resource', 'type': 'object'}
}
def __init__(self, current_values=None, input_values=None, resource=None):
super(InputValuesQuery, self).__init__()
self.current_values = current_values
self.input_values = input_values
self.resource = resource
class ISubscriptionFilter(Model):
"""
:param event_type:
:type event_type: str
:param type:
:type type: str
"""
_attribute_map = {
'event_type': {'key': 'eventType', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, event_type=None, type=None):
super(ISubscriptionFilter, self).__init__()
self.event_type = event_type
self.type = type
class ISubscriptionChannel(Model):
"""
:param type:
:type type: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, type=None):
super(ISubscriptionChannel, self).__init__()
self.type = type
class NotificationAdminSettings(Model):
"""
:param default_group_delivery_preference: The default group delivery preference for groups in this collection
:type default_group_delivery_preference: object
"""
_attribute_map = {
'default_group_delivery_preference': {'key': 'defaultGroupDeliveryPreference', 'type': 'object'}
}
def __init__(self, default_group_delivery_preference=None):
super(NotificationAdminSettings, self).__init__()
self.default_group_delivery_preference = default_group_delivery_preference
class NotificationAdminSettingsUpdateParameters(Model):
"""
:param default_group_delivery_preference:
:type default_group_delivery_preference: object
"""
_attribute_map = {
'default_group_delivery_preference': {'key': 'defaultGroupDeliveryPreference', 'type': 'object'}
}
def __init__(self, default_group_delivery_preference=None):
super(NotificationAdminSettingsUpdateParameters, self).__init__()
self.default_group_delivery_preference = default_group_delivery_preference
class NotificationDiagnosticLogMessage(Model):
"""
:param level: Corresponds to .Net TraceLevel enumeration
:type level: int
:param message:
:type message: str
:param time:
:type time: object
"""
_attribute_map = {
'level': {'key': 'level', 'type': 'int'},
'message': {'key': 'message', 'type': 'str'},
'time': {'key': 'time', 'type': 'object'}
}
def __init__(self, level=None, message=None, time=None):
super(NotificationDiagnosticLogMessage, self).__init__()
self.level = level
self.message = message
self.time = time
class NotificationEventField(Model):
"""
Encapsulates the properties of a filterable field. A filterable field is a field in an event that can be used to filter notifications for a certain event type.
:param field_type: Gets or sets the type of this field.
:type field_type: :class:`NotificationEventFieldType <azure.devops.v7_0.notification.models.NotificationEventFieldType>`
:param id: Gets or sets the unique identifier of this field.
:type id: str
:param name: Gets or sets the name of this field.
:type name: str
:param path: Gets or sets the path to the field in the event object. This path can be either JSON Path or XPath, depending on whether the event will be serialized into JSON or XML
:type path: str
:param supported_scopes: Gets or sets the scopes that this field supports. If not specified then the event type scopes apply.
:type supported_scopes: list of str
"""
_attribute_map = {
'field_type': {'key': 'fieldType', 'type': 'NotificationEventFieldType'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
'supported_scopes': {'key': 'supportedScopes', 'type': '[str]'}
}
def __init__(self, field_type=None, id=None, name=None, path=None, supported_scopes=None):
super(NotificationEventField, self).__init__()
self.field_type = field_type
self.id = id
self.name = name
self.path = path
self.supported_scopes = supported_scopes
class NotificationEventFieldOperator(Model):
"""
Encapsulates the properties of a field operator. It includes a unique id for the operator and a localized string for its display name
:param display_name: Gets or sets the display name of an operator
:type display_name: str
:param id: Gets or sets the id of an operator
:type id: str
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'}
}
def __init__(self, display_name=None, id=None):
super(NotificationEventFieldOperator, self).__init__()
self.display_name = display_name
self.id = id
class NotificationEventFieldType(Model):
"""
Encapsulates the properties of a field type. It describes the data type of a field, the operators it support and how to populate it in the UI
:param id: Gets or sets the unique identifier of this field type.
:type id: str
:param operator_constraints:
:type operator_constraints: list of :class:`OperatorConstraint <azure.devops.v7_0.notification.models.OperatorConstraint>`
:param operators: Gets or sets the list of operators that this type supports.
:type operators: list of :class:`NotificationEventFieldOperator <azure.devops.v7_0.notification.models.NotificationEventFieldOperator>`
:param subscription_field_type:
:type subscription_field_type: object
:param value: Gets or sets the value definition of this field like the getValuesMethod and template to display in the UI
:type value: :class:`ValueDefinition <azure.devops.v7_0.notification.models.ValueDefinition>`
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'operator_constraints': {'key': 'operatorConstraints', 'type': '[OperatorConstraint]'},
'operators': {'key': 'operators', 'type': '[NotificationEventFieldOperator]'},
'subscription_field_type': {'key': 'subscriptionFieldType', 'type': 'object'},
'value': {'key': 'value', 'type': 'ValueDefinition'}
}
def __init__(self, id=None, operator_constraints=None, operators=None, subscription_field_type=None, value=None):
super(NotificationEventFieldType, self).__init__()
self.id = id
self.operator_constraints = operator_constraints
self.operators = operators
self.subscription_field_type = subscription_field_type
self.value = value
class NotificationEventPublisher(Model):
"""
Encapsulates the properties of a notification event publisher.
:param id:
:type id: str
:param subscription_management_info:
:type subscription_management_info: :class:`SubscriptionManagement <azure.devops.v7_0.notification.models.SubscriptionManagement>`
:param url:
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'subscription_management_info': {'key': 'subscriptionManagementInfo', 'type': 'SubscriptionManagement'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, subscription_management_info=None, url=None):
super(NotificationEventPublisher, self).__init__()
self.id = id
self.subscription_management_info = subscription_management_info
self.url = url
class NotificationEventRole(Model):
"""
Encapsulates the properties of an event role. An event role is used for role-based subscriptions; for example, for a buildCompletedEvent, one role is the 'requested by' field
:param id: Gets or sets an Id for that role, this id is used by the event.
:type id: str
:param name: Gets or sets the Name for that role, this name is used for UI display.
:type name: str
:param supports_groups: Gets or sets whether this role can be a group or just an individual user
:type supports_groups: bool
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'supports_groups': {'key': 'supportsGroups', 'type': 'bool'}
}
def __init__(self, id=None, name=None, supports_groups=None):
super(NotificationEventRole, self).__init__()
self.id = id
self.name = name
self.supports_groups = supports_groups
class NotificationEventType(Model):
"""
Encapsulates the properties of an event type. It defines the fields that can be used for filtering for that event type.
:param category:
:type category: :class:`NotificationEventTypeCategory <azure.devops.v7_0.notification.models.NotificationEventTypeCategory>`
:param color: Gets or sets the color representing this event type. Example: rgb(128,245,211) or #fafafa
:type color: str
:param custom_subscriptions_allowed:
:type custom_subscriptions_allowed: bool
:param event_publisher:
:type event_publisher: :class:`NotificationEventPublisher <azure.devops.v7_0.notification.models.NotificationEventPublisher>`
:param fields:
:type fields: dict
:param has_initiator:
:type has_initiator: bool
:param icon: Gets or sets the icon representing this event type. Can be a URL or a CSS class. Example: css://some-css-class
:type icon: str
:param id: Gets or sets the unique identifier of this event definition.
:type id: str
:param name: Gets or sets the name of this event definition.
:type name: str
:param roles:
:type roles: list of :class:`NotificationEventRole <azure.devops.v7_0.notification.models.NotificationEventRole>`
:param supported_scopes: Gets or sets the scopes that this event type supports
:type supported_scopes: list of str
:param url: Gets or sets the REST endpoint to get this event type's details (fields, field types)
:type url: str
"""
_attribute_map = {
'category': {'key': 'category', 'type': 'NotificationEventTypeCategory'},
'color': {'key': 'color', 'type': 'str'},
'custom_subscriptions_allowed': {'key': 'customSubscriptionsAllowed', 'type': 'bool'},
'event_publisher': {'key': 'eventPublisher', 'type': 'NotificationEventPublisher'},
'fields': {'key': 'fields', 'type': '{NotificationEventField}'},
'has_initiator': {'key': 'hasInitiator', 'type': 'bool'},
'icon': {'key': 'icon', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'roles': {'key': 'roles', 'type': '[NotificationEventRole]'},
'supported_scopes': {'key': 'supportedScopes', 'type': '[str]'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, category=None, color=None, custom_subscriptions_allowed=None, event_publisher=None, fields=None, has_initiator=None, icon=None, id=None, name=None, roles=None, supported_scopes=None, url=None):
super(NotificationEventType, self).__init__()
self.category = category
self.color = color
self.custom_subscriptions_allowed = custom_subscriptions_allowed
self.event_publisher = event_publisher
self.fields = fields
self.has_initiator = has_initiator
self.icon = icon
self.id = id
self.name = name
self.roles = roles
self.supported_scopes = supported_scopes
self.url = url
class NotificationEventTypeCategory(Model):
"""
Encapsulates the properties of a category. A category will be used by the UI to group event types
:param id: Gets or sets the unique identifier of this category.
:type id: str
:param name: Gets or sets the friendly name of this category.
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, name=None):
super(NotificationEventTypeCategory, self).__init__()
self.id = id
self.name = name
class NotificationQueryCondition(Model):
"""
:param event_initiator:
:type event_initiator: str
:param event_type:
:type event_type: str
:param subscriber:
:type subscriber: str
:param subscription_id:
:type subscription_id: str
"""
_attribute_map = {
'event_initiator': {'key': 'eventInitiator', 'type': 'str'},
'event_type': {'key': 'eventType', 'type': 'str'},
'subscriber': {'key': 'subscriber', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'}
}
def __init__(self, event_initiator=None, event_type=None, subscriber=None, subscription_id=None):
super(NotificationQueryCondition, self).__init__()
self.event_initiator = event_initiator
self.event_type = event_type
self.subscriber = subscriber
self.subscription_id = subscription_id
class NotificationReason(Model):
"""
:param notification_reason_type:
:type notification_reason_type: object
:param target_identities:
:type target_identities: list of :class:`IdentityRef <azure.devops.v7_0.notification.models.IdentityRef>`
"""
_attribute_map = {
'notification_reason_type': {'key': 'notificationReasonType', 'type': 'object'},
'target_identities': {'key': 'targetIdentities', 'type': '[IdentityRef]'}
}
def __init__(self, notification_reason_type=None, target_identities=None):
super(NotificationReason, self).__init__()
self.notification_reason_type = notification_reason_type
self.target_identities = target_identities
class NotificationsEvaluationResult(Model):
"""
Encapsulates notifications result properties. It defines the number of notifications and the recipients of notifications.
:param count: Count of generated notifications
:type count: int
"""
_attribute_map = {
'count': {'key': 'count', 'type': 'int'}
}
def __init__(self, count=None):
super(NotificationsEvaluationResult, self).__init__()
self.count = count
class NotificationStatistic(Model):
"""
:param date:
:type date: datetime
:param hit_count:
:type hit_count: int
:param path:
:type path: str
:param type:
:type type: object
:param user:
:type user: :class:`IdentityRef <azure.devops.v7_0.notification.models.IdentityRef>`
"""
_attribute_map = {
'date': {'key': 'date', 'type': 'iso-8601'},
'hit_count': {'key': 'hitCount', 'type': 'int'},
'path': {'key': 'path', 'type': 'str'},
'type': {'key': 'type', 'type': 'object'},
'user': {'key': 'user', 'type': 'IdentityRef'}
}
def __init__(self, date=None, hit_count=None, path=None, type=None, user=None):
super(NotificationStatistic, self).__init__()
self.date = date
self.hit_count = hit_count
self.path = path
self.type = type
self.user = user
class NotificationStatisticsQuery(Model):
"""
:param conditions:
:type conditions: list of :class:`NotificationStatisticsQueryConditions <azure.devops.v7_0.notification.models.NotificationStatisticsQueryConditions>`
"""
_attribute_map = {
'conditions': {'key': 'conditions', 'type': '[NotificationStatisticsQueryConditions]'}
}
def __init__(self, conditions=None):
super(NotificationStatisticsQuery, self).__init__()
self.conditions = conditions
class NotificationStatisticsQueryConditions(Model):
"""
:param end_date:
:type end_date: datetime
:param hit_count_minimum:
:type hit_count_minimum: int
:param path:
:type path: str
:param start_date:
:type start_date: datetime
:param type:
:type type: object
:param user:
:type user: :class:`IdentityRef <azure.devops.v7_0.notification.models.IdentityRef>`
"""
_attribute_map = {
'end_date': {'key': 'endDate', 'type': 'iso-8601'},
'hit_count_minimum': {'key': 'hitCountMinimum', 'type': 'int'},
'path': {'key': 'path', 'type': 'str'},
'start_date': {'key': 'startDate', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'object'},
'user': {'key': 'user', 'type': 'IdentityRef'}
}
def __init__(self, end_date=None, hit_count_minimum=None, path=None, start_date=None, type=None, user=None):
super(NotificationStatisticsQueryConditions, self).__init__()
self.end_date = end_date
self.hit_count_minimum = hit_count_minimum
self.path = path
self.start_date = start_date
self.type = type
self.user = user
class NotificationSubscriber(Model):
"""
A subscriber is a user or group that has the potential to receive notifications.
:param delivery_preference: Indicates how the subscriber should be notified by default.
:type delivery_preference: object
:param flags:
:type flags: object
:param id: Identifier of the subscriber.
:type id: str
:param preferred_email_address: Preferred email address of the subscriber. A null or empty value indicates no preferred email address has been set.
:type preferred_email_address: str
"""
_attribute_map = {
'delivery_preference': {'key': 'deliveryPreference', 'type': 'object'},
'flags': {'key': 'flags', 'type': 'object'},
'id': {'key': 'id', 'type': 'str'},
'preferred_email_address': {'key': 'preferredEmailAddress', 'type': 'str'}
}
def __init__(self, delivery_preference=None, flags=None, id=None, preferred_email_address=None):
super(NotificationSubscriber, self).__init__()
self.delivery_preference = delivery_preference
self.flags = flags
self.id = id
self.preferred_email_address = preferred_email_address
class NotificationSubscriberUpdateParameters(Model):
"""
Updates to a subscriber. Typically used to change (or set) a preferred email address or default delivery preference.
:param delivery_preference: New delivery preference for the subscriber (indicates how the subscriber should be notified).
:type delivery_preference: object
:param preferred_email_address: New preferred email address for the subscriber. Specify an empty string to clear the current address.
:type preferred_email_address: str
"""
_attribute_map = {
'delivery_preference': {'key': 'deliveryPreference', 'type': 'object'},
'preferred_email_address': {'key': 'preferredEmailAddress', 'type': 'str'}
}
def __init__(self, delivery_preference=None, preferred_email_address=None):
super(NotificationSubscriberUpdateParameters, self).__init__()
self.delivery_preference = delivery_preference
self.preferred_email_address = preferred_email_address
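# Editorial example: per the docstring above, an empty string (not None)
# clears a subscriber's preferred email address, while fields left as None are
# simply not updated. The address below is a placeholder.
if __name__ == '__main__':
    _clear_email = NotificationSubscriberUpdateParameters(preferred_email_address='')
    _set_email = NotificationSubscriberUpdateParameters(
        preferred_email_address='alerts@fabrikam.example')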
class NotificationSubscription(Model):
"""
A subscription defines criteria for matching events and how the subscription's subscriber should be notified about those events.
:param _links: Links to related resources, APIs, and views for the subscription.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.notification.models.ReferenceLinks>`
:param admin_settings: Admin-managed settings for the subscription. Only applies when the subscriber is a group.
:type admin_settings: :class:`SubscriptionAdminSettings <azure.devops.v7_0.notification.models.SubscriptionAdminSettings>`
:param description: Description of the subscription. Typically describes filter criteria which helps identify the subscription.
:type description: str
:param diagnostics: Diagnostics for this subscription.
:type diagnostics: :class:`SubscriptionDiagnostics <azure.devops.v7_0.notification.models.SubscriptionDiagnostics>`
:param extended_properties: Any extra properties like detailed description for different contexts, user/group contexts
:type extended_properties: dict
:param filter: Matching criteria for the subscription. ExpressionFilter
:type filter: :class:`ISubscriptionFilter <azure.devops.v7_0.notification.models.ISubscriptionFilter>`
:param flags: Read-only indicators that further describe the subscription.
:type flags: object
:param channel: Channel for delivering notifications triggered by the subscription.
:type channel: :class:`ISubscriptionChannel <azure.devops.v7_0.notification.models.ISubscriptionChannel>`
:param id: Subscription identifier.
:type id: str
:param last_modified_by: User that last modified (or created) the subscription.
:type last_modified_by: :class:`IdentityRef <azure.devops.v7_0.notification.models.IdentityRef>`
:param modified_date: Date when the subscription was last modified. If the subscription has not been updated since it was created, this value will indicate when the subscription was created.
:type modified_date: datetime
:param permissions: The permissions the user has for this subscription.
:type permissions: object
:param scope: The container from which events must be published in order to be matched by the subscription. If empty, the scope is the current host (typically an account or project collection). For example, a subscription scoped to project A will not produce notifications for events published from project B.
:type scope: :class:`SubscriptionScope <azure.devops.v7_0.notification.models.SubscriptionScope>`
:param status: Status of the subscription. Typically indicates whether the subscription is enabled or not.
:type status: object
:param status_message: Message that provides more details about the status of the subscription.
:type status_message: str
:param subscriber: User or group that will receive notifications for events matching the subscription's filter criteria.
:type subscriber: :class:`IdentityRef <azure.devops.v7_0.notification.models.IdentityRef>`
:param url: REST API URL of the subscription.
:type url: str
:param user_settings: User-managed settings for the subscription. Only applies when the subscriber is a group. Typically used to indicate whether the calling user is opted in or out of a group subscription.
:type user_settings: :class:`SubscriptionUserSettings <azure.devops.v7_0.notification.models.SubscriptionUserSettings>`
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'admin_settings': {'key': 'adminSettings', 'type': 'SubscriptionAdminSettings'},
'description': {'key': 'description', 'type': 'str'},
'diagnostics': {'key': 'diagnostics', 'type': 'SubscriptionDiagnostics'},
'extended_properties': {'key': 'extendedProperties', 'type': '{str}'},
'filter': {'key': 'filter', 'type': 'ISubscriptionFilter'},
'flags': {'key': 'flags', 'type': 'object'},
'channel': {'key': 'channel', 'type': 'ISubscriptionChannel'},
'id': {'key': 'id', 'type': 'str'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'IdentityRef'},
'modified_date': {'key': 'modifiedDate', 'type': 'iso-8601'},
'permissions': {'key': 'permissions', 'type': 'object'},
'scope': {'key': 'scope', 'type': 'SubscriptionScope'},
'status': {'key': 'status', 'type': 'object'},
'status_message': {'key': 'statusMessage', 'type': 'str'},
'subscriber': {'key': 'subscriber', 'type': 'IdentityRef'},
'url': {'key': 'url', 'type': 'str'},
'user_settings': {'key': 'userSettings', 'type': 'SubscriptionUserSettings'}
}
def __init__(self, _links=None, admin_settings=None, description=None, diagnostics=None, extended_properties=None, filter=None, flags=None, channel=None, id=None, last_modified_by=None, modified_date=None, permissions=None, scope=None, status=None, status_message=None, subscriber=None, url=None, user_settings=None):
super(NotificationSubscription, self).__init__()
self._links = _links
self.admin_settings = admin_settings
self.description = description
self.diagnostics = diagnostics
self.extended_properties = extended_properties
self.filter = filter
self.flags = flags
self.channel = channel
self.id = id
self.last_modified_by = last_modified_by
self.modified_date = modified_date
self.permissions = permissions
self.scope = scope
self.status = status
self.status_message = status_message
self.subscriber = subscriber
self.url = url
self.user_settings = user_settings
class NotificationSubscriptionCreateParameters(Model):
"""
Parameters for creating a new subscription. A subscription defines criteria for matching events and how the subscription's subscriber should be notified about those events.
:param description: Brief description for the new subscription. Typically describes filter criteria which helps identify the subscription.
:type description: str
:param filter: Matching criteria for the new subscription. ExpressionFilter
:type filter: :class:`ISubscriptionFilter <azure.devops.v7_0.notification.models.ISubscriptionFilter>`
:param channel: Channel for delivering notifications triggered by the new subscription.
:type channel: :class:`ISubscriptionChannel <azure.devops.v7_0.notification.models.ISubscriptionChannel>`
:param scope: The container from which events must be published in order to be matched by the new subscription. If not specified, defaults to the current host (typically an account or project collection). For example, a subscription scoped to project A will not produce notifications for events published from project B.
:type scope: :class:`SubscriptionScope <azure.devops.v7_0.notification.models.SubscriptionScope>`
:param subscriber: User or group that will receive notifications for events matching the subscription's filter criteria. If not specified, defaults to the calling user.
:type subscriber: :class:`IdentityRef <azure.devops.v7_0.notification.models.IdentityRef>`
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'filter': {'key': 'filter', 'type': 'ISubscriptionFilter'},
'channel': {'key': 'channel', 'type': 'ISubscriptionChannel'},
'scope': {'key': 'scope', 'type': 'SubscriptionScope'},
'subscriber': {'key': 'subscriber', 'type': 'IdentityRef'}
}
def __init__(self, description=None, filter=None, channel=None, scope=None, subscriber=None):
super(NotificationSubscriptionCreateParameters, self).__init__()
self.description = description
self.filter = filter
self.channel = channel
self.scope = scope
self.subscriber = subscriber
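# Editorial example (hedged sketch): create parameters for a new subscription.
# The event type id and the filter type string 'Expression' are assumptions
# for illustration; consult the service's event type catalog for real values.
# Scope and subscriber are omitted, so they default to the current host and
# the calling user as documented above.
if __name__ == '__main__':
    _create_params = NotificationSubscriptionCreateParameters(
        description='Work item changes assigned to me',
        filter=ISubscriptionFilter(
            event_type='ms.vss-work.workitem-changed-event',  # assumed id
            type='Expression'))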
class NotificationSubscriptionTemplate(Model):
"""
:param description:
:type description: str
:param filter:
:type filter: :class:`ISubscriptionFilter <azure.devops.v7_0.notification.models.ISubscriptionFilter>`
:param id:
:type id: str
:param notification_event_information:
:type notification_event_information: :class:`NotificationEventType <azure.devops.v7_0.notification.models.NotificationEventType>`
:param type:
:type type: object
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'filter': {'key': 'filter', 'type': 'ISubscriptionFilter'},
'id': {'key': 'id', 'type': 'str'},
'notification_event_information': {'key': 'notificationEventInformation', 'type': 'NotificationEventType'},
'type': {'key': 'type', 'type': 'object'}
}
def __init__(self, description=None, filter=None, id=None, notification_event_information=None, type=None):
super(NotificationSubscriptionTemplate, self).__init__()
self.description = description
self.filter = filter
self.id = id
self.notification_event_information = notification_event_information
self.type = type
class NotificationSubscriptionUpdateParameters(Model):
"""
Parameters for updating an existing subscription. A subscription defines criteria for matching events and how the subscription's subscriber should be notified about those events. Note: only the fields to be updated should be set.
:param admin_settings: Admin-managed settings for the subscription. Only applies to subscriptions where the subscriber is a group.
:type admin_settings: :class:`SubscriptionAdminSettings <azure.devops.v7_0.notification.models.SubscriptionAdminSettings>`
:param description: Updated description for the subscription. Typically describes filter criteria which helps identify the subscription.
:type description: str
:param filter: Matching criteria for the subscription. ExpressionFilter
:type filter: :class:`ISubscriptionFilter <azure.devops.v7_0.notification.models.ISubscriptionFilter>`
:param channel: Channel for delivering notifications triggered by the subscription.
:type channel: :class:`ISubscriptionChannel <azure.devops.v7_0.notification.models.ISubscriptionChannel>`
:param scope: The container from which events must be published in order to be matched by the new subscription. If not specified, defaults to the current host (typically the current account or project collection). For example, a subscription scoped to project A will not produce notifications for events published from project B.
:type scope: :class:`SubscriptionScope <azure.devops.v7_0.notification.models.SubscriptionScope>`
:param status: Updated status for the subscription. Typically used to enable or disable a subscription.
:type status: object
:param status_message: Optional message that provides more details about the updated status.
:type status_message: str
:param user_settings: User-managed settings for the subscription. Only applies to subscriptions where the subscriber is a group. Typically used to opt-in or opt-out a user from a group subscription.
:type user_settings: :class:`SubscriptionUserSettings <azure.devops.v7_0.notification.models.SubscriptionUserSettings>`
"""
_attribute_map = {
'admin_settings': {'key': 'adminSettings', 'type': 'SubscriptionAdminSettings'},
'description': {'key': 'description', 'type': 'str'},
'filter': {'key': 'filter', 'type': 'ISubscriptionFilter'},
'channel': {'key': 'channel', 'type': 'ISubscriptionChannel'},
'scope': {'key': 'scope', 'type': 'SubscriptionScope'},
'status': {'key': 'status', 'type': 'object'},
'status_message': {'key': 'statusMessage', 'type': 'str'},
'user_settings': {'key': 'userSettings', 'type': 'SubscriptionUserSettings'}
}
def __init__(self, admin_settings=None, description=None, filter=None, channel=None, scope=None, status=None, status_message=None, user_settings=None):
super(NotificationSubscriptionUpdateParameters, self).__init__()
self.admin_settings = admin_settings
self.description = description
self.filter = filter
self.channel = channel
self.scope = scope
self.status = status
self.status_message = status_message
self.user_settings = user_settings
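# Editorial example: because only the fields that are set get applied (see the
# class docstring), disabling a subscription needs nothing but a status value.
# The status literal below is an assumption; the service defines the actual
# SubscriptionStatus values.
if __name__ == '__main__':
    _disable = NotificationSubscriptionUpdateParameters(
        status='disabledByUser',  # assumed status literal
        status_message='Disabled while reorganizing team area paths')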
class OperatorConstraint(Model):
"""
Encapsulates the properties of an operator constraint. An operator constraint defines whether an operator is available only for a specific scope, such as a project scope.
:param operator:
:type operator: str
:param supported_scopes: Gets or sets the list of scopes that this type supports.
:type supported_scopes: list of str
"""
_attribute_map = {
'operator': {'key': 'operator', 'type': 'str'},
'supported_scopes': {'key': 'supportedScopes', 'type': '[str]'}
}
def __init__(self, operator=None, supported_scopes=None):
super(OperatorConstraint, self).__init__()
self.operator = operator
self.supported_scopes = supported_scopes
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The readonly view of the links. Because Reference links are readonly, we only want to expose them as read only.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
class SubscriptionAdminSettings(Model):
"""
Admin-managed settings for a group subscription.
:param block_user_opt_out: If true, members of the group subscribed to the associated subscription cannot opt out (choose not to get notified)
:type block_user_opt_out: bool
"""
_attribute_map = {
'block_user_opt_out': {'key': 'blockUserOptOut', 'type': 'bool'}
}
def __init__(self, block_user_opt_out=None):
super(SubscriptionAdminSettings, self).__init__()
self.block_user_opt_out = block_user_opt_out
class SubscriptionDiagnostics(Model):
"""
Contains all the diagnostics settings for a subscription.
:param delivery_results: Diagnostics settings for retaining delivery results. Used for Service Hooks subscriptions.
:type delivery_results: :class:`SubscriptionTracing <azure.devops.v7_0.notification.models.SubscriptionTracing>`
:param delivery_tracing: Diagnostics settings for troubleshooting notification delivery.
:type delivery_tracing: :class:`SubscriptionTracing <azure.devops.v7_0.notification.models.SubscriptionTracing>`
:param evaluation_tracing: Diagnostics settings for troubleshooting event matching.
:type evaluation_tracing: :class:`SubscriptionTracing <azure.devops.v7_0.notification.models.SubscriptionTracing>`
"""
_attribute_map = {
'delivery_results': {'key': 'deliveryResults', 'type': 'SubscriptionTracing'},
'delivery_tracing': {'key': 'deliveryTracing', 'type': 'SubscriptionTracing'},
'evaluation_tracing': {'key': 'evaluationTracing', 'type': 'SubscriptionTracing'}
}
def __init__(self, delivery_results=None, delivery_tracing=None, evaluation_tracing=None):
super(SubscriptionDiagnostics, self).__init__()
self.delivery_results = delivery_results
self.delivery_tracing = delivery_tracing
self.evaluation_tracing = evaluation_tracing
class SubscriptionEvaluationRequest(Model):
"""
Encapsulates the properties of a SubscriptionEvaluationRequest. It defines the subscription to be evaluated and the time interval for events used in the evaluation.
:param min_events_created_date: The minimum created date, in UTC, for the events used for matching. All events created since this date are used.
:type min_events_created_date: datetime
:param subscription_create_parameters: Parameters describing the subscription to be evaluated.
:type subscription_create_parameters: :class:`NotificationSubscriptionCreateParameters <azure.devops.v7_0.notification.models.NotificationSubscriptionCreateParameters>`
"""
_attribute_map = {
'min_events_created_date': {'key': 'minEventsCreatedDate', 'type': 'iso-8601'},
'subscription_create_parameters': {'key': 'subscriptionCreateParameters', 'type': 'NotificationSubscriptionCreateParameters'}
}
def __init__(self, min_events_created_date=None, subscription_create_parameters=None):
super(SubscriptionEvaluationRequest, self).__init__()
self.min_events_created_date = min_events_created_date
self.subscription_create_parameters = subscription_create_parameters
class SubscriptionEvaluationResult(Model):
"""
Encapsulates the subscription evaluation results. It defines the date interval that was used, the number of events evaluated, and the events and notifications results.
:param evaluation_job_status: Subscription evaluation job status
:type evaluation_job_status: object
:param events: Subscription evaluation events results.
:type events: :class:`EventsEvaluationResult <azure.devops.v7_0.notification.models.EventsEvaluationResult>`
:param id: The requestId which is the subscription evaluation jobId
:type id: str
:param notifications: Subscription evaluation notification results.
:type notifications: :class:`NotificationsEvaluationResult <azure.devops.v7_0.notification.models.NotificationsEvaluationResult>`
"""
_attribute_map = {
'evaluation_job_status': {'key': 'evaluationJobStatus', 'type': 'object'},
'events': {'key': 'events', 'type': 'EventsEvaluationResult'},
'id': {'key': 'id', 'type': 'str'},
'notifications': {'key': 'notifications', 'type': 'NotificationsEvaluationResult'}
}
def __init__(self, evaluation_job_status=None, events=None, id=None, notifications=None):
super(SubscriptionEvaluationResult, self).__init__()
self.evaluation_job_status = evaluation_job_status
self.events = events
self.id = id
self.notifications = notifications
class SubscriptionEvaluationSettings(Model):
"""
Encapsulates the subscription evaluation settings needed for the UI
:param enabled: Indicates whether subscription evaluation before saving is enabled or not
:type enabled: bool
:param interval: Time interval to check on subscription evaluation job in seconds
:type interval: int
:param threshold: Threshold on the number of notifications for considering a subscription too noisy
:type threshold: int
:param time_out: Time out for the subscription evaluation check in seconds
:type time_out: int
"""
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'},
'interval': {'key': 'interval', 'type': 'int'},
'threshold': {'key': 'threshold', 'type': 'int'},
'time_out': {'key': 'timeOut', 'type': 'int'}
}
def __init__(self, enabled=None, interval=None, threshold=None, time_out=None):
super(SubscriptionEvaluationSettings, self).__init__()
self.enabled = enabled
self.interval = interval
self.threshold = threshold
self.time_out = time_out
class SubscriptionChannelWithAddress(Model):
"""
:param address:
:type address: str
:param type:
:type type: str
:param use_custom_address:
:type use_custom_address: bool
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'use_custom_address': {'key': 'useCustomAddress', 'type': 'bool'}
}
def __init__(self, address=None, type=None, use_custom_address=None):
super(SubscriptionChannelWithAddress, self).__init__()
self.address = address
self.type = type
self.use_custom_address = use_custom_address
class SubscriptionManagement(Model):
"""
Encapsulates the properties needed to manage subscriptions, opt in and out of subscriptions.
:param service_instance_type:
:type service_instance_type: str
:param url:
:type url: str
"""
_attribute_map = {
'service_instance_type': {'key': 'serviceInstanceType', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, service_instance_type=None, url=None):
super(SubscriptionManagement, self).__init__()
self.service_instance_type = service_instance_type
self.url = url
class SubscriptionQuery(Model):
"""
Notification subscriptions query input.
:param conditions: One or more conditions to query on. If more than 2 conditions are specified, the combined results of each condition is returned (i.e. conditions are logically OR'ed).
:type conditions: list of :class:`SubscriptionQueryCondition <azure.devops.v7_0.notification.models.SubscriptionQueryCondition>`
:param query_flags: Flags that refine the types of subscriptions that will be returned from the query.
:type query_flags: object
"""
_attribute_map = {
'conditions': {'key': 'conditions', 'type': '[SubscriptionQueryCondition]'},
'query_flags': {'key': 'queryFlags', 'type': 'object'}
}
def __init__(self, conditions=None, query_flags=None):
super(SubscriptionQuery, self).__init__()
self.conditions = conditions
self.query_flags = query_flags
class SubscriptionQueryCondition(Model):
"""
Conditions a subscription must match to qualify for the query result set. Not all fields are required, but a subscription must match all specified conditions in order to qualify for the result set.
:param filter: Filter conditions that matching subscriptions must have. Typically only the filter's type and event type are used for matching.
:type filter: :class:`ISubscriptionFilter <azure.devops.v7_0.notification.models.ISubscriptionFilter>`
:param flags: Flags to specify the types of subscriptions to query for.
:type flags: object
:param scope: Scope that matching subscriptions must have.
:type scope: str
:param subscriber_id: ID of the subscriber (user or group) that matching subscriptions must be subscribed to.
:type subscriber_id: str
:param subscription_id: ID of the subscription to query for.
:type subscription_id: str
"""
_attribute_map = {
'filter': {'key': 'filter', 'type': 'ISubscriptionFilter'},
'flags': {'key': 'flags', 'type': 'object'},
'scope': {'key': 'scope', 'type': 'str'},
'subscriber_id': {'key': 'subscriberId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'}
}
def __init__(self, filter=None, flags=None, scope=None, subscriber_id=None, subscription_id=None):
super(SubscriptionQueryCondition, self).__init__()
self.filter = filter
self.flags = flags
self.scope = scope
self.subscriber_id = subscriber_id
self.subscription_id = subscription_id
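# --- Hedged usage sketch (not part of the generated file) ---
# Composing a SubscriptionQuery from the models above. The subscriber ID and
# scope are hypothetical placeholders; per the SubscriptionQuery docstring,
# multiple conditions are logically OR'ed together.
_example_subscription_query = SubscriptionQuery(
    conditions=[
        SubscriptionQueryCondition(
            subscriber_id='00000000-0000-0000-0000-000000000000'),
        SubscriptionQueryCondition(scope='my-project-id'),
    ])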
class SubscriptionScope(EventScope):
"""
A resource, typically an account or project, from which events are published.
:param id: Required: This is the identity of the scope for the type.
:type id: str
:param name: Optional: The display name of the scope
:type name: str
:param type: Required: The event specific type of a scope.
:type type: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(self, id=None, name=None, type=None):
super(SubscriptionScope, self).__init__(id=id, name=name, type=type)
class SubscriptionTracing(Model):
"""
Data controlling a single diagnostic setting for a subscription.
:param enabled: Indicates whether the diagnostic tracing is enabled or not.
:type enabled: bool
:param end_date: Trace until the specified end date.
:type end_date: datetime
:param max_traced_entries: The maximum number of result details to trace.
:type max_traced_entries: int
:param start_date: The date and time tracing started.
:type start_date: datetime
:param traced_entries: Trace until remaining count reaches 0.
:type traced_entries: int
"""
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'},
'end_date': {'key': 'endDate', 'type': 'iso-8601'},
'max_traced_entries': {'key': 'maxTracedEntries', 'type': 'int'},
'start_date': {'key': 'startDate', 'type': 'iso-8601'},
'traced_entries': {'key': 'tracedEntries', 'type': 'int'}
}
def __init__(self, enabled=None, end_date=None, max_traced_entries=None, start_date=None, traced_entries=None):
super(SubscriptionTracing, self).__init__()
self.enabled = enabled
self.end_date = end_date
self.max_traced_entries = max_traced_entries
self.start_date = start_date
self.traced_entries = traced_entries
class SubscriptionUserSettings(Model):
"""
User-managed settings for a group subscription.
:param opted_out: Indicates whether the user will receive notifications for the associated group subscription.
:type opted_out: bool
"""
_attribute_map = {
'opted_out': {'key': 'optedOut', 'type': 'bool'}
}
def __init__(self, opted_out=None):
super(SubscriptionUserSettings, self).__init__()
self.opted_out = opted_out
class UpdateSubscripitonDiagnosticsParameters(Model):
"""
Parameters to update diagnostics settings for a subscription.
:param delivery_results: Diagnostics settings for retaining delivery results. Used for Service Hooks subscriptions.
:type delivery_results: :class:`UpdateSubscripitonTracingParameters <azure.devops.v7_0.notification.models.UpdateSubscripitonTracingParameters>`
:param delivery_tracing: Diagnostics settings for troubleshooting notification delivery.
:type delivery_tracing: :class:`UpdateSubscripitonTracingParameters <azure.devops.v7_0.notification.models.UpdateSubscripitonTracingParameters>`
:param evaluation_tracing: Diagnostics settings for troubleshooting event matching.
:type evaluation_tracing: :class:`UpdateSubscripitonTracingParameters <azure.devops.v7_0.notification.models.UpdateSubscripitonTracingParameters>`
"""
_attribute_map = {
'delivery_results': {'key': 'deliveryResults', 'type': 'UpdateSubscripitonTracingParameters'},
'delivery_tracing': {'key': 'deliveryTracing', 'type': 'UpdateSubscripitonTracingParameters'},
'evaluation_tracing': {'key': 'evaluationTracing', 'type': 'UpdateSubscripitonTracingParameters'}
}
def __init__(self, delivery_results=None, delivery_tracing=None, evaluation_tracing=None):
super(UpdateSubscripitonDiagnosticsParameters, self).__init__()
self.delivery_results = delivery_results
self.delivery_tracing = delivery_tracing
self.evaluation_tracing = evaluation_tracing
class UpdateSubscripitonTracingParameters(Model):
"""
Parameters to update a specific diagnostic setting.
:param enabled: Indicates whether to enable or disable the diagnostic tracing.
:type enabled: bool
"""
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'}
}
def __init__(self, enabled=None):
super(UpdateSubscripitonTracingParameters, self).__init__()
self.enabled = enabled
class ValueDefinition(Model):
"""
Encapsulates the properties of a field value definition. It has the information needed to retrieve the list of possible values for a certain field and how to handle that field's values in the UI. This information includes what type of object this value represents, which property to use for UI display, and which property to use for saving the subscription.
:param data_source: Gets or sets the data source.
:type data_source: list of :class:`InputValue <azure.devops.v7_0.notification.models.InputValue>`
:param end_point: Gets or sets the rest end point.
:type end_point: str
:param result_template: Gets or sets the result template.
:type result_template: str
"""
_attribute_map = {
'data_source': {'key': 'dataSource', 'type': '[InputValue]'},
'end_point': {'key': 'endPoint', 'type': 'str'},
'result_template': {'key': 'resultTemplate', 'type': 'str'}
}
def __init__(self, data_source=None, end_point=None, result_template=None):
super(ValueDefinition, self).__init__()
self.data_source = data_source
self.end_point = end_point
self.result_template = result_template
class VssNotificationEvent(Model):
"""
This is the type used for firing notifications intended for the subsystem in the Notifications SDK. For components that can't take a dependency on the Notifications SDK directly, they can use ITeamFoundationEventService.PublishNotification and the Notifications SDK ISubscriber implementation will get it.
:param actors: Optional: A list of actors which are additional identities with corresponding roles that are relevant to the event.
:type actors: list of :class:`EventActor <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.EventActor>`
:param artifact_uris: Optional: A list of artifacts referenced or impacted by this event.
:type artifact_uris: list of str
:param data: Required: The event payload. If Data is a string, it must be in Json or XML format. Otherwise it must have a serialization format attribute.
:type data: object
:param event_type: Required: The name of the event. This event must be registered in the context it is being fired.
:type event_type: str
:param expires_in: How long before the event expires and will be cleaned up. The default is to use the system default.
:type expires_in: object
:param item_id: The id of the item, artifact, extension, project, etc.
:type item_id: str
:param process_delay: How long to wait before processing this event. The default is to process immediately.
:type process_delay: object
:param scopes: Optional: A list of scopes which are relevant to the event.
:type scopes: list of :class:`EventScope <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.EventScope>`
:param source_event_created_time: This is the time the original source event for this VssNotificationEvent was created. For example, for something like a build completion notification, SourceEventCreatedTime should be the time the build finished, not the time this event was raised.
:type source_event_created_time: datetime
"""
_attribute_map = {
'actors': {'key': 'actors', 'type': '[EventActor]'},
'artifact_uris': {'key': 'artifactUris', 'type': '[str]'},
'data': {'key': 'data', 'type': 'object'},
'event_type': {'key': 'eventType', 'type': 'str'},
'expires_in': {'key': 'expiresIn', 'type': 'object'},
'item_id': {'key': 'itemId', 'type': 'str'},
'process_delay': {'key': 'processDelay', 'type': 'object'},
'scopes': {'key': 'scopes', 'type': '[EventScope]'},
'source_event_created_time': {'key': 'sourceEventCreatedTime', 'type': 'iso-8601'}
}
def __init__(self, actors=None, artifact_uris=None, data=None, event_type=None, expires_in=None, item_id=None, process_delay=None, scopes=None, source_event_created_time=None):
super(VssNotificationEvent, self).__init__()
self.actors = actors
self.artifact_uris = artifact_uris
self.data = data
self.event_type = event_type
self.expires_in = expires_in
self.item_id = item_id
self.process_delay = process_delay
self.scopes = scopes
self.source_event_created_time = source_event_created_time
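# --- Hedged usage sketch (not part of the generated file) ---
# A minimal VssNotificationEvent. Per the docstring above, a string payload
# must be JSON or XML; the event type name here is a hypothetical placeholder
# that would need to be registered in the firing context.
_example_notification_event = VssNotificationEvent(
    event_type='my.custom.event-created',
    data='{"id": 42, "state": "created"}')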
class ArtifactFilter(BaseSubscriptionFilter):
"""
Artifact filter options. Used in "follow" subscriptions.
:param event_type:
:type event_type: str
:param artifact_id:
:type artifact_id: str
:param artifact_type:
:type artifact_type: str
:param artifact_uri:
:type artifact_uri: str
:param type:
:type type: str
"""
_attribute_map = {
'event_type': {'key': 'eventType', 'type': 'str'},
'artifact_id': {'key': 'artifactId', 'type': 'str'},
'artifact_type': {'key': 'artifactType', 'type': 'str'},
'artifact_uri': {'key': 'artifactUri', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, event_type=None, artifact_id=None, artifact_type=None, artifact_uri=None, type=None):
super(ArtifactFilter, self).__init__(event_type=event_type)
self.artifact_id = artifact_id
self.artifact_type = artifact_type
self.artifact_uri = artifact_uri
self.type = type
class FieldInputValues(InputValues):
"""
:param default_value: The default value to use for this input
:type default_value: str
:param error: Errors encountered while computing dynamic values.
:type error: :class:`InputValuesError <azure.devops.v7_0.notification.models.InputValuesError>`
:param input_id: The id of the input
:type input_id: str
:param is_disabled: Should this input be disabled
:type is_disabled: bool
:param is_limited_to_possible_values: Should the value be restricted to one of the values in the PossibleValues (True) or are the values in PossibleValues just a suggestion (False)
:type is_limited_to_possible_values: bool
:param is_read_only: Should this input be made read-only
:type is_read_only: bool
:param possible_values: Possible values that this input can take
:type possible_values: list of :class:`InputValue <azure.devops.v7_0.notification.models.InputValue>`
:param operators:
:type operators: str
"""
_attribute_map = {
'default_value': {'key': 'defaultValue', 'type': 'str'},
'error': {'key': 'error', 'type': 'InputValuesError'},
'input_id': {'key': 'inputId', 'type': 'str'},
'is_disabled': {'key': 'isDisabled', 'type': 'bool'},
'is_limited_to_possible_values': {'key': 'isLimitedToPossibleValues', 'type': 'bool'},
'is_read_only': {'key': 'isReadOnly', 'type': 'bool'},
'possible_values': {'key': 'possibleValues', 'type': '[InputValue]'},
'operators': {'key': 'operators', 'type': 'str'}
}
def __init__(self, default_value=None, error=None, input_id=None, is_disabled=None, is_limited_to_possible_values=None, is_read_only=None, possible_values=None, operators=None):
super(FieldInputValues, self).__init__(default_value=default_value, error=error, input_id=input_id, is_disabled=is_disabled, is_limited_to_possible_values=is_limited_to_possible_values, is_read_only=is_read_only, possible_values=possible_values)
self.operators = operators
class FieldValuesQuery(InputValuesQuery):
"""
:param current_values:
:type current_values: dict
:param resource: Subscription containing information about the publisher/consumer and the current input values
:type resource: object
:param input_values:
:type input_values: list of :class:`FieldInputValues <azure.devops.v7_0.notification.models.FieldInputValues>`
:param scope:
:type scope: str
"""
_attribute_map = {
'current_values': {'key': 'currentValues', 'type': '{str}'},
'resource': {'key': 'resource', 'type': 'object'},
'input_values': {'key': 'inputValues', 'type': '[FieldInputValues]'},
'scope': {'key': 'scope', 'type': 'str'}
}
def __init__(self, current_values=None, resource=None, input_values=None, scope=None):
super(FieldValuesQuery, self).__init__(current_values=current_values, resource=resource)
self.input_values = input_values
self.scope = scope
__all__ = [
'BaseSubscriptionFilter',
'BatchNotificationOperation',
'EventActor',
'EventScope',
'EventsEvaluationResult',
'EventTransformRequest',
'EventTransformResult',
'ExpressionFilterClause',
'ExpressionFilterGroup',
'ExpressionFilterModel',
'GraphSubjectBase',
'IdentityRef',
'INotificationDiagnosticLog',
'InputValue',
'InputValues',
'InputValuesError',
'InputValuesQuery',
'ISubscriptionFilter',
'ISubscriptionChannel',
'NotificationAdminSettings',
'NotificationAdminSettingsUpdateParameters',
'NotificationDiagnosticLogMessage',
'NotificationEventField',
'NotificationEventFieldOperator',
'NotificationEventFieldType',
'NotificationEventPublisher',
'NotificationEventRole',
'NotificationEventType',
'NotificationEventTypeCategory',
'NotificationQueryCondition',
'NotificationReason',
'NotificationsEvaluationResult',
'NotificationStatistic',
'NotificationStatisticsQuery',
'NotificationStatisticsQueryConditions',
'NotificationSubscriber',
'NotificationSubscriberUpdateParameters',
'NotificationSubscription',
'NotificationSubscriptionCreateParameters',
'NotificationSubscriptionTemplate',
'NotificationSubscriptionUpdateParameters',
'OperatorConstraint',
'ReferenceLinks',
'SubscriptionAdminSettings',
'SubscriptionDiagnostics',
'SubscriptionEvaluationRequest',
'SubscriptionEvaluationResult',
'SubscriptionEvaluationSettings',
'SubscriptionChannelWithAddress',
'SubscriptionManagement',
'SubscriptionQuery',
'SubscriptionQueryCondition',
'SubscriptionScope',
'SubscriptionTracing',
'SubscriptionUserSettings',
'UpdateSubscripitonDiagnosticsParameters',
'UpdateSubscripitonTracingParameters',
'ValueDefinition',
'VssNotificationEvent',
'ArtifactFilter',
'FieldInputValues',
'FieldValuesQuery',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/notification/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/notification/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 27095
}
| 401 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class PipelinesClient(Client):
"""Pipelines
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(PipelinesClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = None
def get_artifact(self, project, pipeline_id, run_id, artifact_name, expand=None):
"""GetArtifact.
Get a specific artifact from a pipeline run
:param str project: Project ID or project name
:param int pipeline_id: ID of the pipeline.
:param int run_id: ID of the run of that pipeline.
:param str artifact_name: Name of the artifact.
:param str expand: Expand options. Default is None.
:rtype: :class:`<Artifact> <azure.devops.v7_0.pipelines.models.Artifact>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if pipeline_id is not None:
route_values['pipelineId'] = self._serialize.url('pipeline_id', pipeline_id, 'int')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
query_parameters = {}
if artifact_name is not None:
query_parameters['artifactName'] = self._serialize.query('artifact_name', artifact_name, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='85023071-bd5e-4438-89b0-2a5bf362a19d',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Artifact', response)
def get_log(self, project, pipeline_id, run_id, log_id, expand=None):
"""GetLog.
Get a specific log from a pipeline run
:param str project: Project ID or project name
:param int pipeline_id: ID of the pipeline.
:param int run_id: ID of the run of that pipeline.
:param int log_id: ID of the log.
:param str expand: Expand options. Default is None.
:rtype: :class:`<Log> <azure.devops.v7_0.pipelines.models.Log>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if pipeline_id is not None:
route_values['pipelineId'] = self._serialize.url('pipeline_id', pipeline_id, 'int')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if log_id is not None:
route_values['logId'] = self._serialize.url('log_id', log_id, 'int')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='fb1b6d27-3957-43d5-a14b-a2d70403e545',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Log', response)
def list_logs(self, project, pipeline_id, run_id, expand=None):
"""ListLogs.
Get a list of logs from a pipeline run.
:param str project: Project ID or project name
:param int pipeline_id: ID of the pipeline.
:param int run_id: ID of the run of that pipeline.
:param str expand: Expand options. Default is None.
:rtype: :class:`<LogCollection> <azure.devops.v7_0.pipelines.models.LogCollection>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if pipeline_id is not None:
route_values['pipelineId'] = self._serialize.url('pipeline_id', pipeline_id, 'int')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='fb1b6d27-3957-43d5-a14b-a2d70403e545',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('LogCollection', response)
def create_pipeline(self, input_parameters, project):
"""CreatePipeline.
Create a pipeline.
:param :class:`<CreatePipelineParameters> <azure.devops.v7_0.pipelines.models.CreatePipelineParameters>` input_parameters: Input parameters.
:param str project: Project ID or project name
:rtype: :class:`<Pipeline> <azure.devops.v7_0.pipelines.models.Pipeline>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(input_parameters, 'CreatePipelineParameters')
response = self._send(http_method='POST',
location_id='28e1305e-2afe-47bf-abaf-cbb0e6a91988',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('Pipeline', response)
def get_pipeline(self, project, pipeline_id, pipeline_version=None):
"""GetPipeline.
Gets a pipeline, optionally at the specified version
:param str project: Project ID or project name
:param int pipeline_id: The pipeline ID
:param int pipeline_version: The pipeline version
:rtype: :class:`<Pipeline> <azure.devops.v7_0.pipelines.models.Pipeline>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if pipeline_id is not None:
route_values['pipelineId'] = self._serialize.url('pipeline_id', pipeline_id, 'int')
query_parameters = {}
if pipeline_version is not None:
query_parameters['pipelineVersion'] = self._serialize.query('pipeline_version', pipeline_version, 'int')
response = self._send(http_method='GET',
location_id='28e1305e-2afe-47bf-abaf-cbb0e6a91988',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Pipeline', response)
def list_pipelines(self, project, order_by=None, top=None, continuation_token=None):
"""ListPipelines.
Get a list of pipelines.
:param str project: Project ID or project name
:param str order_by: A sort expression. Defaults to "name asc"
:param int top: The maximum number of pipelines to return
:param str continuation_token: A continuation token from a previous request, to retrieve the next page of results
:rtype: [Pipeline]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if order_by is not None:
query_parameters['orderBy'] = self._serialize.query('order_by', order_by, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
response = self._send(http_method='GET',
location_id='28e1305e-2afe-47bf-abaf-cbb0e6a91988',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[Pipeline]', self._unwrap_collection(response))
def preview(self, run_parameters, project, pipeline_id, pipeline_version=None):
"""Preview.
Queues a dry run of the pipeline and returns an object containing the final YAML.
:param :class:`<RunPipelineParameters> <azure.devops.v7_0.pipelines.models.RunPipelineParameters>` run_parameters: Optional additional parameters for this run.
:param str project: Project ID or project name
:param int pipeline_id: The pipeline ID.
:param int pipeline_version: The pipeline version.
:rtype: :class:`<PreviewRun> <azure.devops.v7_0.pipelines.models.PreviewRun>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if pipeline_id is not None:
route_values['pipelineId'] = self._serialize.url('pipeline_id', pipeline_id, 'int')
query_parameters = {}
if pipeline_version is not None:
query_parameters['pipelineVersion'] = self._serialize.query('pipeline_version', pipeline_version, 'int')
content = self._serialize.body(run_parameters, 'RunPipelineParameters')
response = self._send(http_method='POST',
location_id='53df2d18-29ea-46a9-bee0-933540f80abf',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('PreviewRun', response)
def get_run(self, project, pipeline_id, run_id):
"""GetRun.
Gets a run for a particular pipeline.
:param str project: Project ID or project name
:param int pipeline_id: The pipeline id
:param int run_id: The run id
:rtype: :class:`<Run> <azure.devops.v7_0.pipelines.models.Run>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if pipeline_id is not None:
route_values['pipelineId'] = self._serialize.url('pipeline_id', pipeline_id, 'int')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
response = self._send(http_method='GET',
location_id='7859261e-d2e9-4a68-b820-a5d84cc5bb3d',
version='7.0',
route_values=route_values)
return self._deserialize('Run', response)
def list_runs(self, project, pipeline_id):
"""ListRuns.
Gets the top 10000 runs for a particular pipeline.
:param str project: Project ID or project name
:param int pipeline_id: The pipeline id
:rtype: [Run]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if pipeline_id is not None:
route_values['pipelineId'] = self._serialize.url('pipeline_id', pipeline_id, 'int')
response = self._send(http_method='GET',
location_id='7859261e-d2e9-4a68-b820-a5d84cc5bb3d',
version='7.0',
route_values=route_values)
return self._deserialize('[Run]', self._unwrap_collection(response))
def run_pipeline(self, run_parameters, project, pipeline_id, pipeline_version=None):
"""RunPipeline.
Runs a pipeline.
:param :class:`<RunPipelineParameters> <azure.devops.v7_0.pipelines.models.RunPipelineParameters>` run_parameters: Optional additional parameters for this run.
:param str project: Project ID or project name
:param int pipeline_id: The pipeline ID.
:param int pipeline_version: The pipeline version.
:rtype: :class:`<Run> <azure.devops.v7_0.pipelines.models.Run>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if pipeline_id is not None:
route_values['pipelineId'] = self._serialize.url('pipeline_id', pipeline_id, 'int')
query_parameters = {}
if pipeline_version is not None:
query_parameters['pipelineVersion'] = self._serialize.query('pipeline_version', pipeline_version, 'int')
content = self._serialize.body(run_parameters, 'RunPipelineParameters')
response = self._send(http_method='POST',
location_id='7859261e-d2e9-4a68-b820-a5d84cc5bb3d',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('Run', response)
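    # --- Hedged usage sketch (not part of the generated file) ---
    # Minimal end-to-end use of PipelinesClient, assuming the azure-devops
    # Connection/ClientFactory wiring; the organization URL, PAT, project name,
    # and pipeline ID are hypothetical placeholders.
    #
    #   from azure.devops.connection import Connection
    #   from msrest.authentication import BasicAuthentication
    #
    #   connection = Connection(base_url='https://dev.azure.com/your-org',
    #                           creds=BasicAuthentication('', 'your-pat'))
    #   client = connection.clients.get_pipelines_client()
    #   runs = client.list_runs(project='MyProject', pipeline_id=42)
    #   for run in runs:
    #       print(run.id, run.state)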
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/pipelines/pipelines_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/pipelines/pipelines_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 6316
}
| 402 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class AccessControlEntry(Model):
"""
Class for encapsulating the allowed and denied permissions for a given IdentityDescriptor.
:param allow: The set of permission bits that represent the actions that the associated descriptor is allowed to perform.
:type allow: int
:param deny: The set of permission bits that represent the actions that the associated descriptor is not allowed to perform.
:type deny: int
:param descriptor: The descriptor for the user this AccessControlEntry applies to.
:type descriptor: :class:`str <azure.devops.v7_0.security.models.str>`
:param extended_info: This value, when set, reports the inherited and effective information for the associated descriptor. This value is only set on AccessControlEntries returned by the QueryAccessControlList(s) call when its includeExtendedInfo parameter is set to true.
:type extended_info: :class:`AceExtendedInformation <azure.devops.v7_0.security.models.AceExtendedInformation>`
"""
_attribute_map = {
'allow': {'key': 'allow', 'type': 'int'},
'deny': {'key': 'deny', 'type': 'int'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'extended_info': {'key': 'extendedInfo', 'type': 'AceExtendedInformation'}
}
def __init__(self, allow=None, deny=None, descriptor=None, extended_info=None):
super(AccessControlEntry, self).__init__()
self.allow = allow
self.deny = deny
self.descriptor = descriptor
self.extended_info = extended_info
class AccessControlList(Model):
"""
The AccessControlList class is meant to associate a set of AccessControlEntries with a security token and its inheritance settings.
:param aces_dictionary: Storage of permissions keyed on the identity the permission is for.
:type aces_dictionary: dict
:param include_extended_info: True if this ACL holds ACEs that have extended information.
:type include_extended_info: bool
:param inherit_permissions: True if the given token inherits permissions from parents.
:type inherit_permissions: bool
:param token: The token that this AccessControlList is for.
:type token: str
"""
_attribute_map = {
'aces_dictionary': {'key': 'acesDictionary', 'type': '{AccessControlEntry}'},
'include_extended_info': {'key': 'includeExtendedInfo', 'type': 'bool'},
'inherit_permissions': {'key': 'inheritPermissions', 'type': 'bool'},
'token': {'key': 'token', 'type': 'str'}
}
def __init__(self, aces_dictionary=None, include_extended_info=None, inherit_permissions=None, token=None):
super(AccessControlList, self).__init__()
self.aces_dictionary = aces_dictionary
self.include_extended_info = include_extended_info
self.inherit_permissions = inherit_permissions
self.token = token
class AccessControlListsCollection(Model):
"""
A list of AccessControlList. An AccessControlList is meant to associate a set of AccessControlEntries with a security token and its inheritance settings.
"""
_attribute_map = {
}
def __init__(self):
super(AccessControlListsCollection, self).__init__()
class AceExtendedInformation(Model):
"""
Holds the inherited and effective permission information for a given AccessControlEntry.
:param effective_allow: This is the combination of all of the explicit and inherited permissions for this identity on this token. These are the permissions used when determining if a given user has permission to perform an action.
:type effective_allow: int
:param effective_deny: This is the combination of all of the explicit and inherited permissions for this identity on this token. These are the permissions used when determining if a given user has permission to perform an action.
:type effective_deny: int
:param inherited_allow: These are the permissions that are inherited for this identity on this token. If the token does not inherit permissions this will be 0. Note that any permissions that have been explicitly set on this token for this identity, or any groups that this identity is a part of, are not included here.
:type inherited_allow: int
:param inherited_deny: These are the permissions that are inherited for this identity on this token. If the token does not inherit permissions this will be 0. Note that any permissions that have been explicitly set on this token for this identity, or any groups that this identity is a part of, are not included here.
:type inherited_deny: int
"""
_attribute_map = {
'effective_allow': {'key': 'effectiveAllow', 'type': 'int'},
'effective_deny': {'key': 'effectiveDeny', 'type': 'int'},
'inherited_allow': {'key': 'inheritedAllow', 'type': 'int'},
'inherited_deny': {'key': 'inheritedDeny', 'type': 'int'}
}
def __init__(self, effective_allow=None, effective_deny=None, inherited_allow=None, inherited_deny=None):
super(AceExtendedInformation, self).__init__()
self.effective_allow = effective_allow
self.effective_deny = effective_deny
self.inherited_allow = inherited_allow
self.inherited_deny = inherited_deny
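# --- Hedged usage sketch (not part of the generated file) ---
# Checking a single permission bit against an ACE's effective allow mask. The
# helper name is illustrative; real bit values come from the namespace's
# ActionDefinition entries (see ActionDefinition below).
def _has_effective_permission(ace_extended_info, bit):
    # Bitwise-AND the requested bit with the combined explicit + inherited allows.
    return bool((ace_extended_info.effective_allow or 0) & bit)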
class ActionDefinition(Model):
"""
:param bit: The bit mask integer for this action. Must be a power of 2.
:type bit: int
:param display_name: The localized display name for this action.
:type display_name: str
:param name: The non-localized name for this action.
:type name: str
:param namespace_id: The namespace that this action belongs to. This will only be used for reading from the database.
:type namespace_id: str
"""
_attribute_map = {
'bit': {'key': 'bit', 'type': 'int'},
'display_name': {'key': 'displayName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'namespace_id': {'key': 'namespaceId', 'type': 'str'}
}
def __init__(self, bit=None, display_name=None, name=None, namespace_id=None):
super(ActionDefinition, self).__init__()
self.bit = bit
self.display_name = display_name
self.name = name
self.namespace_id = namespace_id
class PermissionEvaluation(Model):
"""
Represents an evaluated permission.
:param permissions: Permission bit for this evaluated permission.
:type permissions: int
:param security_namespace_id: Security namespace identifier for this evaluated permission.
:type security_namespace_id: str
:param token: Security namespace-specific token for this evaluated permission.
:type token: str
:param value: Permission evaluation value.
:type value: bool
"""
_attribute_map = {
'permissions': {'key': 'permissions', 'type': 'int'},
'security_namespace_id': {'key': 'securityNamespaceId', 'type': 'str'},
'token': {'key': 'token', 'type': 'str'},
'value': {'key': 'value', 'type': 'bool'}
}
def __init__(self, permissions=None, security_namespace_id=None, token=None, value=None):
super(PermissionEvaluation, self).__init__()
self.permissions = permissions
self.security_namespace_id = security_namespace_id
self.token = token
self.value = value
class PermissionEvaluationBatch(Model):
"""
Represents a set of evaluated permissions.
:param always_allow_administrators: True if members of the Administrators group should always pass the security check.
:type always_allow_administrators: bool
:param evaluations: Array of permission evaluations to evaluate.
:type evaluations: list of :class:`PermissionEvaluation <azure.devops.v7_0.security.models.PermissionEvaluation>`
"""
_attribute_map = {
'always_allow_administrators': {'key': 'alwaysAllowAdministrators', 'type': 'bool'},
'evaluations': {'key': 'evaluations', 'type': '[PermissionEvaluation]'}
}
def __init__(self, always_allow_administrators=None, evaluations=None):
super(PermissionEvaluationBatch, self).__init__()
self.always_allow_administrators = always_allow_administrators
self.evaluations = evaluations
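# --- Hedged usage sketch (not part of the generated file) ---
# Assembling a PermissionEvaluationBatch for a batched permission check. The
# namespace ID and token are hypothetical placeholders; `permissions` carries
# the permission bit being evaluated (bit 1 here).
_example_permission_batch = PermissionEvaluationBatch(
    always_allow_administrators=True,
    evaluations=[
        PermissionEvaluation(
            permissions=1,
            security_namespace_id='00000000-0000-0000-0000-000000000000',
            token='repoV2/my-project'),
    ])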
class SecurityNamespaceDescription(Model):
"""
Class for describing the details of a TeamFoundationSecurityNamespace.
:param actions: The list of actions that this Security Namespace is responsible for securing.
:type actions: list of :class:`ActionDefinition <azure.devops.v7_0.security.models.ActionDefinition>`
:param dataspace_category: This is the dataspace category that describes where the security information for this SecurityNamespace should be stored.
:type dataspace_category: str
:param display_name: The localized display name for this namespace.
:type display_name: str
:param element_length: If the security tokens this namespace will be operating on need to be split on certain character lengths to determine its elements, that length should be specified here. If not, this value will be -1.
:type element_length: int
:param extension_type: This is the type of the extension that should be loaded from the plugins directory for extending this security namespace.
:type extension_type: str
:param is_remotable: If true, the security namespace is remotable, allowing another service to proxy the namespace.
:type is_remotable: bool
:param name: The non-localized name for this namespace.
:type name: str
:param namespace_id: The unique identifier for this namespace.
:type namespace_id: str
:param read_permission: The permission bits needed by a user in order to read security data on the Security Namespace.
:type read_permission: int
:param separator_value: If the security tokens this namespace will be operating on need to be split on certain characters to determine its elements, that character should be specified here. If not, this value will be the null character.
:type separator_value: str
:param structure_value: Used to send information about the structure of the security namespace over the web service.
:type structure_value: int
:param system_bit_mask: The bits reserved by the system store.
:type system_bit_mask: int
:param use_token_translator: If true, the security service will expect an ISecurityDataspaceTokenTranslator plugin to exist for this namespace
:type use_token_translator: bool
:param write_permission: The permission bits needed by a user in order to modify security data on the Security Namespace.
:type write_permission: int
"""
_attribute_map = {
'actions': {'key': 'actions', 'type': '[ActionDefinition]'},
'dataspace_category': {'key': 'dataspaceCategory', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'element_length': {'key': 'elementLength', 'type': 'int'},
'extension_type': {'key': 'extensionType', 'type': 'str'},
'is_remotable': {'key': 'isRemotable', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'namespace_id': {'key': 'namespaceId', 'type': 'str'},
'read_permission': {'key': 'readPermission', 'type': 'int'},
'separator_value': {'key': 'separatorValue', 'type': 'str'},
'structure_value': {'key': 'structureValue', 'type': 'int'},
'system_bit_mask': {'key': 'systemBitMask', 'type': 'int'},
'use_token_translator': {'key': 'useTokenTranslator', 'type': 'bool'},
'write_permission': {'key': 'writePermission', 'type': 'int'}
}
def __init__(self, actions=None, dataspace_category=None, display_name=None, element_length=None, extension_type=None, is_remotable=None, name=None, namespace_id=None, read_permission=None, separator_value=None, structure_value=None, system_bit_mask=None, use_token_translator=None, write_permission=None):
super(SecurityNamespaceDescription, self).__init__()
self.actions = actions
self.dataspace_category = dataspace_category
self.display_name = display_name
self.element_length = element_length
self.extension_type = extension_type
self.is_remotable = is_remotable
self.name = name
self.namespace_id = namespace_id
self.read_permission = read_permission
self.separator_value = separator_value
self.structure_value = structure_value
self.system_bit_mask = system_bit_mask
self.use_token_translator = use_token_translator
self.write_permission = write_permission
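# --- Hedged usage sketch (not part of the generated file) ---
# Looking up an action's permission bit in a SecurityNamespaceDescription by
# its non-localized name; returns None when no action matches.
def _action_bit(namespace_description, action_name):
    for action in namespace_description.actions or []:
        if action.name == action_name:
            return action.bit
    return None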
__all__ = [
'AccessControlEntry',
'AccessControlList',
'AccessControlListsCollection',
'AceExtendedInformation',
'ActionDefinition',
'PermissionEvaluation',
'PermissionEvaluationBatch',
'SecurityNamespaceDescription',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/security/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/security/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 4356
}
| 403 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .models import *
from .task_agent_client import TaskAgentClient
__all__ = [
'AadOauthTokenRequest',
'AadOauthTokenResult',
'AuthenticationSchemeReference',
'AuthorizationHeader',
'AzureManagementGroup',
'AzureManagementGroupQueryResult',
'AzureSubscription',
'AzureSubscriptionQueryResult',
'ClientCertificate',
'DataSource',
'DataSourceBinding',
'DataSourceBindingBase',
'DataSourceDetails',
'DependencyBinding',
'DependencyData',
'DependsOn',
'DeploymentGroup',
'DeploymentGroupCreateParameter',
'DeploymentGroupCreateParameterPoolProperty',
'DeploymentGroupMetrics',
'DeploymentGroupReference',
'DeploymentGroupUpdateParameter',
'DeploymentMachine',
'DeploymentMachineGroup',
'DeploymentMachineGroupReference',
'DeploymentPoolSummary',
'DeploymentTargetUpdateParameter',
'EndpointAuthorization',
'EndpointUrl',
'EnvironmentCreateParameter',
'EnvironmentDeploymentExecutionRecord',
'EnvironmentInstance',
'EnvironmentReference',
'EnvironmentResource',
'EnvironmentResourceReference',
'EnvironmentUpdateParameter',
'GraphSubjectBase',
'HelpLink',
'IdentityRef',
'InputDescriptor',
'InputValidation',
'InputValidationRequest',
'InputValue',
'InputValues',
'InputValuesError',
'KubernetesResource',
'KubernetesResourceCreateParameters',
'MetricsColumnMetaData',
'MetricsColumnsHeader',
'MetricsRow',
'PackageMetadata',
'PackageVersion',
'ProjectReference',
'PublishTaskGroupMetadata',
'ReferenceLinks',
'ResourceLimit',
'ResourceUsage',
'ResultTransformationDetails',
'SecureFile',
'ServiceEndpoint',
'ServiceEndpointAuthenticationScheme',
'ServiceEndpointDetails',
'ServiceEndpointExecutionData',
'ServiceEndpointExecutionRecord',
'ServiceEndpointExecutionRecordsInput',
'ServiceEndpointRequest',
'ServiceEndpointRequestResult',
'ServiceEndpointType',
'TaskAgent',
'TaskAgentAuthorization',
'TaskAgentCloud',
'TaskAgentCloudRequest',
'TaskAgentCloudType',
'TaskAgentJobRequest',
'TaskAgentMessage',
'TaskAgentPool',
'TaskAgentPoolMaintenanceDefinition',
'TaskAgentPoolMaintenanceJob',
'TaskAgentPoolMaintenanceJobTargetAgent',
'TaskAgentPoolMaintenanceOptions',
'TaskAgentPoolMaintenanceRetentionPolicy',
'TaskAgentPoolMaintenanceSchedule',
'TaskAgentPoolReference',
'TaskAgentPublicKey',
'TaskAgentQueue',
'TaskAgentReference',
'TaskAgentSession',
'TaskAgentSessionKey',
'TaskAgentUpdate',
'TaskAgentUpdateReason',
'TaskCommandRestrictions',
'TaskDefinition',
'TaskDefinitionEndpoint',
'TaskDefinitionReference',
'TaskExecution',
'TaskGroup',
'TaskGroupCreateParameter',
'TaskGroupDefinition',
'TaskGroupRevision',
'TaskGroupStep',
'TaskGroupUpdateParameter',
'TaskGroupUpdatePropertiesBase',
'TaskHubLicenseDetails',
'TaskInputDefinition',
'TaskInputDefinitionBase',
'TaskInputValidation',
'TaskOrchestrationOwner',
'TaskOutputVariable',
'TaskPackageMetadata',
'TaskReference',
'TaskRestrictions',
'TaskSourceDefinition',
'TaskSourceDefinitionBase',
'TaskVariableRestrictions',
'TaskVersion',
'ValidationItem',
'VariableGroup',
'VariableGroupParameters',
'VariableGroupProjectReference',
'VariableGroupProviderData',
'VariableValue',
'VirtualMachine',
'VirtualMachineGroup',
'VirtualMachineGroupCreateParameters',
'TaskAgentClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/task_agent/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/task_agent/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 1343
}
| 404 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class SessionToken(Model):
"""
Represents a session token used to access Azure DevOps resources
:param access_id:
:type access_id: str
:param alternate_token: This is populated when the user requests a compact token. The alternate token value is a self-describing token.
:type alternate_token: str
:param authorization_id:
:type authorization_id: str
:param claims:
:type claims: dict
:param client_id:
:type client_id: str
:param display_name:
:type display_name: str
:param host_authorization_id:
:type host_authorization_id: str
:param is_public:
:type is_public: bool
:param is_valid:
:type is_valid: bool
:param public_data:
:type public_data: str
:param scope:
:type scope: str
:param source:
:type source: str
:param target_accounts:
:type target_accounts: list of str
:param token: This is computed and not returned in Get queries
:type token: str
:param user_id:
:type user_id: str
:param valid_from:
:type valid_from: datetime
:param valid_to:
:type valid_to: datetime
"""
_attribute_map = {
'access_id': {'key': 'accessId', 'type': 'str'},
'alternate_token': {'key': 'alternateToken', 'type': 'str'},
'authorization_id': {'key': 'authorizationId', 'type': 'str'},
'claims': {'key': 'claims', 'type': '{str}'},
'client_id': {'key': 'clientId', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'host_authorization_id': {'key': 'hostAuthorizationId', 'type': 'str'},
'is_public': {'key': 'isPublic', 'type': 'bool'},
'is_valid': {'key': 'isValid', 'type': 'bool'},
'public_data': {'key': 'publicData', 'type': 'str'},
'scope': {'key': 'scope', 'type': 'str'},
'source': {'key': 'source', 'type': 'str'},
'target_accounts': {'key': 'targetAccounts', 'type': '[str]'},
'token': {'key': 'token', 'type': 'str'},
'user_id': {'key': 'userId', 'type': 'str'},
'valid_from': {'key': 'validFrom', 'type': 'iso-8601'},
'valid_to': {'key': 'validTo', 'type': 'iso-8601'}
}
def __init__(self, access_id=None, alternate_token=None, authorization_id=None, claims=None, client_id=None, display_name=None, host_authorization_id=None, is_public=None, is_valid=None, public_data=None, scope=None, source=None, target_accounts=None, token=None, user_id=None, valid_from=None, valid_to=None):
super(SessionToken, self).__init__()
self.access_id = access_id
self.alternate_token = alternate_token
self.authorization_id = authorization_id
self.claims = claims
self.client_id = client_id
self.display_name = display_name
self.host_authorization_id = host_authorization_id
self.is_public = is_public
self.is_valid = is_valid
self.public_data = public_data
self.scope = scope
self.source = source
self.target_accounts = target_accounts
self.token = token
self.user_id = user_id
self.valid_from = valid_from
self.valid_to = valid_to
class SessionTokenResult(Model):
"""
:param has_error:
:type has_error: bool
:param session_token:
:type session_token: :class:`SessionToken <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.SessionToken>`
:param session_token_error:
:type session_token_error: object
"""
_attribute_map = {
'has_error': {'key': 'hasError', 'type': 'bool'},
'session_token': {'key': 'sessionToken', 'type': 'SessionToken'},
'session_token_error': {'key': 'sessionTokenError', 'type': 'object'}
}
def __init__(self, has_error=None, session_token=None, session_token_error=None):
super(SessionTokenResult, self).__init__()
self.has_error = has_error
self.session_token = session_token
self.session_token_error = session_token_error
class TokenAdminPagedSessionTokens(Model):
"""
A paginated list of session tokens. Session tokens correspond to OAuth credentials such as personal access tokens (PATs) and other OAuth authorizations.
:param continuation_token: The continuation token that can be used to retrieve the next page of session tokens, or None if there is no next page.
:type continuation_token: str
:param value: The list of all session tokens in the current page.
:type value: list of :class:`SessionToken <azure.devops.v7_0.token_admin.models.SessionToken>`
"""
_attribute_map = {
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'value': {'key': 'value', 'type': '[SessionToken]'}
}
def __init__(self, continuation_token=None, value=None):
super(TokenAdminPagedSessionTokens, self).__init__()
self.continuation_token = continuation_token
self.value = value
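# --- Hedged usage sketch (not part of the generated file) ---
# Continuation-token paging over TokenAdminPagedSessionTokens. `fetch_page` is
# a hypothetical callable wrapping the REST call; a None continuation token
# means there is no next page, per the docstring above.
def _iter_session_tokens(fetch_page):
    continuation_token = None
    while True:
        page = fetch_page(continuation_token)  # -> TokenAdminPagedSessionTokens
        for session_token in page.value or []:
            yield session_token
        continuation_token = page.continuation_token
        if not continuation_token:
            break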
class TokenAdminRevocation(Model):
"""
A request to revoke a particular delegated authorization.
:param authorization_id: The authorization ID of the OAuth authorization to revoke.
:type authorization_id: str
"""
_attribute_map = {
'authorization_id': {'key': 'authorizationId', 'type': 'str'}
}
def __init__(self, authorization_id=None):
super(TokenAdminRevocation, self).__init__()
self.authorization_id = authorization_id
class TokenAdminRevocationRule(Model):
"""
A rule which is applied to disable any incoming delegated authorization which matches the given properties.
:param created_before: A datetime cutoff. Tokens created before this time will be rejected. This is an optional parameter. If omitted, defaults to the time at which the rule was created.
:type created_before: datetime
:param scopes: A string containing a space-delimited list of OAuth scopes. A token matching any one of the scopes will be rejected. For a list of all OAuth scopes supported by Azure DevOps, see https://docs.microsoft.com/en-us/azure/devops/integrate/get-started/authentication/oauth?view=azure-devops#scopes. This is a mandatory parameter.
:type scopes: str
"""
_attribute_map = {
'created_before': {'key': 'createdBefore', 'type': 'iso-8601'},
'scopes': {'key': 'scopes', 'type': 'str'}
}
def __init__(self, created_before=None, scopes=None):
super(TokenAdminRevocationRule, self).__init__()
self.created_before = created_before
self.scopes = scopes
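# --- Hedged usage sketch (not part of the generated file) ---
# A revocation rule rejecting delegated tokens that carry either of the two
# (space-delimited) scopes below and were created before the cutoff; the date
# and scopes are illustrative only.
import datetime as _datetime
_example_revocation_rule = TokenAdminRevocationRule(
    created_before=_datetime.datetime(2023, 1, 1),
    scopes='vso.code vso.build_execute')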
__all__ = [
'SessionToken',
'SessionTokenResult',
'TokenAdminPagedSessionTokens',
'TokenAdminRevocation',
'TokenAdminRevocationRule',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/token_admin/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/token_admin/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 2643
}
| 405 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class WorkItemTrackingClient(Client):
"""WorkItemTracking
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(WorkItemTrackingClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = '5264459e-e5e0-4bd8-b118-0985e68a4ec5'
def get_recent_activity_data(self):
"""GetRecentActivityData.
Gets recent work item activities
:rtype: [AccountRecentActivityWorkItemModel2]
"""
response = self._send(http_method='GET',
location_id='1bc988f4-c15f-4072-ad35-497c87e3a909',
version='7.0')
return self._deserialize('[AccountRecentActivityWorkItemModel2]', self._unwrap_collection(response))
def get_work_artifact_link_types(self):
"""GetWorkArtifactLinkTypes.
Get the list of work item tracking outbound artifact link types.
:rtype: [WorkArtifactLink]
"""
response = self._send(http_method='GET',
location_id='1a31de40-e318-41cd-a6c6-881077df52e3',
version='7.0')
return self._deserialize('[WorkArtifactLink]', self._unwrap_collection(response))
def query_work_items_for_artifact_uris(self, artifact_uri_query, project=None):
"""QueryWorkItemsForArtifactUris.
Queries work items linked to a given list of artifact URIs.
:param :class:`<ArtifactUriQuery> <azure.devops.v7_0.work_item_tracking.models.ArtifactUriQuery>` artifact_uri_query: Defines a list of artifact URIs for querying work items.
:param str project: Project ID or project name
:rtype: :class:`<ArtifactUriQueryResult> <azure.devops.v7_0.work_item_tracking.models.ArtifactUriQueryResult>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(artifact_uri_query, 'ArtifactUriQuery')
response = self._send(http_method='POST',
location_id='a9a9aa7a-8c09-44d3-ad1b-46e855c1e3d3',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('ArtifactUriQueryResult', response)
def create_attachment(self, upload_stream, project=None, file_name=None, upload_type=None, area_path=None, **kwargs):
"""CreateAttachment.
Uploads an attachment.
:param object upload_stream: Stream to upload
:param str project: Project ID or project name
:param str file_name: The name of the file
:param str upload_type: Attachment upload type: Simple or Chunked
:param str area_path: Target project Area Path
:rtype: :class:`<AttachmentReference> <azure.devops.v7_0.work_item_tracking.models.AttachmentReference>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if file_name is not None:
query_parameters['fileName'] = self._serialize.query('file_name', file_name, 'str')
if upload_type is not None:
query_parameters['uploadType'] = self._serialize.query('upload_type', upload_type, 'str')
if area_path is not None:
query_parameters['areaPath'] = self._serialize.query('area_path', area_path, 'str')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
content = self._client.stream_upload(upload_stream, callback=callback)
response = self._send(http_method='POST',
location_id='e07b5fa4-1499-494d-a496-64b860fd64ff',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content,
media_type='application/octet-stream')
return self._deserialize('AttachmentReference', response)
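# --- Usage sketch (comments only): uploading an attachment ------------------
# Step one of attaching a file to a work item is the upload below; the
# returned AttachmentReference can then be linked to the work item in a
# separate update. The file path and project name are invented.
#
#   with open('logs/build.log', 'rb') as stream:
#       attachment = client.create_attachment(
#           upload_stream=stream, project='MyProject', file_name='build.log')
#   print(attachment.id, attachment.url)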
def get_attachment_content(self, id, project=None, file_name=None, download=None, **kwargs):
"""GetAttachmentContent.
Downloads an attachment.
:param str id: Attachment ID
:param str project: Project ID or project name
:param str file_name: Name of the file
:param bool download: If set to true, always download the attachment
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'str')
query_parameters = {}
if file_name is not None:
query_parameters['fileName'] = self._serialize.query('file_name', file_name, 'str')
if download is not None:
query_parameters['download'] = self._serialize.query('download', download, 'bool')
response = self._send(http_method='GET',
location_id='e07b5fa4-1499-494d-a496-64b860fd64ff',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='application/octet-stream')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
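# --- Usage sketch (comments only): downloading an attachment ----------------
# get_attachment_content hands the response to stream_download, which yields
# byte chunks, so large attachments can be written to disk incrementally.
# The attachment id below is an invented placeholder GUID.
#
#   chunks = client.get_attachment_content(
#       id='00000000-0000-0000-0000-000000000000',
#       project='MyProject', file_name='build.log', download=True)
#   with open('build.log', 'wb') as f:
#       for chunk in chunks:
#           f.write(chunk)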
def get_attachment_zip(self, id, project=None, file_name=None, download=None, **kwargs):
"""GetAttachmentZip.
Downloads an attachment.
:param str id: Attachment ID
:param str project: Project ID or project name
:param str file_name: Name of the file
:param bool download: If set to true, always download the attachment
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'str')
query_parameters = {}
if file_name is not None:
query_parameters['fileName'] = self._serialize.query('file_name', file_name, 'str')
if download is not None:
query_parameters['download'] = self._serialize.query('download', download, 'bool')
response = self._send(http_method='GET',
location_id='e07b5fa4-1499-494d-a496-64b860fd64ff',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='application/zip')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_classification_nodes(self, project, ids, depth=None, error_policy=None):
"""GetClassificationNodes.
Gets root classification nodes, or a list of classification nodes for a given list of node ids, for a given project. If the ids parameter is supplied, you get the classification nodes for those ids; otherwise you get the root classification nodes for the project.
:param str project: Project ID or project name
:param [int] ids: Comma-separated integer classification node ids. Not required if you want root nodes.
:param int depth: Depth of children to fetch.
:param str error_policy: Flag to handle errors in getting some nodes. Possible options are Fail and Omit.
:rtype: [WorkItemClassificationNode]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if ids is not None:
ids = ",".join(map(str, ids))
query_parameters['ids'] = self._serialize.query('ids', ids, 'str')
if depth is not None:
query_parameters['$depth'] = self._serialize.query('depth', depth, 'int')
if error_policy is not None:
query_parameters['errorPolicy'] = self._serialize.query('error_policy', error_policy, 'str')
response = self._send(http_method='GET',
location_id='a70579d1-f53a-48ee-a5be-7be8659023b9',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[WorkItemClassificationNode]', self._unwrap_collection(response))
def get_root_nodes(self, project, depth=None):
"""GetRootNodes.
Gets root classification nodes under the project.
:param str project: Project ID or project name
:param int depth: Depth of children to fetch.
:rtype: [WorkItemClassificationNode]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if depth is not None:
query_parameters['$depth'] = self._serialize.query('depth', depth, 'int')
response = self._send(http_method='GET',
location_id='a70579d1-f53a-48ee-a5be-7be8659023b9',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[WorkItemClassificationNode]', self._unwrap_collection(response))
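# --- Usage sketch (comments only): walking the classification trees ---------
# With a positive depth, each WorkItemClassificationNode carries its children,
# so the area/iteration hierarchies can be traversed recursively.
#
#   def walk(node, indent=''):
#       print(indent + node.name)
#       for child in node.children or []:
#           walk(child, indent + '  ')
#
#   for root in client.get_root_nodes('MyProject', depth=4):
#       walk(root)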
def create_or_update_classification_node(self, posted_node, project, structure_group, path=None):
"""CreateOrUpdateClassificationNode.
Create a new classification node or update an existing one.
:param :class:`<WorkItemClassificationNode> <azure.devops.v7_0.work_item_tracking.models.WorkItemClassificationNode>` posted_node: Node to create or update.
:param str project: Project ID or project name
:param TreeStructureGroup structure_group: Structure group of the classification node, area or iteration.
:param str path: Path of the classification node.
:rtype: :class:`<WorkItemClassificationNode> <azure.devops.v7_0.work_item_tracking.models.WorkItemClassificationNode>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if structure_group is not None:
route_values['structureGroup'] = self._serialize.url('structure_group', structure_group, 'TreeStructureGroup')
if path is not None:
route_values['path'] = self._serialize.url('path', path, 'str')
content = self._serialize.body(posted_node, 'WorkItemClassificationNode')
response = self._send(http_method='POST',
location_id='5a172953-1b41-49d3-840a-33f79c3ce89f',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('WorkItemClassificationNode', response)
def delete_classification_node(self, project, structure_group, path=None, reclassify_id=None):
"""DeleteClassificationNode.
Delete an existing classification node.
:param str project: Project ID or project name
:param TreeStructureGroup structure_group: Structure group of the classification node, area or iteration.
:param str path: Path of the classification node.
:param int reclassify_id: Id of the target classification node for reclassification.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if structure_group is not None:
route_values['structureGroup'] = self._serialize.url('structure_group', structure_group, 'TreeStructureGroup')
if path is not None:
route_values['path'] = self._serialize.url('path', path, 'str')
query_parameters = {}
if reclassify_id is not None:
query_parameters['$reclassifyId'] = self._serialize.query('reclassify_id', reclassify_id, 'int')
self._send(http_method='DELETE',
location_id='5a172953-1b41-49d3-840a-33f79c3ce89f',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
def get_classification_node(self, project, structure_group, path=None, depth=None):
"""GetClassificationNode.
Gets the classification node for a given node path.
:param str project: Project ID or project name
:param TreeStructureGroup structure_group: Structure group of the classification node, area or iteration.
:param str path: Path of the classification node.
:param int depth: Depth of children to fetch.
:rtype: :class:`<WorkItemClassificationNode> <azure.devops.v7_0.work_item_tracking.models.WorkItemClassificationNode>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if structure_group is not None:
route_values['structureGroup'] = self._serialize.url('structure_group', structure_group, 'TreeStructureGroup')
if path is not None:
route_values['path'] = self._serialize.url('path', path, 'str')
query_parameters = {}
if depth is not None:
query_parameters['$depth'] = self._serialize.query('depth', depth, 'int')
response = self._send(http_method='GET',
location_id='5a172953-1b41-49d3-840a-33f79c3ce89f',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('WorkItemClassificationNode', response)
def update_classification_node(self, posted_node, project, structure_group, path=None):
"""UpdateClassificationNode.
Update an existing classification node.
:param :class:`<WorkItemClassificationNode> <azure.devops.v7_0.work_item_tracking.models.WorkItemClassificationNode>` posted_node: Node to create or update.
:param str project: Project ID or project name
:param TreeStructureGroup structure_group: Structure group of the classification node, area or iteration.
:param str path: Path of the classification node.
:rtype: :class:`<WorkItemClassificationNode> <azure.devops.v7_0.work_item_tracking.models.WorkItemClassificationNode>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if structure_group is not None:
route_values['structureGroup'] = self._serialize.url('structure_group', structure_group, 'TreeStructureGroup')
if path is not None:
route_values['path'] = self._serialize.url('path', path, 'str')
content = self._serialize.body(posted_node, 'WorkItemClassificationNode')
response = self._send(http_method='PATCH',
location_id='5a172953-1b41-49d3-840a-33f79c3ce89f',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('WorkItemClassificationNode', response)
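# --- Usage sketch (comments only): creating an iteration node ---------------
# structure_group takes the TreeStructureGroup values 'areas' or 'iterations';
# the iteration-date attribute keys follow the service convention, and the
# names and dates below are invented.
#
#   node = models.WorkItemClassificationNode(
#       name='Sprint 42',
#       attributes={'startDate': '2024-01-01T00:00:00Z',
#                   'finishDate': '2024-01-14T00:00:00Z'})
#   created = client.create_or_update_classification_node(
#       node, project='MyProject', structure_group='iterations')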
def get_engaged_users(self, project, work_item_id, comment_id, reaction_type, top=None, skip=None):
"""GetEngagedUsers.
[Preview API] Get users who reacted to the comment.
:param str project: Project ID or project name
:param int work_item_id: WorkItem ID.
:param int comment_id: Comment ID.
:param CommentReactionType reaction_type: Type of the reaction.
:param int top:
:param int skip:
:rtype: [IdentityRef]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if work_item_id is not None:
route_values['workItemId'] = self._serialize.url('work_item_id', work_item_id, 'int')
if comment_id is not None:
route_values['commentId'] = self._serialize.url('comment_id', comment_id, 'int')
if reaction_type is not None:
route_values['reactionType'] = self._serialize.url('reaction_type', reaction_type, 'CommentReactionType')
query_parameters = {}
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
response = self._send(http_method='GET',
location_id='e33ca5e0-2349-4285-af3d-d72d86781c35',
version='7.0-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[IdentityRef]', self._unwrap_collection(response))
def add_comment(self, request, project, work_item_id):
"""AddComment.
[Preview API] Add a comment on a work item.
:param :class:`<CommentCreate> <azure.devops.v7_0.work_item_tracking.models.CommentCreate>` request: Comment create request.
:param str project: Project ID or project name
:param int work_item_id: Id of a work item.
:rtype: :class:`<Comment> <azure.devops.v7_0.work_item_tracking.models.Comment>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if work_item_id is not None:
route_values['workItemId'] = self._serialize.url('work_item_id', work_item_id, 'int')
content = self._serialize.body(request, 'CommentCreate')
response = self._send(http_method='POST',
location_id='608aac0a-32e1-4493-a863-b9cf4566d257',
version='7.0-preview.3',
route_values=route_values,
content=content)
return self._deserialize('Comment', response)
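# --- Usage sketch (comments only): adding a comment -------------------------
# The preview comments API takes a CommentCreate body from this package's
# models module; the work item id below is invented.
#
#   comment = client.add_comment(
#       models.CommentCreate(text='Deployed to staging.'),
#       project='MyProject', work_item_id=1234)
#   print(comment.id, comment.created_date)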
def delete_comment(self, project, work_item_id, comment_id):
"""DeleteComment.
[Preview API] Delete a comment on a work item.
:param str project: Project ID or project name
:param int work_item_id: Id of a work item.
:param int comment_id:
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if work_item_id is not None:
route_values['workItemId'] = self._serialize.url('work_item_id', work_item_id, 'int')
if comment_id is not None:
route_values['commentId'] = self._serialize.url('comment_id', comment_id, 'int')
self._send(http_method='DELETE',
location_id='608aac0a-32e1-4493-a863-b9cf4566d257',
version='7.0-preview.3',
route_values=route_values)
def get_comment(self, project, work_item_id, comment_id, include_deleted=None, expand=None):
"""GetComment.
[Preview API] Returns a work item comment.
:param str project: Project ID or project name
:param int work_item_id: Id of a work item to get the comment.
:param int comment_id: Id of the comment to return.
:param bool include_deleted: Specify if the deleted comment should be retrieved.
:param str expand: Specifies the additional data retrieval options for work item comments.
:rtype: :class:`<Comment> <azure.devops.v7_0.work_item_tracking.models.Comment>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if work_item_id is not None:
route_values['workItemId'] = self._serialize.url('work_item_id', work_item_id, 'int')
if comment_id is not None:
route_values['commentId'] = self._serialize.url('comment_id', comment_id, 'int')
query_parameters = {}
if include_deleted is not None:
query_parameters['includeDeleted'] = self._serialize.query('include_deleted', include_deleted, 'bool')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='608aac0a-32e1-4493-a863-b9cf4566d257',
version='7.0-preview.3',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Comment', response)
def get_comments(self, project, work_item_id, top=None, continuation_token=None, include_deleted=None, expand=None, order=None):
"""GetComments.
[Preview API] Returns a list of work item comments, pageable.
:param str project: Project ID or project name
:param int work_item_id: Id of a work item to get comments for.
:param int top: Max number of comments to return.
:param str continuation_token: Used to query for the next page of comments.
:param bool include_deleted: Specify if the deleted comments should be retrieved.
:param str expand: Specifies the additional data retrieval options for work item comments.
:param str order: Order in which the comments should be returned.
:rtype: :class:`<CommentList> <azure.devops.v7_0.work_item_tracking.models.CommentList>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if work_item_id is not None:
route_values['workItemId'] = self._serialize.url('work_item_id', work_item_id, 'int')
query_parameters = {}
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if include_deleted is not None:
query_parameters['includeDeleted'] = self._serialize.query('include_deleted', include_deleted, 'bool')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
if order is not None:
query_parameters['order'] = self._serialize.query('order', order, 'str')
response = self._send(http_method='GET',
location_id='608aac0a-32e1-4493-a863-b9cf4566d257',
version='7.0-preview.3',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('CommentList', response)
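# --- Usage sketch (comments only): paging through comments ------------------
# CommentList carries a continuation_token that is empty on the last page,
# so fetching every comment is a simple loop.
#
#   token = None
#   while True:
#       page = client.get_comments('MyProject', 1234, top=50,
#                                  continuation_token=token)
#       for c in page.comments:
#           print(c.id, c.text)
#       token = page.continuation_token
#       if not token:
#           break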
def get_comments_batch(self, project, work_item_id, ids, include_deleted=None, expand=None):
"""GetCommentsBatch.
[Preview API] Returns a list of work item comments by ids.
:param str project: Project ID or project name
:param int work_item_id: Id of a work item to get comments for.
:param [int] ids: Comma-separated list of comment ids to return.
:param bool include_deleted: Specify if the deleted comments should be retrieved.
:param str expand: Specifies the additional data retrieval options for work item comments.
:rtype: :class:`<CommentList> <azure.devops.v7_0.work_item_tracking.models.CommentList>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if work_item_id is not None:
route_values['workItemId'] = self._serialize.url('work_item_id', work_item_id, 'int')
query_parameters = {}
if ids is not None:
ids = ",".join(map(str, ids))
query_parameters['ids'] = self._serialize.query('ids', ids, 'str')
if include_deleted is not None:
query_parameters['includeDeleted'] = self._serialize.query('include_deleted', include_deleted, 'bool')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='608aac0a-32e1-4493-a863-b9cf4566d257',
version='7.0-preview.3',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('CommentList', response)
def update_comment(self, request, project, work_item_id, comment_id):
"""UpdateComment.
[Preview API] Update a comment on a work item.
:param :class:`<CommentUpdate> <azure.devops.v7_0.work_item_tracking.models.CommentUpdate>` request: Comment update request.
:param str project: Project ID or project name
:param int work_item_id: Id of a work item.
:param int comment_id:
:rtype: :class:`<Comment> <azure.devops.v7_0.work_item_tracking.models.Comment>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if work_item_id is not None:
route_values['workItemId'] = self._serialize.url('work_item_id', work_item_id, 'int')
if comment_id is not None:
route_values['commentId'] = self._serialize.url('comment_id', comment_id, 'int')
content = self._serialize.body(request, 'CommentUpdate')
response = self._send(http_method='PATCH',
location_id='608aac0a-32e1-4493-a863-b9cf4566d257',
version='7.0-preview.3',
route_values=route_values,
content=content)
return self._deserialize('Comment', response)
def create_comment_reaction(self, project, work_item_id, comment_id, reaction_type):
"""CreateCommentReaction.
[Preview API] Adds a new reaction to a comment.
:param str project: Project ID or project name
:param int work_item_id: WorkItem ID
:param int comment_id: Comment ID
:param CommentReactionType reaction_type: Type of the reaction
:rtype: :class:`<CommentReaction> <azure.devops.v7_0.work_item_tracking.models.CommentReaction>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if work_item_id is not None:
route_values['workItemId'] = self._serialize.url('work_item_id', work_item_id, 'int')
if comment_id is not None:
route_values['commentId'] = self._serialize.url('comment_id', comment_id, 'int')
if reaction_type is not None:
route_values['reactionType'] = self._serialize.url('reaction_type', reaction_type, 'CommentReactionType')
response = self._send(http_method='PUT',
location_id='f6cb3f27-1028-4851-af96-887e570dc21f',
version='7.0-preview.1',
route_values=route_values)
return self._deserialize('CommentReaction', response)
def delete_comment_reaction(self, project, work_item_id, comment_id, reaction_type):
"""DeleteCommentReaction.
[Preview API] Deletes an existing reaction on a comment.
:param str project: Project ID or project name
:param int work_item_id: WorkItem ID
:param int comment_id: Comment ID
:param CommentReactionType reaction_type: Type of the reaction
:rtype: :class:`<CommentReaction> <azure.devops.v7_0.work_item_tracking.models.CommentReaction>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if work_item_id is not None:
route_values['workItemId'] = self._serialize.url('work_item_id', work_item_id, 'int')
if comment_id is not None:
route_values['commentId'] = self._serialize.url('comment_id', comment_id, 'int')
if reaction_type is not None:
route_values['reactionType'] = self._serialize.url('reaction_type', reaction_type, 'CommentReactionType')
response = self._send(http_method='DELETE',
location_id='f6cb3f27-1028-4851-af96-887e570dc21f',
version='7.0-preview.1',
route_values=route_values)
return self._deserialize('CommentReaction', response)
def get_comment_reactions(self, project, work_item_id, comment_id):
"""GetCommentReactions.
[Preview API] Gets reactions of a comment.
:param str project: Project ID or project name
:param int work_item_id: WorkItem ID
:param int comment_id: Comment ID
:rtype: [CommentReaction]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if work_item_id is not None:
route_values['workItemId'] = self._serialize.url('work_item_id', work_item_id, 'int')
if comment_id is not None:
route_values['commentId'] = self._serialize.url('comment_id', comment_id, 'int')
response = self._send(http_method='GET',
location_id='f6cb3f27-1028-4851-af96-887e570dc21f',
version='7.0-preview.1',
route_values=route_values)
return self._deserialize('[CommentReaction]', self._unwrap_collection(response))
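# --- Usage sketch (comments only): comment reactions ------------------------
# CommentReactionType values include 'like', 'dislike', 'heart', 'hooray',
# 'smile' and 'confused'. Adding a reaction and listing who reacted (ids are
# invented):
#
#   client.create_comment_reaction('MyProject', 1234, 5678, 'like')
#   users = client.get_engaged_users('MyProject', 1234, 5678, 'like')
#   print([u.display_name for u in users])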
def get_comment_version(self, project, work_item_id, comment_id, version):
"""GetCommentVersion.
[Preview API]
:param str project: Project ID or project name
:param int work_item_id:
:param int comment_id:
:param int version:
:rtype: :class:`<CommentVersion> <azure.devops.v7_0.work_item_tracking.models.CommentVersion>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if work_item_id is not None:
route_values['workItemId'] = self._serialize.url('work_item_id', work_item_id, 'int')
if comment_id is not None:
route_values['commentId'] = self._serialize.url('comment_id', comment_id, 'int')
if version is not None:
route_values['version'] = self._serialize.url('version', version, 'int')
response = self._send(http_method='GET',
location_id='49e03b34-3be0-42e3-8a5d-e8dfb88ac954',
version='7.0-preview.1',
route_values=route_values)
return self._deserialize('CommentVersion', response)
def get_comment_versions(self, project, work_item_id, comment_id):
"""GetCommentVersions.
[Preview API]
:param str project: Project ID or project name
:param int work_item_id:
:param int comment_id:
:rtype: [CommentVersion]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if work_item_id is not None:
route_values['workItemId'] = self._serialize.url('work_item_id', work_item_id, 'int')
if comment_id is not None:
route_values['commentId'] = self._serialize.url('comment_id', comment_id, 'int')
response = self._send(http_method='GET',
location_id='49e03b34-3be0-42e3-8a5d-e8dfb88ac954',
version='7.0-preview.1',
route_values=route_values)
return self._deserialize('[CommentVersion]', self._unwrap_collection(response))
def create_field(self, work_item_field, project=None):
"""CreateField.
Create a new field.
:param :class:`<WorkItemField> <azure.devops.v7_0.work_item_tracking.models.WorkItemField>` work_item_field: New field definition
:param str project: Project ID or project name
:rtype: :class:`<WorkItemField> <azure.devops.v7_0.work_item_tracking.models.WorkItemField>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(work_item_field, 'WorkItemField')
response = self._send(http_method='POST',
location_id='b51fd764-e5c2-4b9b-aaf7-3395cf4bdd94',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('WorkItemField', response)
def delete_field(self, field_name_or_ref_name, project=None):
"""DeleteField.
Deletes the field. To undelete a field, see the "Update Field" API.
:param str field_name_or_ref_name: Field simple name or reference name
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if field_name_or_ref_name is not None:
route_values['fieldNameOrRefName'] = self._serialize.url('field_name_or_ref_name', field_name_or_ref_name, 'str')
self._send(http_method='DELETE',
location_id='b51fd764-e5c2-4b9b-aaf7-3395cf4bdd94',
version='7.0',
route_values=route_values)
def get_field(self, field_name_or_ref_name, project=None):
"""GetField.
Gets information on a specific field.
:param str field_name_or_ref_name: Field simple name or reference name
:param str project: Project ID or project name
:rtype: :class:`<WorkItemField> <azure.devops.v7_0.work_item_tracking.models.WorkItemField>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if field_name_or_ref_name is not None:
route_values['fieldNameOrRefName'] = self._serialize.url('field_name_or_ref_name', field_name_or_ref_name, 'str')
response = self._send(http_method='GET',
location_id='b51fd764-e5c2-4b9b-aaf7-3395cf4bdd94',
version='7.0',
route_values=route_values)
return self._deserialize('WorkItemField', response)
def get_fields(self, project=None, expand=None):
"""GetFields.
Returns information for all fields. The project ID/name parameter is optional.
:param str project: Project ID or project name
:param str expand: Use ExtensionFields to include extension fields, otherwise exclude them. Unless the feature flag for this parameter is enabled, extension fields are always included.
:rtype: [WorkItemField]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='b51fd764-e5c2-4b9b-aaf7-3395cf4bdd94',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[WorkItemField]', self._unwrap_collection(response))
def update_field(self, payload, field_name_or_ref_name, project=None):
"""UpdateField.
Update a field.
:param :class:`<UpdateWorkItemField> <azure.devops.v7_0.work_item_tracking.models.UpdateWorkItemField>` payload: Payload contains desired value of the field's properties
:param str field_name_or_ref_name: Name/reference name of the field to be updated
:param str project: Project ID or project name
:rtype: :class:`<WorkItemField> <azure.devops.v7_0.work_item_tracking.models.WorkItemField>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if field_name_or_ref_name is not None:
route_values['fieldNameOrRefName'] = self._serialize.url('field_name_or_ref_name', field_name_or_ref_name, 'str')
content = self._serialize.body(payload, 'UpdateWorkItemField')
response = self._send(http_method='PATCH',
location_id='b51fd764-e5c2-4b9b-aaf7-3395cf4bdd94',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('WorkItemField', response)
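# --- Usage sketch (comments only): custom fields -----------------------------
# Creating a custom integer field, then undeleting a field via update_field
# (the route the DeleteField docstring above points to). The reference name
# below is invented.
#
#   field = client.create_field(models.WorkItemField(
#       name='Risk Score',
#       reference_name='Custom.RiskScore',
#       type='integer',
#       usage='workItem'))
#   client.update_field(models.UpdateWorkItemField(is_deleted=False),
#                       'Custom.RiskScore')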
def migrate_projects_process(self, new_process, project):
"""MigrateProjectsProcess.
Migrates a project to a different process within the same OOB type. For example, you can only migrate a project from agile/custom-agile to agile/custom-agile.
:param :class:`<ProcessIdModel> <azure.devops.v7_0.work_item_tracking.models.ProcessIdModel>` new_process:
:param str project: Project ID or project name
:rtype: :class:`<ProcessMigrationResultModel> <azure.devops.v7_0.work_item_tracking.models.ProcessMigrationResultModel>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(new_process, 'ProcessIdModel')
response = self._send(http_method='POST',
location_id='19801631-d4e5-47e9-8166-0330de0ff1e6',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('ProcessMigrationResultModel', response)
def create_query(self, posted_query, project, query, validate_wiql_only=None):
"""CreateQuery.
Creates a query, or moves a query.
:param :class:`<QueryHierarchyItem> <azure.devops.v7_0.work_item_tracking.models.QueryHierarchyItem>` posted_query: The query to create.
:param str project: Project ID or project name
:param str query: The parent id or path under which the query is to be created.
:param bool validate_wiql_only: If you only want to validate your WIQL query without actually creating one, set it to true. Default is false.
:rtype: :class:`<QueryHierarchyItem> <azure.devops.v7_0.work_item_tracking.models.QueryHierarchyItem>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if query is not None:
route_values['query'] = self._serialize.url('query', query, 'str')
query_parameters = {}
if validate_wiql_only is not None:
query_parameters['validateWiqlOnly'] = self._serialize.query('validate_wiql_only', validate_wiql_only, 'bool')
content = self._serialize.body(posted_query, 'QueryHierarchyItem')
response = self._send(http_method='POST',
location_id='a67d190c-c41f-424b-814d-0e906f659301',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('QueryHierarchyItem', response)
def delete_query(self, project, query):
"""DeleteQuery.
Delete a query or a folder. This deletes any permission change on the deleted query or folder and any of its descendants if it is a folder. It is important to note that the deleted permission changes cannot be recovered upon undeleting the query or folder.
:param str project: Project ID or project name
:param str query: ID or path of the query or folder to delete.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if query is not None:
route_values['query'] = self._serialize.url('query', query, 'str')
self._send(http_method='DELETE',
location_id='a67d190c-c41f-424b-814d-0e906f659301',
version='7.0',
route_values=route_values)
def get_queries(self, project, expand=None, depth=None, include_deleted=None):
"""GetQueries.
Gets the root queries and their children
:param str project: Project ID or project name
:param str expand: Include the query string (wiql), clauses, query result columns, and sort options in the results.
:param int depth: In the folder of queries, return child queries and folders to this depth.
:param bool include_deleted: Include deleted queries and folders
:rtype: [QueryHierarchyItem]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
if depth is not None:
query_parameters['$depth'] = self._serialize.query('depth', depth, 'int')
if include_deleted is not None:
query_parameters['$includeDeleted'] = self._serialize.query('include_deleted', include_deleted, 'bool')
response = self._send(http_method='GET',
location_id='a67d190c-c41f-424b-814d-0e906f659301',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[QueryHierarchyItem]', self._unwrap_collection(response))
def get_query(self, project, query, expand=None, depth=None, include_deleted=None, use_iso_date_format=None):
"""GetQuery.
Retrieves an individual query and its children
:param str project: Project ID or project name
:param str query: ID or path of the query.
:param str expand: Include the query string (wiql), clauses, query result columns, and sort options in the results.
:param int depth: In the folder of queries, return child queries and folders to this depth.
:param bool include_deleted: Include deleted queries and folders
:param bool use_iso_date_format: DateTime query clauses will be formatted using a ISO 8601 compliant format
:rtype: :class:`<QueryHierarchyItem> <azure.devops.v7_0.work_item_tracking.models.QueryHierarchyItem>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if query is not None:
route_values['query'] = self._serialize.url('query', query, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
if depth is not None:
query_parameters['$depth'] = self._serialize.query('depth', depth, 'int')
if include_deleted is not None:
query_parameters['$includeDeleted'] = self._serialize.query('include_deleted', include_deleted, 'bool')
if use_iso_date_format is not None:
query_parameters['$useIsoDateFormat'] = self._serialize.query('use_iso_date_format', use_iso_date_format, 'bool')
response = self._send(http_method='GET',
location_id='a67d190c-c41f-424b-814d-0e906f659301',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('QueryHierarchyItem', response)
def search_queries(self, project, filter, top=None, expand=None, include_deleted=None):
"""SearchQueries.
Searches all queries the user has access to in the current project
:param str project: Project ID or project name
:param str filter: The text to filter the queries with.
:param int top: The number of queries to return (Default is 50 and maximum is 200).
:param str expand:
:param bool include_deleted: Include deleted queries and folders
:rtype: :class:`<QueryHierarchyItemsResult> <azure.devops.v7_0.work_item_tracking.models.QueryHierarchyItemsResult>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if filter is not None:
query_parameters['$filter'] = self._serialize.query('filter', filter, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
if include_deleted is not None:
query_parameters['$includeDeleted'] = self._serialize.query('include_deleted', include_deleted, 'bool')
response = self._send(http_method='GET',
location_id='a67d190c-c41f-424b-814d-0e906f659301',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('QueryHierarchyItemsResult', response)
def update_query(self, query_update, project, query, undelete_descendants=None):
"""UpdateQuery.
Update a query or a folder. This allows you to update, rename and move queries and folders.
:param :class:`<QueryHierarchyItem> <azure.devops.v7_0.work_item_tracking.models.QueryHierarchyItem>` query_update: The query to update.
:param str project: Project ID or project name
:param str query: The ID or path for the query to update.
:param bool undelete_descendants: Undelete the children of this folder. It is important to note that this will not bring back the permission changes that were previously applied to the descendants.
:rtype: :class:`<QueryHierarchyItem> <azure.devops.v7_0.work_item_tracking.models.QueryHierarchyItem>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if query is not None:
route_values['query'] = self._serialize.url('query', query, 'str')
query_parameters = {}
if undelete_descendants is not None:
query_parameters['$undeleteDescendants'] = self._serialize.query('undelete_descendants', undelete_descendants, 'bool')
content = self._serialize.body(query_update, 'QueryHierarchyItem')
response = self._send(http_method='PATCH',
location_id='a67d190c-c41f-424b-814d-0e906f659301',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('QueryHierarchyItem', response)
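# --- Usage sketch (comments only): creating and renaming queries ------------
# A folder under 'My Queries', a WIQL query inside it, then a rename through
# update_query. Paths and WIQL text are invented.
#
#   folder = client.create_query(
#       models.QueryHierarchyItem(name='Triage', is_folder=True),
#       project='MyProject', query='My Queries')
#   q = client.create_query(
#       models.QueryHierarchyItem(
#           name='Active Bugs',
#           wiql="SELECT [System.Id] FROM WorkItems "
#                "WHERE [System.WorkItemType] = 'Bug' "
#                "AND [System.State] = 'Active'"),
#       project='MyProject', query='My Queries/Triage')
#   client.update_query(models.QueryHierarchyItem(name='Open Bugs'),
#                       project='MyProject', query=q.id)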
def get_queries_batch(self, query_get_request, project):
"""GetQueriesBatch.
Gets a list of queries by ids (Maximum 1000)
:param :class:`<QueryBatchGetRequest> <azure.devops.v7_0.work_item_tracking.models.QueryBatchGetRequest>` query_get_request:
:param str project: Project ID or project name
:rtype: [QueryHierarchyItem]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(query_get_request, 'QueryBatchGetRequest')
response = self._send(http_method='POST',
location_id='549816f9-09b0-4e75-9e81-01fbfcd07426',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('[QueryHierarchyItem]', self._unwrap_collection(response))
def destroy_work_item(self, id, project=None):
"""DestroyWorkItem.
Destroys the specified work item permanently from the Recycle Bin. This action cannot be undone.
:param int id: ID of the work item to be destroyed permanently
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
self._send(http_method='DELETE',
location_id='b70d8d39-926c-465e-b927-b1bf0e5ca0e0',
version='7.0',
route_values=route_values)
def get_deleted_work_item(self, id, project=None):
"""GetDeletedWorkItem.
Gets a deleted work item from Recycle Bin.
:param int id: ID of the work item to be returned
:param str project: Project ID or project name
:rtype: :class:`<WorkItemDelete> <azure.devops.v7_0.work_item_tracking.models.WorkItemDelete>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
response = self._send(http_method='GET',
location_id='b70d8d39-926c-465e-b927-b1bf0e5ca0e0',
version='7.0',
route_values=route_values)
return self._deserialize('WorkItemDelete', response)
def get_deleted_work_items(self, ids, project=None):
"""GetDeletedWorkItems.
Gets the work items from the recycle bin, whose IDs have been specified in the parameters
:param [int] ids: Comma separated list of IDs of the deleted work items to be returned
:param str project: Project ID or project name
:rtype: [WorkItemDeleteReference]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if ids is not None:
ids = ",".join(map(str, ids))
query_parameters['ids'] = self._serialize.query('ids', ids, 'str')
response = self._send(http_method='GET',
location_id='b70d8d39-926c-465e-b927-b1bf0e5ca0e0',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[WorkItemDeleteReference]', self._unwrap_collection(response))
def get_deleted_work_item_shallow_references(self, project=None):
"""GetDeletedWorkItemShallowReferences.
Gets a list of the IDs and the URLs of the deleted work items in the Recycle Bin.
:param str project: Project ID or project name
:rtype: [WorkItemDeleteShallowReference]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='b70d8d39-926c-465e-b927-b1bf0e5ca0e0',
version='7.0',
route_values=route_values)
return self._deserialize('[WorkItemDeleteShallowReference]', self._unwrap_collection(response))
def restore_work_item(self, payload, id, project=None):
"""RestoreWorkItem.
Restores the deleted work item from Recycle Bin.
:param :class:`<WorkItemDeleteUpdate> <azure.devops.v7_0.work_item_tracking.models.WorkItemDeleteUpdate>` payload: Payload with instructions to update the IsDeleted flag to false
:param int id: ID of the work item to be restored
:param str project: Project ID or project name
:rtype: :class:`<WorkItemDelete> <azure.devops.v7_0.work_item_tracking.models.WorkItemDelete>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
content = self._serialize.body(payload, 'WorkItemDeleteUpdate')
response = self._send(http_method='PATCH',
location_id='b70d8d39-926c-465e-b927-b1bf0e5ca0e0',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('WorkItemDelete', response)
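# --- Usage sketch (comments only): Recycle Bin restore -----------------------
# Restoring a deleted work item flips its IsDeleted flag via the payload, as
# the RestoreWorkItem docstring describes. The work item id is invented.
#
#   deleted = client.get_deleted_work_item(1234, project='MyProject')
#   restored = client.restore_work_item(
#       models.WorkItemDeleteUpdate(is_deleted=False), 1234, project='MyProject')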
def get_revision(self, id, revision_number, project=None, expand=None):
"""GetRevision.
Returns a fully hydrated work item for the requested revision
:param int id:
:param int revision_number:
:param str project: Project ID or project name
:param str expand:
:rtype: :class:`<WorkItem> <azure.devops.v7_0.work_item_tracking.models.WorkItem>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
if revision_number is not None:
route_values['revisionNumber'] = self._serialize.url('revision_number', revision_number, 'int')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='a00c85a5-80fa-4565-99c3-bcd2181434bb',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('WorkItem', response)
def get_revisions(self, id, project=None, top=None, skip=None, expand=None):
"""GetRevisions.
Returns the list of fully hydrated work item revisions, paged.
:param int id:
:param str project: Project ID or project name
:param int top:
:param int skip:
:param str expand:
:rtype: [WorkItem]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
query_parameters = {}
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='a00c85a5-80fa-4565-99c3-bcd2181434bb',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[WorkItem]', self._unwrap_collection(response))
def send_mail(self, body, project=None):
"""SendMail.
RESTful method to send mail for selected/queried work items.
:param :class:`<SendMailBody> <azure.devops.v7_0.work_item_tracking.models.SendMailBody>` body:
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(body, 'SendMailBody')
self._send(http_method='POST',
location_id='12438500-2f84-4fa7-9f1a-c31871b4959d',
version='7.0',
route_values=route_values,
content=content)
def delete_tag(self, project, tag_id_or_name):
"""DeleteTag.
:param str project: Project ID or project name
:param str tag_id_or_name:
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if tag_id_or_name is not None:
route_values['tagIdOrName'] = self._serialize.url('tag_id_or_name', tag_id_or_name, 'str')
self._send(http_method='DELETE',
location_id='bc15bc60-e7a8-43cb-ab01-2106be3983a1',
version='7.0',
route_values=route_values)
def get_tag(self, project, tag_id_or_name):
"""GetTag.
:param str project: Project ID or project name
:param str tag_id_or_name:
:rtype: :class:`<WorkItemTagDefinition> <azure.devops.v7_0.work_item_tracking.models.WorkItemTagDefinition>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if tag_id_or_name is not None:
route_values['tagIdOrName'] = self._serialize.url('tag_id_or_name', tag_id_or_name, 'str')
response = self._send(http_method='GET',
location_id='bc15bc60-e7a8-43cb-ab01-2106be3983a1',
version='7.0',
route_values=route_values)
return self._deserialize('WorkItemTagDefinition', response)
def get_tags(self, project):
"""GetTags.
:param str project: Project ID or project name
:rtype: [WorkItemTagDefinition]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='bc15bc60-e7a8-43cb-ab01-2106be3983a1',
version='7.0',
route_values=route_values)
return self._deserialize('[WorkItemTagDefinition]', self._unwrap_collection(response))
def update_tag(self, tag_data, project, tag_id_or_name):
"""UpdateTag.
:param :class:`<WorkItemTagDefinition> <azure.devops.v7_0.work_item_tracking.models.WorkItemTagDefinition>` tag_data:
:param str project: Project ID or project name
:param str tag_id_or_name:
:rtype: :class:`<WorkItemTagDefinition> <azure.devops.v7_0.work_item_tracking.models.WorkItemTagDefinition>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if tag_id_or_name is not None:
route_values['tagIdOrName'] = self._serialize.url('tag_id_or_name', tag_id_or_name, 'str')
content = self._serialize.body(tag_data, 'WorkItemTagDefinition')
response = self._send(http_method='PATCH',
location_id='bc15bc60-e7a8-43cb-ab01-2106be3983a1',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('WorkItemTagDefinition', response)
def create_template(self, template, team_context):
"""CreateTemplate.
Creates a template
:param :class:`<WorkItemTemplate> <azure.devops.v7_0.work_item_tracking.models.WorkItemTemplate>` template: Template contents
:param :class:`<TeamContext> <azure.devops.v7_0.work_item_tracking.models.TeamContext>` team_context: The team context for the operation
:rtype: :class:`<WorkItemTemplate> <azure.devops.v7_0.work_item_tracking.models.WorkItemTemplate>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
content = self._serialize.body(template, 'WorkItemTemplate')
response = self._send(http_method='POST',
location_id='6a90345f-a676-4969-afce-8e163e1d5642',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('WorkItemTemplate', response)
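# --- Usage sketch (comments only): team-scoped templates ---------------------
# The template APIs resolve project/team from a TeamContext model from this
# package; the project, team, and field values below are invented.
#
#   ctx = models.TeamContext(project='MyProject', team='MyProject Team')
#   template = client.create_template(
#       models.WorkItemTemplate(name='Standard Bug',
#                               work_item_type_name='Bug',
#                               fields={'System.Tags': 'triage'}),
#       ctx)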
def get_templates(self, team_context, workitemtypename=None):
"""GetTemplates.
Gets templates.
:param :class:`<TeamContext> <azure.devops.v7_0.work_item_tracking.models.TeamContext>` team_context: The team context for the operation
:param str workitemtypename: Optional. When specified, returns templates for the given work item type.
:rtype: [WorkItemTemplateReference]
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
query_parameters = {}
if workitemtypename is not None:
query_parameters['workitemtypename'] = self._serialize.query('workitemtypename', workitemtypename, 'str')
response = self._send(http_method='GET',
location_id='6a90345f-a676-4969-afce-8e163e1d5642',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[WorkItemTemplateReference]', self._unwrap_collection(response))
def delete_template(self, team_context, template_id):
"""DeleteTemplate.
Deletes the template with the given id
:param :class:`<TeamContext> <azure.devops.v7_0.work_item_tracking.models.TeamContext>` team_context: The team context for the operation
:param str template_id: Template id
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if template_id is not None:
route_values['templateId'] = self._serialize.url('template_id', template_id, 'str')
self._send(http_method='DELETE',
location_id='fb10264a-8836-48a0-8033-1b0ccd2748d5',
version='7.0',
route_values=route_values)
def get_template(self, team_context, template_id):
"""GetTemplate.
Gets the template with the specified id
:param :class:`<TeamContext> <azure.devops.v7_0.work_item_tracking.models.TeamContext>` team_context: The team context for the operation
:param str template_id: Template Id
:rtype: :class:`<WorkItemTemplate> <azure.devops.v7_0.work_item_tracking.models.WorkItemTemplate>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if template_id is not None:
route_values['templateId'] = self._serialize.url('template_id', template_id, 'str')
response = self._send(http_method='GET',
location_id='fb10264a-8836-48a0-8033-1b0ccd2748d5',
version='7.0',
route_values=route_values)
return self._deserialize('WorkItemTemplate', response)
def replace_template(self, template_content, team_context, template_id):
"""ReplaceTemplate.
Replace template contents
:param :class:`<WorkItemTemplate> <azure.devops.v7_0.work_item_tracking.models.WorkItemTemplate>` template_content: Template contents to replace with
:param :class:`<TeamContext> <azure.devops.v7_0.work_item_tracking.models.TeamContext>` team_context: The team context for the operation
:param str template_id: Template id
:rtype: :class:`<WorkItemTemplate> <azure.devops.v7_0.work_item_tracking.models.WorkItemTemplate>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if template_id is not None:
route_values['templateId'] = self._serialize.url('template_id', template_id, 'str')
content = self._serialize.body(template_content, 'WorkItemTemplate')
response = self._send(http_method='PUT',
location_id='fb10264a-8836-48a0-8033-1b0ccd2748d5',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('WorkItemTemplate', response)
def get_update(self, id, update_number, project=None):
"""GetUpdate.
Returns a single update for a work item
:param int id:
:param int update_number:
:param str project: Project ID or project name
:rtype: :class:`<WorkItemUpdate> <azure.devops.v7_0.work_item_tracking.models.WorkItemUpdate>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
if update_number is not None:
route_values['updateNumber'] = self._serialize.url('update_number', update_number, 'int')
response = self._send(http_method='GET',
location_id='6570bf97-d02c-4a91-8d93-3abe9895b1a9',
version='7.0',
route_values=route_values)
return self._deserialize('WorkItemUpdate', response)
def get_updates(self, id, project=None, top=None, skip=None):
"""GetUpdates.
Returns the deltas between work item revisions
:param int id:
:param str project: Project ID or project name
:param int top:
:param int skip:
:rtype: [WorkItemUpdate]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
query_parameters = {}
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
response = self._send(http_method='GET',
location_id='6570bf97-d02c-4a91-8d93-3abe9895b1a9',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[WorkItemUpdate]', self._unwrap_collection(response))
def query_by_wiql(self, wiql, team_context=None, time_precision=None, top=None):
"""QueryByWiql.
Gets the results of the query given its WIQL.
:param :class:`<Wiql> <azure.devops.v7_0.work_item_tracking.models.Wiql>` wiql: The query containing the WIQL.
:param :class:`<TeamContext> <azure.devops.v7_0.work_item_tracking.models.TeamContext>` team_context: The team context for the operation
:param bool time_precision: Whether or not to use time precision.
:param int top: The max number of results to return.
:rtype: :class:`<WorkItemQueryResult> <azure.devops.v7_0.work_item_tracking.models.WorkItemQueryResult>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
query_parameters = {}
if time_precision is not None:
query_parameters['timePrecision'] = self._serialize.query('time_precision', time_precision, 'bool')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
content = self._serialize.body(wiql, 'Wiql')
response = self._send(http_method='POST',
location_id='1a9c53f7-f243-4447-b110-35ef023636e4',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('WorkItemQueryResult', response)
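# --- Usage sketch (comments only): ad-hoc WIQL queries -----------------------
# A flat WIQL query populates result.work_items; hierarchical (link) queries
# populate result.work_item_relations instead. The WIQL below is invented.
#
#   wiql = models.Wiql(query=(
#       "SELECT [System.Id] FROM WorkItems "
#       "WHERE [System.AssignedTo] = @Me "
#       "ORDER BY [System.ChangedDate] DESC"))
#   result = client.query_by_wiql(wiql, top=20)
#   ids = [ref.id for ref in result.work_items]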
def get_query_result_count(self, id, team_context=None, time_precision=None, top=None):
"""GetQueryResultCount.
Gets the results of the query given the query ID.
:param str id: The query ID.
:param :class:`<TeamContext> <azure.devops.v7_0.work_item_tracking.models.TeamContext>` team_context: The team context for the operation
:param bool time_precision: Whether or not to use time precision.
:param int top: The max number of results to return.
:rtype: int
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'str')
query_parameters = {}
if time_precision is not None:
query_parameters['timePrecision'] = self._serialize.query('time_precision', time_precision, 'bool')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
response = self._send(http_method='HEAD',
location_id='a02355f5-5f8a-4671-8e32-369d23aac83d',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('int', response)
def query_by_id(self, id, team_context=None, time_precision=None, top=None):
"""QueryById.
Gets the results of the query given the query ID.
:param str id: The query ID.
:param :class:`<TeamContext> <azure.devops.v7_0.work_item_tracking.models.TeamContext>` team_context: The team context for the operation
:param bool time_precision: Whether or not to use time precision.
:param int top: The max number of results to return.
:rtype: :class:`<WorkItemQueryResult> <azure.devops.v7_0.work_item_tracking.models.WorkItemQueryResult>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'str')
query_parameters = {}
if time_precision is not None:
query_parameters['timePrecision'] = self._serialize.query('time_precision', time_precision, 'bool')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
response = self._send(http_method='GET',
location_id='a02355f5-5f8a-4671-8e32-369d23aac83d',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('WorkItemQueryResult', response)
def get_work_item_icon_json(self, icon, color=None, v=None):
"""GetWorkItemIconJson.
Get a work item icon given the friendly name and icon color.
:param str icon: The name of the icon
:param str color: The 6-digit hex color for the icon
:param int v: The version of the icon (used only for cache invalidation)
:rtype: :class:`<WorkItemIcon> <azure.devops.v7_0.work_item_tracking.models.WorkItemIcon>`
"""
route_values = {}
if icon is not None:
route_values['icon'] = self._serialize.url('icon', icon, 'str')
query_parameters = {}
if color is not None:
query_parameters['color'] = self._serialize.query('color', color, 'str')
if v is not None:
query_parameters['v'] = self._serialize.query('v', v, 'int')
response = self._send(http_method='GET',
location_id='4e1eb4a5-1970-4228-a682-ec48eb2dca30',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('WorkItemIcon', response)
def get_work_item_icons(self):
"""GetWorkItemIcons.
Get a list of all work item icons.
:rtype: [WorkItemIcon]
"""
response = self._send(http_method='GET',
location_id='4e1eb4a5-1970-4228-a682-ec48eb2dca30',
version='7.0')
return self._deserialize('[WorkItemIcon]', self._unwrap_collection(response))
def get_work_item_icon_svg(self, icon, color=None, v=None, **kwargs):
"""GetWorkItemIconSvg.
Get a work item icon given the friendly name and icon color.
:param str icon: The name of the icon
:param str color: The 6-digit hex color for the icon
:param int v: The version of the icon (used only for cache invalidation)
:rtype: object
"""
route_values = {}
if icon is not None:
route_values['icon'] = self._serialize.url('icon', icon, 'str')
query_parameters = {}
if color is not None:
query_parameters['color'] = self._serialize.query('color', color, 'str')
if v is not None:
query_parameters['v'] = self._serialize.query('v', v, 'int')
response = self._send(http_method='GET',
location_id='4e1eb4a5-1970-4228-a682-ec48eb2dca30',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='image/svg+xml')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
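# The return value is a byte-chunk generator from stream_download; a hedged
# save-to-file sketch ('icon_book' is one of the service's stock icon names):
#
#     with open('bug.svg', 'wb') as fh:
#         for chunk in client.get_work_item_icon_svg('icon_book',
#                                                    color='009CCC'):
#             fh.write(chunk)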
def get_work_item_icon_xaml(self, icon, color=None, v=None, **kwargs):
"""GetWorkItemIconXaml.
Get a work item icon given the friendly name and icon color.
:param str icon: The name of the icon
:param str color: The 6-digit hex color for the icon
:param int v: The version of the icon (used only for cache invalidation)
:rtype: object
"""
route_values = {}
if icon is not None:
route_values['icon'] = self._serialize.url('icon', icon, 'str')
query_parameters = {}
if color is not None:
query_parameters['color'] = self._serialize.query('color', color, 'str')
if v is not None:
query_parameters['v'] = self._serialize.query('v', v, 'int')
response = self._send(http_method='GET',
location_id='4e1eb4a5-1970-4228-a682-ec48eb2dca30',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='image/xaml+xml')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_reporting_links_by_link_type(self, project=None, link_types=None, types=None, continuation_token=None, start_date_time=None):
"""GetReportingLinksByLinkType.
Get a batch of work item links
:param str project: Project ID or project name
:param [str] link_types: A list of types to filter the results to specific link types. Omit this parameter to get work item links of all link types.
:param [str] types: A list of types to filter the results to specific work item types. Omit this parameter to get work item links of all work item types.
:param str continuation_token: Specifies the continuationToken to start the batch from. Omit this parameter to get the first batch of links.
:param datetime start_date_time: Date/time to use as a starting point for link changes. Only link changes that occurred after that date/time will be returned. Cannot be used in conjunction with 'watermark' parameter.
:rtype: :class:`<ReportingWorkItemLinksBatch> <azure.devops.v7_0.work_item_tracking.models.ReportingWorkItemLinksBatch>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if link_types is not None:
link_types = ",".join(link_types)
query_parameters['linkTypes'] = self._serialize.query('link_types', link_types, 'str')
if types is not None:
types = ",".join(types)
query_parameters['types'] = self._serialize.query('types', types, 'str')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if start_date_time is not None:
query_parameters['startDateTime'] = self._serialize.query('start_date_time', start_date_time, 'iso-8601')
response = self._send(http_method='GET',
location_id='b5b5b6d0-0308-40a1-b3f4-b9bb3c66878f',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('ReportingWorkItemLinksBatch', response)
def get_relation_type(self, relation):
"""GetRelationType.
Gets the work item relation type definition.
:param str relation: The relation name
:rtype: :class:`<WorkItemRelationType> <azure.devops.v7_0.work_item_tracking.models.WorkItemRelationType>`
"""
route_values = {}
if relation is not None:
route_values['relation'] = self._serialize.url('relation', relation, 'str')
response = self._send(http_method='GET',
location_id='f5d33bc9-5b49-4a3c-a9bd-f3cd46dd2165',
version='7.0',
route_values=route_values)
return self._deserialize('WorkItemRelationType', response)
def get_relation_types(self):
"""GetRelationTypes.
Gets the work item relation types.
:rtype: [WorkItemRelationType]
"""
response = self._send(http_method='GET',
location_id='f5d33bc9-5b49-4a3c-a9bd-f3cd46dd2165',
version='7.0')
return self._deserialize('[WorkItemRelationType]', self._unwrap_collection(response))
def read_reporting_revisions_get(self, project=None, fields=None, types=None, continuation_token=None, start_date_time=None, include_identity_ref=None, include_deleted=None, include_tag_ref=None, include_latest_only=None, expand=None, include_discussion_changes_only=None, max_page_size=None):
"""ReadReportingRevisionsGet.
Get a batch of work item revisions with the option of including deleted items
:param str project: Project ID or project name
:param [str] fields: A list of fields to return in work item revisions. Omit this parameter to get all reportable fields.
:param [str] types: A list of types to filter the results to specific work item types. Omit this parameter to get work item revisions of all work item types.
:param str continuation_token: Specifies the watermark to start the batch from. Omit this parameter to get the first batch of revisions.
:param datetime start_date_time: Date/time to use as a starting point for revisions; all revisions returned will have occurred after this date/time. Cannot be used in conjunction with 'watermark' parameter.
:param bool include_identity_ref: Return an identity reference instead of a string value for identity fields.
:param bool include_deleted: Specify if the deleted item should be returned.
:param bool include_tag_ref: Specify if the tag objects should be returned for System.Tags field.
:param bool include_latest_only: Return only the latest revisions of work items, skipping all historical revisions
:param str expand: Return all the fields in work item revisions, including long text fields which are not returned by default
:param bool include_discussion_changes_only: Return only those revisions of work items where only the history field was changed
:param int max_page_size: The maximum number of results to return in this batch
:rtype: :class:`<ReportingWorkItemRevisionsBatch> <azure.devops.v7_0.work_item_tracking.models.ReportingWorkItemRevisionsBatch>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if fields is not None:
fields = ",".join(fields)
query_parameters['fields'] = self._serialize.query('fields', fields, 'str')
if types is not None:
types = ",".join(types)
query_parameters['types'] = self._serialize.query('types', types, 'str')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if start_date_time is not None:
query_parameters['startDateTime'] = self._serialize.query('start_date_time', start_date_time, 'iso-8601')
if include_identity_ref is not None:
query_parameters['includeIdentityRef'] = self._serialize.query('include_identity_ref', include_identity_ref, 'bool')
if include_deleted is not None:
query_parameters['includeDeleted'] = self._serialize.query('include_deleted', include_deleted, 'bool')
if include_tag_ref is not None:
query_parameters['includeTagRef'] = self._serialize.query('include_tag_ref', include_tag_ref, 'bool')
if include_latest_only is not None:
query_parameters['includeLatestOnly'] = self._serialize.query('include_latest_only', include_latest_only, 'bool')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
if include_discussion_changes_only is not None:
query_parameters['includeDiscussionChangesOnly'] = self._serialize.query('include_discussion_changes_only', include_discussion_changes_only, 'bool')
if max_page_size is not None:
query_parameters['$maxPageSize'] = self._serialize.query('max_page_size', max_page_size, 'int')
response = self._send(http_method='GET',
location_id='f828fe59-dd87-495d-a17c-7a8d6211ca6c',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('ReportingWorkItemRevisionsBatch', response)
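# Paging sketch for the reporting revisions feed; ReportingWorkItemRevisionsBatch
# exposes `values`, `is_last_batch`, and `continuation_token` in this version.
# `handle` is a hypothetical callback supplied by the caller.
#
#     token = None
#     while True:
#         batch = client.read_reporting_revisions_get(
#             project=project, continuation_token=token, max_page_size=200)
#         for revision in batch.values:
#             handle(revision)
#         if batch.is_last_batch:
#             break
#         token = batch.continuation_token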
def read_reporting_revisions_post(self, filter, project=None, continuation_token=None, start_date_time=None, expand=None):
"""ReadReportingRevisionsPost.
Get a batch of work item revisions. Use this request when the list of fields is large enough that the GET request URL would exceed the length limit.
:param :class:`<ReportingWorkItemRevisionsFilter> <azure.devops.v7_0.work_item_tracking.models.ReportingWorkItemRevisionsFilter>` filter: An object that contains request settings: field filter, type filter, identity format
:param str project: Project ID or project name
:param str continuation_token: Specifies the watermark to start the batch from. Omit this parameter to get the first batch of revisions.
:param datetime start_date_time: Date/time to use as a starting point for revisions; all revisions returned will have occurred after this date/time. Cannot be used in conjunction with 'watermark' parameter.
:param str expand:
:rtype: :class:`<ReportingWorkItemRevisionsBatch> <azure.devops.v7_0.work_item_tracking.models.ReportingWorkItemRevisionsBatch>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if start_date_time is not None:
query_parameters['startDateTime'] = self._serialize.query('start_date_time', start_date_time, 'iso-8601')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
content = self._serialize.body(filter, 'ReportingWorkItemRevisionsFilter')
response = self._send(http_method='POST',
location_id='f828fe59-dd87-495d-a17c-7a8d6211ca6c',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('ReportingWorkItemRevisionsBatch', response)
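# Hedged sketch of the POST variant, which carries the field/type filter in
# the request body so long field lists do not blow the URL length limit; the
# constructor keywords mirror the GET query parameters and are assumptions
# about the ReportingWorkItemRevisionsFilter model.
#
#     from azure.devops.v7_0.work_item_tracking.models import (
#         ReportingWorkItemRevisionsFilter)
#
#     batch = client.read_reporting_revisions_post(
#         ReportingWorkItemRevisionsFilter(fields=['System.Title',
#                                                  'System.State'],
#                                          types=['Bug']),
#         project=project)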
def read_reporting_discussions(self, project=None, continuation_token=None, max_page_size=None):
"""ReadReportingDiscussions.
:param str project: Project ID or project name
:param str continuation_token: Specifies the continuationToken to start the batch from. Omit this parameter to get the first batch.
:param int max_page_size: The maximum number of results to return in this batch
:rtype: :class:`<ReportingWorkItemRevisionsBatch> <azure.devops.v7_0.work_item_tracking.models.ReportingWorkItemRevisionsBatch>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if max_page_size is not None:
query_parameters['$maxPageSize'] = self._serialize.query('max_page_size', max_page_size, 'int')
response = self._send(http_method='GET',
location_id='4a644469-90c5-4fcc-9a9f-be0827d369ec',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('ReportingWorkItemRevisionsBatch', response)
def create_work_item(self, document, project, type, validate_only=None, bypass_rules=None, suppress_notifications=None, expand=None):
"""CreateWorkItem.
Creates a single work item.
:param :class:`<[JsonPatchOperation]> <azure.devops.v7_0.work_item_tracking.models.[JsonPatchOperation]>` document: The JSON Patch document representing the work item
:param str project: Project ID or project name
:param str type: The work item type of the work item to create
:param bool validate_only: Indicate if you only want to validate the changes without saving the work item
:param bool bypass_rules: Do not enforce the work item type rules on this update
:param bool suppress_notifications: Do not fire any notifications for this change
:param str expand: The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }.
:rtype: :class:`<WorkItem> <azure.devops.v7_0.work_item_tracking.models.WorkItem>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if type is not None:
route_values['type'] = self._serialize.url('type', type, 'str')
query_parameters = {}
if validate_only is not None:
query_parameters['validateOnly'] = self._serialize.query('validate_only', validate_only, 'bool')
if bypass_rules is not None:
query_parameters['bypassRules'] = self._serialize.query('bypass_rules', bypass_rules, 'bool')
if suppress_notifications is not None:
query_parameters['suppressNotifications'] = self._serialize.query('suppress_notifications', suppress_notifications, 'bool')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
content = self._serialize.body(document, '[JsonPatchOperation]')
response = self._send(http_method='POST',
location_id='62d3d110-0047-428c-ad3c-4fe872c91c74',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content,
media_type='application/json-patch+json')
return self._deserialize('WorkItem', response)
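# The document argument is a JSON Patch list; a hedged minimal sketch using
# the JsonPatchOperation model that ships with this package (field reference
# names such as System.Title are standard):
#
#     from azure.devops.v7_0.work_item_tracking.models import JsonPatchOperation
#
#     patch = [JsonPatchOperation(op='add',
#                                 path='/fields/System.Title',
#                                 value='Investigate login failures')]
#     work_item = client.create_work_item(patch, project='MyProject',
#                                         type='Bug')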
def get_work_item_template(self, project, type, fields=None, as_of=None, expand=None):
"""GetWorkItemTemplate.
Returns a single work item from a template.
:param str project: Project ID or project name
:param str type: The work item type name
:param str fields: Comma-separated list of requested fields
:param datetime as_of: AsOf UTC date time string
:param str expand: The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }.
:rtype: :class:`<WorkItem> <azure.devops.v7_0.work_item_tracking.models.WorkItem>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if type is not None:
route_values['type'] = self._serialize.url('type', type, 'str')
query_parameters = {}
if fields is not None:
query_parameters['fields'] = self._serialize.query('fields', fields, 'str')
if as_of is not None:
query_parameters['asOf'] = self._serialize.query('as_of', as_of, 'iso-8601')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='62d3d110-0047-428c-ad3c-4fe872c91c74',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('WorkItem', response)
def delete_work_item(self, id, project=None, destroy=None):
"""DeleteWorkItem.
Deletes the specified work item and sends it to the Recycle Bin, so that it can be restored if required. Optionally, if the destroy parameter is set to true, it destroys the work item permanently. WARNING: if destroy is set to true, work items deleted by this command will NOT go to the Recycle Bin and there is no way to restore/recover them after deletion. It is recommended NOT to use this parameter; if you do, use it with extreme caution.
:param int id: ID of the work item to be deleted
:param str project: Project ID or project name
:param bool destroy: Optional parameter, if set to true, the work item is deleted permanently. Please note: the destroy action is PERMANENT and cannot be undone.
:rtype: :class:`<WorkItemDelete> <azure.devops.v7_0.work_item_tracking.models.WorkItemDelete>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
query_parameters = {}
if destroy is not None:
query_parameters['destroy'] = self._serialize.query('destroy', destroy, 'bool')
response = self._send(http_method='DELETE',
location_id='72c7ddf8-2cdc-4f60-90cd-ab71c14a399b',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('WorkItemDelete', response)
def get_work_item(self, id, project=None, fields=None, as_of=None, expand=None):
"""GetWorkItem.
Returns a single work item.
:param int id: The work item id
:param str project: Project ID or project name
:param [str] fields: Comma-separated list of requested fields
:param datetime as_of: AsOf UTC date time string
:param str expand: The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }.
:rtype: :class:`<WorkItem> <azure.devops.v7_0.work_item_tracking.models.WorkItem>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
query_parameters = {}
if fields is not None:
fields = ",".join(fields)
query_parameters['fields'] = self._serialize.query('fields', fields, 'str')
if as_of is not None:
query_parameters['asOf'] = self._serialize.query('as_of', as_of, 'iso-8601')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='72c7ddf8-2cdc-4f60-90cd-ab71c14a399b',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('WorkItem', response)
def get_work_items(self, ids, project=None, fields=None, as_of=None, expand=None, error_policy=None):
"""GetWorkItems.
Returns a list of work items (Maximum 200)
:param [int] ids: The comma-separated list of requested work item ids. (Maximum 200 ids allowed).
:param str project: Project ID or project name
:param [str] fields: Comma-separated list of requested fields
:param datetime as_of: AsOf UTC date time string
:param str expand: The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }.
:param str error_policy: The flag to control error policy in a bulk get work items request. Possible options are {Fail, Omit}.
:rtype: [WorkItem]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if ids is not None:
ids = ",".join(map(str, ids))
query_parameters['ids'] = self._serialize.query('ids', ids, 'str')
if fields is not None:
fields = ",".join(fields)
query_parameters['fields'] = self._serialize.query('fields', fields, 'str')
if as_of is not None:
query_parameters['asOf'] = self._serialize.query('as_of', as_of, 'iso-8601')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
if error_policy is not None:
query_parameters['errorPolicy'] = self._serialize.query('error_policy', error_policy, 'str')
response = self._send(http_method='GET',
location_id='72c7ddf8-2cdc-4f60-90cd-ab71c14a399b',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[WorkItem]', self._unwrap_collection(response))
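# The service caps a single call at 200 ids; a simple client-side chunking
# sketch (assumes `all_ids` may be arbitrarily long):
#
#     items = []
#     for i in range(0, len(all_ids), 200):
#         items.extend(client.get_work_items(all_ids[i:i + 200],
#                                            error_policy='Omit'))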
def update_work_item(self, document, id, project=None, validate_only=None, bypass_rules=None, suppress_notifications=None, expand=None):
"""UpdateWorkItem.
Updates a single work item.
:param :class:`<[JsonPatchOperation]> <azure.devops.v7_0.work_item_tracking.models.[JsonPatchOperation]>` document: The JSON Patch document representing the update
:param int id: The id of the work item to update
:param str project: Project ID or project name
:param bool validate_only: Indicate if you only want to validate the changes without saving the work item
:param bool bypass_rules: Do not enforce the work item type rules on this update
:param bool suppress_notifications: Do not fire any notifications for this change
:param str expand: The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }.
:rtype: :class:`<WorkItem> <azure.devops.v7_0.work_item_tracking.models.WorkItem>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
query_parameters = {}
if validate_only is not None:
query_parameters['validateOnly'] = self._serialize.query('validate_only', validate_only, 'bool')
if bypass_rules is not None:
query_parameters['bypassRules'] = self._serialize.query('bypass_rules', bypass_rules, 'bool')
if suppress_notifications is not None:
query_parameters['suppressNotifications'] = self._serialize.query('suppress_notifications', suppress_notifications, 'bool')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
content = self._serialize.body(document, '[JsonPatchOperation]')
response = self._send(http_method='PATCH',
location_id='72c7ddf8-2cdc-4f60-90cd-ab71c14a399b',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content,
media_type='application/json-patch+json')
return self._deserialize('WorkItem', response)
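# Companion sketch to create_work_item: a single JSON Patch replace moves an
# existing item to a new state (System.State is the standard reference name).
#
#     patch = [JsonPatchOperation(op='replace',
#                                 path='/fields/System.State',
#                                 value='Active')]
#     updated = client.update_work_item(patch, id=42, project='MyProject')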
def get_work_items_batch(self, work_item_get_request, project=None):
"""GetWorkItemsBatch.
Gets work items for a list of work item ids (Maximum 200)
:param :class:`<WorkItemBatchGetRequest> <azure.devops.v7_0.work_item_tracking.models.WorkItemBatchGetRequest>` work_item_get_request: The batch get request specifying the work item ids to return
:param str project: Project ID or project name
:rtype: [WorkItem]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(work_item_get_request, 'WorkItemBatchGetRequest')
response = self._send(http_method='POST',
location_id='908509b6-4248-4475-a1cd-829139ba419f',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('[WorkItem]', self._unwrap_collection(response))
def get_work_item_next_states_on_checkin_action(self, ids, action=None):
"""GetWorkItemNextStatesOnCheckinAction.
Returns the next state on the given work item IDs.
:param [int] ids: list of work item ids
:param str action: possible actions. Currently only supports checkin
:rtype: [WorkItemNextStateOnTransition]
"""
query_parameters = {}
if ids is not None:
ids = ",".join(map(str, ids))
query_parameters['ids'] = self._serialize.query('ids', ids, 'str')
if action is not None:
query_parameters['action'] = self._serialize.query('action', action, 'str')
response = self._send(http_method='GET',
location_id='afae844b-e2f6-44c2-8053-17b3bb936a40',
version='7.0',
query_parameters=query_parameters)
return self._deserialize('[WorkItemNextStateOnTransition]', self._unwrap_collection(response))
def get_work_item_type_categories(self, project):
"""GetWorkItemTypeCategories.
Get all work item type categories.
:param str project: Project ID or project name
:rtype: [WorkItemTypeCategory]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='9b9f5734-36c8-415e-ba67-f83b45c31408',
version='7.0',
route_values=route_values)
return self._deserialize('[WorkItemTypeCategory]', self._unwrap_collection(response))
def get_work_item_type_category(self, project, category):
"""GetWorkItemTypeCategory.
Get specific work item type category by name.
:param str project: Project ID or project name
:param str category: The category name
:rtype: :class:`<WorkItemTypeCategory> <azure.devops.v7_0.work_item_tracking.models.WorkItemTypeCategory>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if category is not None:
route_values['category'] = self._serialize.url('category', category, 'str')
response = self._send(http_method='GET',
location_id='9b9f5734-36c8-415e-ba67-f83b45c31408',
version='7.0',
route_values=route_values)
return self._deserialize('WorkItemTypeCategory', response)
def get_work_item_type(self, project, type):
"""GetWorkItemType.
Returns a work item type definition.
:param str project: Project ID or project name
:param str type: Work item type name
:rtype: :class:`<WorkItemType> <azure.devops.v7_0.work_item_tracking.models.WorkItemType>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if type is not None:
route_values['type'] = self._serialize.url('type', type, 'str')
response = self._send(http_method='GET',
location_id='7c8d7a76-4a09-43e8-b5df-bd792f4ac6aa',
version='7.0',
route_values=route_values)
return self._deserialize('WorkItemType', response)
def get_work_item_types(self, project):
"""GetWorkItemTypes.
Returns the list of work item types
:param str project: Project ID or project name
:rtype: [WorkItemType]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='7c8d7a76-4a09-43e8-b5df-bd792f4ac6aa',
version='7.0',
route_values=route_values)
return self._deserialize('[WorkItemType]', self._unwrap_collection(response))
def get_work_item_type_fields_with_references(self, project, type, expand=None):
"""GetWorkItemTypeFieldsWithReferences.
Get a list of fields for a work item type with detailed references.
:param str project: Project ID or project name
:param str type: Work item type.
:param str expand: Expand level for the API response. Properties: include allowedValues, default value, isRequired, etc. as part of the response; None: skip these properties.
:rtype: [WorkItemTypeFieldWithReferences]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if type is not None:
route_values['type'] = self._serialize.url('type', type, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='bd293ce5-3d25-4192-8e67-e8092e879efb',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[WorkItemTypeFieldWithReferences]', self._unwrap_collection(response))
def get_work_item_type_field_with_references(self, project, type, field, expand=None):
"""GetWorkItemTypeFieldWithReferences.
Get a field for a work item type with detailed references.
:param str project: Project ID or project name
:param str type: Work item type.
:param str field: The reference name of the field
:param str expand: Expand level for the API response. Properties: include allowedValues, default value, isRequired, etc. as part of the response; None: skip these properties.
:rtype: :class:`<WorkItemTypeFieldWithReferences> <azure.devops.v7_0.work_item_tracking.models.WorkItemTypeFieldWithReferences>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if type is not None:
route_values['type'] = self._serialize.url('type', type, 'str')
if field is not None:
route_values['field'] = self._serialize.url('field', field, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='bd293ce5-3d25-4192-8e67-e8092e879efb',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('WorkItemTypeFieldWithReferences', response)
def get_work_item_type_states(self, project, type):
"""GetWorkItemTypeStates.
Returns the state names and colors for a work item type.
:param str project: Project ID or project name
:param str type: The work item type name
:rtype: [WorkItemStateColor]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if type is not None:
route_values['type'] = self._serialize.url('type', type, 'str')
response = self._send(http_method='GET',
location_id='7c9d7a76-4a09-43e8-b5df-bd792f4ac6aa',
version='7.0',
route_values=route_values)
return self._deserialize('[WorkItemStateColor]', self._unwrap_collection(response))
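# Quick sketch: listing the state colors for a work item type; the
# WorkItemStateColor model exposes `name`, `color`, and `category`.
#
#     for state in client.get_work_item_type_states('MyProject', 'Bug'):
#         print(state.name, state.color, state.category)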
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/work_item_tracking/work_item_tracking_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/work_item_tracking/work_item_tracking_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 50934
}
| 406 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class AgentPoolQueue(Model):
"""
Represents a queue for running builds.
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.build.models.ReferenceLinks>`
:param id: The ID of the queue.
:type id: int
:param name: The name of the queue.
:type name: str
:param pool: The pool used by this queue.
:type pool: :class:`TaskAgentPoolReference <azure.devops.v7_1.build.models.TaskAgentPoolReference>`
:param url: The full http link to the resource.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'pool': {'key': 'pool', 'type': 'TaskAgentPoolReference'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, id=None, name=None, pool=None, url=None):
super(AgentPoolQueue, self).__init__()
self._links = _links
self.id = id
self.name = name
self.pool = pool
self.url = url
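# A hedged sketch of how these Model subclasses round-trip through msrest,
# the same machinery the generated clients use via self._serialize /
# self._deserialize; the two-entry payload reflects the _attribute_map above.
#
#     from msrest import Serializer, Deserializer
#
#     models = {'AgentPoolQueue': AgentPoolQueue}
#     payload = Serializer(models).body(
#         AgentPoolQueue(id=7, name='Default'), 'AgentPoolQueue')
#     # payload == {'id': 7, 'name': 'Default'}
#     queue = Deserializer(models)('AgentPoolQueue', payload)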
class AgentSpecification(Model):
"""
Specification of the agent defined by the pool provider.
:param identifier: Agent specification unique identifier.
:type identifier: str
"""
_attribute_map = {
'identifier': {'key': 'identifier', 'type': 'str'}
}
def __init__(self, identifier=None):
super(AgentSpecification, self).__init__()
self.identifier = identifier
class AggregatedResultsAnalysis(Model):
"""
:param duration:
:type duration: object
:param not_reported_results_by_outcome:
:type not_reported_results_by_outcome: dict
:param previous_context:
:type previous_context: :class:`TestResultsContext <azure.devops.v7_1.microsoft._team_foundation._test_management._web_api.models.TestResultsContext>`
:param results_by_outcome:
:type results_by_outcome: dict
:param results_difference:
:type results_difference: :class:`AggregatedResultsDifference <azure.devops.v7_1.microsoft._team_foundation._test_management._web_api.models.AggregatedResultsDifference>`
:param run_summary_by_outcome:
:type run_summary_by_outcome: dict
:param run_summary_by_state:
:type run_summary_by_state: dict
:param total_tests:
:type total_tests: int
"""
_attribute_map = {
'duration': {'key': 'duration', 'type': 'object'},
'not_reported_results_by_outcome': {'key': 'notReportedResultsByOutcome', 'type': '{AggregatedResultsByOutcome}'},
'previous_context': {'key': 'previousContext', 'type': 'TestResultsContext'},
'results_by_outcome': {'key': 'resultsByOutcome', 'type': '{AggregatedResultsByOutcome}'},
'results_difference': {'key': 'resultsDifference', 'type': 'AggregatedResultsDifference'},
'run_summary_by_outcome': {'key': 'runSummaryByOutcome', 'type': '{AggregatedRunsByOutcome}'},
'run_summary_by_state': {'key': 'runSummaryByState', 'type': '{AggregatedRunsByState}'},
'total_tests': {'key': 'totalTests', 'type': 'int'}
}
def __init__(self, duration=None, not_reported_results_by_outcome=None, previous_context=None, results_by_outcome=None, results_difference=None, run_summary_by_outcome=None, run_summary_by_state=None, total_tests=None):
super(AggregatedResultsAnalysis, self).__init__()
self.duration = duration
self.not_reported_results_by_outcome = not_reported_results_by_outcome
self.previous_context = previous_context
self.results_by_outcome = results_by_outcome
self.results_difference = results_difference
self.run_summary_by_outcome = run_summary_by_outcome
self.run_summary_by_state = run_summary_by_state
self.total_tests = total_tests
class AggregatedResultsByOutcome(Model):
"""
:param count:
:type count: int
:param duration:
:type duration: object
:param group_by_field:
:type group_by_field: str
:param group_by_value:
:type group_by_value: object
:param outcome:
:type outcome: object
:param rerun_result_count:
:type rerun_result_count: int
"""
_attribute_map = {
'count': {'key': 'count', 'type': 'int'},
'duration': {'key': 'duration', 'type': 'object'},
'group_by_field': {'key': 'groupByField', 'type': 'str'},
'group_by_value': {'key': 'groupByValue', 'type': 'object'},
'outcome': {'key': 'outcome', 'type': 'object'},
'rerun_result_count': {'key': 'rerunResultCount', 'type': 'int'}
}
def __init__(self, count=None, duration=None, group_by_field=None, group_by_value=None, outcome=None, rerun_result_count=None):
super(AggregatedResultsByOutcome, self).__init__()
self.count = count
self.duration = duration
self.group_by_field = group_by_field
self.group_by_value = group_by_value
self.outcome = outcome
self.rerun_result_count = rerun_result_count
class AggregatedResultsDifference(Model):
"""
:param increase_in_duration:
:type increase_in_duration: object
:param increase_in_failures:
:type increase_in_failures: int
:param increase_in_non_impacted_tests:
:type increase_in_non_impacted_tests: int
:param increase_in_other_tests:
:type increase_in_other_tests: int
:param increase_in_passed_tests:
:type increase_in_passed_tests: int
:param increase_in_total_tests:
:type increase_in_total_tests: int
"""
_attribute_map = {
'increase_in_duration': {'key': 'increaseInDuration', 'type': 'object'},
'increase_in_failures': {'key': 'increaseInFailures', 'type': 'int'},
'increase_in_non_impacted_tests': {'key': 'increaseInNonImpactedTests', 'type': 'int'},
'increase_in_other_tests': {'key': 'increaseInOtherTests', 'type': 'int'},
'increase_in_passed_tests': {'key': 'increaseInPassedTests', 'type': 'int'},
'increase_in_total_tests': {'key': 'increaseInTotalTests', 'type': 'int'}
}
def __init__(self, increase_in_duration=None, increase_in_failures=None, increase_in_non_impacted_tests=None, increase_in_other_tests=None, increase_in_passed_tests=None, increase_in_total_tests=None):
super(AggregatedResultsDifference, self).__init__()
self.increase_in_duration = increase_in_duration
self.increase_in_failures = increase_in_failures
self.increase_in_non_impacted_tests = increase_in_non_impacted_tests
self.increase_in_other_tests = increase_in_other_tests
self.increase_in_passed_tests = increase_in_passed_tests
self.increase_in_total_tests = increase_in_total_tests
class AggregatedRunsByOutcome(Model):
"""
:param outcome:
:type outcome: object
:param runs_count:
:type runs_count: int
"""
_attribute_map = {
'outcome': {'key': 'outcome', 'type': 'object'},
'runs_count': {'key': 'runsCount', 'type': 'int'}
}
def __init__(self, outcome=None, runs_count=None):
super(AggregatedRunsByOutcome, self).__init__()
self.outcome = outcome
self.runs_count = runs_count
class AggregatedRunsByState(Model):
"""
:param results_by_outcome:
:type results_by_outcome: dict
:param runs_count:
:type runs_count: int
:param state:
:type state: object
"""
_attribute_map = {
'results_by_outcome': {'key': 'resultsByOutcome', 'type': '{AggregatedResultsByOutcome}'},
'runs_count': {'key': 'runsCount', 'type': 'int'},
'state': {'key': 'state', 'type': 'object'}
}
def __init__(self, results_by_outcome=None, runs_count=None, state=None):
super(AggregatedRunsByState, self).__init__()
self.results_by_outcome = results_by_outcome
self.runs_count = runs_count
self.state = state
class ArtifactResource(Model):
"""
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.build.models.ReferenceLinks>`
:param data: Type-specific data about the artifact.
:type data: str
:param download_url: A link to download the resource.
:type download_url: str
:param properties: Type-specific properties of the artifact.
:type properties: dict
:param type: The type of the resource: File container, version control folder, UNC path, etc.
:type type: str
:param url: The full http link to the resource.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'data': {'key': 'data', 'type': 'str'},
'download_url': {'key': 'downloadUrl', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'type': {'key': 'type', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, data=None, download_url=None, properties=None, type=None, url=None):
super(ArtifactResource, self).__init__()
self._links = _links
self.data = data
self.download_url = download_url
self.properties = properties
self.type = type
self.url = url
class AssociatedWorkItem(Model):
"""
:param assigned_to:
:type assigned_to: str
:param id: Id of the associated work item.
:type id: int
:param state:
:type state: str
:param title:
:type title: str
:param url: REST Url of the work item.
:type url: str
:param web_url:
:type web_url: str
:param work_item_type:
:type work_item_type: str
"""
_attribute_map = {
'assigned_to': {'key': 'assignedTo', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'state': {'key': 'state', 'type': 'str'},
'title': {'key': 'title', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'web_url': {'key': 'webUrl', 'type': 'str'},
'work_item_type': {'key': 'workItemType', 'type': 'str'}
}
def __init__(self, assigned_to=None, id=None, state=None, title=None, url=None, web_url=None, work_item_type=None):
super(AssociatedWorkItem, self).__init__()
self.assigned_to = assigned_to
self.id = id
self.state = state
self.title = title
self.url = url
self.web_url = web_url
self.work_item_type = work_item_type
class Attachment(Model):
"""
Represents an attachment to a build.
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.build.models.ReferenceLinks>`
:param name: The name of the attachment.
:type name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, _links=None, name=None):
super(Attachment, self).__init__()
self._links = _links
self.name = name
class AuthorizationHeader(Model):
"""
:param name:
:type name: str
:param value:
:type value: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, name=None, value=None):
super(AuthorizationHeader, self).__init__()
self.name = name
self.value = value
class Build(Model):
"""
Data representation of a build.
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.build.models.ReferenceLinks>`
:param agent_specification: The agent specification for the build.
:type agent_specification: :class:`AgentSpecification <azure.devops.v7_1.build.models.AgentSpecification>`
:param append_commit_message_to_run_name: Whether to append the commit message to the build number shown in the UI.
:type append_commit_message_to_run_name: bool
:param build_number: The build number/name of the build.
:type build_number: str
:param build_number_revision: The build number revision.
:type build_number_revision: int
:param controller: The build controller. This is only set if the definition type is Xaml.
:type controller: :class:`BuildController <azure.devops.v7_1.build.models.BuildController>`
:param definition: The definition associated with the build.
:type definition: :class:`DefinitionReference <azure.devops.v7_1.build.models.DefinitionReference>`
:param deleted: Indicates whether the build has been deleted.
:type deleted: bool
:param deleted_by: The identity of the process or person that deleted the build.
:type deleted_by: :class:`IdentityRef <azure.devops.v7_1.build.models.IdentityRef>`
:param deleted_date: The date the build was deleted.
:type deleted_date: datetime
:param deleted_reason: The description of how the build was deleted.
:type deleted_reason: str
:param demands: A list of demands that represents the agent capabilities required by this build.
:type demands: list of :class:`object <azure.devops.v7_1.build.models.object>`
:param finish_time: The time that the build was completed.
:type finish_time: datetime
:param id: The ID of the build.
:type id: int
:param keep_forever: Indicates whether the build should be skipped by retention policies.
:type keep_forever: bool
:param last_changed_by: The identity representing the process or person that last changed the build.
:type last_changed_by: :class:`IdentityRef <azure.devops.v7_1.build.models.IdentityRef>`
:param last_changed_date: The date the build was last changed.
:type last_changed_date: datetime
:param logs: Information about the build logs.
:type logs: :class:`BuildLogReference <azure.devops.v7_1.build.models.BuildLogReference>`
:param orchestration_plan: The orchestration plan for the build.
:type orchestration_plan: :class:`TaskOrchestrationPlanReference <azure.devops.v7_1.build.models.TaskOrchestrationPlanReference>`
:param parameters: The parameters for the build.
:type parameters: str
:param plans: Orchestration plans associated with the build (build, cleanup)
:type plans: list of :class:`TaskOrchestrationPlanReference <azure.devops.v7_1.build.models.TaskOrchestrationPlanReference>`
:param priority: The build's priority.
:type priority: object
:param project: The team project.
:type project: :class:`TeamProjectReference <azure.devops.v7_1.build.models.TeamProjectReference>`
:param properties:
:type properties: :class:`object <azure.devops.v7_1.build.models.object>`
:param quality: The quality of the xaml build (good, bad, etc.)
:type quality: str
:param queue: The queue. This is only set if the definition type is Build. WARNING: this field is deprecated and does not correspond to the job queues.
:type queue: :class:`AgentPoolQueue <azure.devops.v7_1.build.models.AgentPoolQueue>`
:param queue_options: Additional options for queueing the build.
:type queue_options: object
:param queue_position: The current position of the build in the queue.
:type queue_position: int
:param queue_time: The time that the build was queued.
:type queue_time: datetime
:param reason: The reason that the build was created.
:type reason: object
:param repository: The repository.
:type repository: :class:`BuildRepository <azure.devops.v7_1.build.models.BuildRepository>`
:param requested_by: The identity that queued the build.
:type requested_by: :class:`IdentityRef <azure.devops.v7_1.build.models.IdentityRef>`
:param requested_for: The identity on whose behalf the build was queued.
:type requested_for: :class:`IdentityRef <azure.devops.v7_1.build.models.IdentityRef>`
:param result: The build result.
:type result: object
:param retained_by_release: Indicates whether the build is retained by a release.
:type retained_by_release: bool
:param source_branch: The source branch.
:type source_branch: str
:param source_version: The source version.
:type source_version: str
:param start_time: The time that the build was started.
:type start_time: datetime
:param status: The status of the build.
:type status: object
:param tags:
:type tags: list of str
:param template_parameters: Parameters for template expression evaluation
:type template_parameters: dict
:param triggered_by_build: The build that triggered this build via a Build completion trigger.
:type triggered_by_build: :class:`Build <azure.devops.v7_1.build.models.Build>`
:param trigger_info: Source provider-specific information about what triggered the build
:type trigger_info: dict
:param uri: The URI of the build.
:type uri: str
:param url: The REST URL of the build.
:type url: str
:param validation_results:
:type validation_results: list of :class:`BuildRequestValidationResult <azure.devops.v7_1.build.models.BuildRequestValidationResult>`
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'agent_specification': {'key': 'agentSpecification', 'type': 'AgentSpecification'},
'append_commit_message_to_run_name': {'key': 'appendCommitMessageToRunName', 'type': 'bool'},
'build_number': {'key': 'buildNumber', 'type': 'str'},
'build_number_revision': {'key': 'buildNumberRevision', 'type': 'int'},
'controller': {'key': 'controller', 'type': 'BuildController'},
'definition': {'key': 'definition', 'type': 'DefinitionReference'},
'deleted': {'key': 'deleted', 'type': 'bool'},
'deleted_by': {'key': 'deletedBy', 'type': 'IdentityRef'},
'deleted_date': {'key': 'deletedDate', 'type': 'iso-8601'},
'deleted_reason': {'key': 'deletedReason', 'type': 'str'},
'demands': {'key': 'demands', 'type': '[object]'},
'finish_time': {'key': 'finishTime', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'int'},
'keep_forever': {'key': 'keepForever', 'type': 'bool'},
'last_changed_by': {'key': 'lastChangedBy', 'type': 'IdentityRef'},
'last_changed_date': {'key': 'lastChangedDate', 'type': 'iso-8601'},
'logs': {'key': 'logs', 'type': 'BuildLogReference'},
'orchestration_plan': {'key': 'orchestrationPlan', 'type': 'TaskOrchestrationPlanReference'},
'parameters': {'key': 'parameters', 'type': 'str'},
'plans': {'key': 'plans', 'type': '[TaskOrchestrationPlanReference]'},
'priority': {'key': 'priority', 'type': 'object'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'properties': {'key': 'properties', 'type': 'object'},
'quality': {'key': 'quality', 'type': 'str'},
'queue': {'key': 'queue', 'type': 'AgentPoolQueue'},
'queue_options': {'key': 'queueOptions', 'type': 'object'},
'queue_position': {'key': 'queuePosition', 'type': 'int'},
'queue_time': {'key': 'queueTime', 'type': 'iso-8601'},
'reason': {'key': 'reason', 'type': 'object'},
'repository': {'key': 'repository', 'type': 'BuildRepository'},
'requested_by': {'key': 'requestedBy', 'type': 'IdentityRef'},
'requested_for': {'key': 'requestedFor', 'type': 'IdentityRef'},
'result': {'key': 'result', 'type': 'object'},
'retained_by_release': {'key': 'retainedByRelease', 'type': 'bool'},
'source_branch': {'key': 'sourceBranch', 'type': 'str'},
'source_version': {'key': 'sourceVersion', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'status': {'key': 'status', 'type': 'object'},
'tags': {'key': 'tags', 'type': '[str]'},
'template_parameters': {'key': 'templateParameters', 'type': '{str}'},
'triggered_by_build': {'key': 'triggeredByBuild', 'type': 'Build'},
'trigger_info': {'key': 'triggerInfo', 'type': '{str}'},
'uri': {'key': 'uri', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'validation_results': {'key': 'validationResults', 'type': '[BuildRequestValidationResult]'}
}
def __init__(self, _links=None, agent_specification=None, append_commit_message_to_run_name=None, build_number=None, build_number_revision=None, controller=None, definition=None, deleted=None, deleted_by=None, deleted_date=None, deleted_reason=None, demands=None, finish_time=None, id=None, keep_forever=None, last_changed_by=None, last_changed_date=None, logs=None, orchestration_plan=None, parameters=None, plans=None, priority=None, project=None, properties=None, quality=None, queue=None, queue_options=None, queue_position=None, queue_time=None, reason=None, repository=None, requested_by=None, requested_for=None, result=None, retained_by_release=None, source_branch=None, source_version=None, start_time=None, status=None, tags=None, template_parameters=None, triggered_by_build=None, trigger_info=None, uri=None, url=None, validation_results=None):
super(Build, self).__init__()
self._links = _links
self.agent_specification = agent_specification
self.append_commit_message_to_run_name = append_commit_message_to_run_name
self.build_number = build_number
self.build_number_revision = build_number_revision
self.controller = controller
self.definition = definition
self.deleted = deleted
self.deleted_by = deleted_by
self.deleted_date = deleted_date
self.deleted_reason = deleted_reason
self.demands = demands
self.finish_time = finish_time
self.id = id
self.keep_forever = keep_forever
self.last_changed_by = last_changed_by
self.last_changed_date = last_changed_date
self.logs = logs
self.orchestration_plan = orchestration_plan
self.parameters = parameters
self.plans = plans
self.priority = priority
self.project = project
self.properties = properties
self.quality = quality
self.queue = queue
self.queue_options = queue_options
self.queue_position = queue_position
self.queue_time = queue_time
self.reason = reason
self.repository = repository
self.requested_by = requested_by
self.requested_for = requested_for
self.result = result
self.retained_by_release = retained_by_release
self.source_branch = source_branch
self.source_version = source_version
self.start_time = start_time
self.status = status
self.tags = tags
self.template_parameters = template_parameters
self.triggered_by_build = triggered_by_build
self.trigger_info = trigger_info
self.uri = uri
self.url = url
self.validation_results = validation_results
class BuildArtifact(Model):
"""
Represents an artifact produced by a build.
:param id: The artifact ID.
:type id: int
:param name: The name of the artifact.
:type name: str
:param resource: The actual resource.
:type resource: :class:`ArtifactResource <azure.devops.v7_1.build.models.ArtifactResource>`
:param source: The artifact source, which will be the ID of the job that produced this artifact. If an artifact is associated with multiple sources, this points to the first source.
:type source: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'resource': {'key': 'resource', 'type': 'ArtifactResource'},
'source': {'key': 'source', 'type': 'str'}
}
def __init__(self, id=None, name=None, resource=None, source=None):
super(BuildArtifact, self).__init__()
self.id = id
self.name = name
self.resource = resource
self.source = source
class BuildBadge(Model):
"""
Represents a build badge.
:param build_id: The ID of the build represented by this badge.
:type build_id: int
:param image_url: A link to the SVG resource.
:type image_url: str
"""
_attribute_map = {
'build_id': {'key': 'buildId', 'type': 'int'},
'image_url': {'key': 'imageUrl', 'type': 'str'}
}
def __init__(self, build_id=None, image_url=None):
super(BuildBadge, self).__init__()
self.build_id = build_id
self.image_url = image_url
class BuildDefinitionRevision(Model):
"""
Represents a revision of a build definition.
:param comment: The comment associated with the change.
:type comment: str
:param definition_url: A link to the definition at this revision.
:type definition_url: str
:param changed_by: The identity of the person or process that changed the definition.
:type changed_by: :class:`IdentityRef <azure.devops.v7_1.build.models.IdentityRef>`
:param changed_date: The date and time that the definition was changed.
:type changed_date: datetime
:param change_type: The change type (add, edit, delete).
:type change_type: object
:param name: The name of the definition.
:type name: str
:param revision: The revision number.
:type revision: int
"""
_attribute_map = {
'comment': {'key': 'comment', 'type': 'str'},
'definition_url': {'key': 'definitionUrl', 'type': 'str'},
'changed_by': {'key': 'changedBy', 'type': 'IdentityRef'},
'changed_date': {'key': 'changedDate', 'type': 'iso-8601'},
'change_type': {'key': 'changeType', 'type': 'object'},
'name': {'key': 'name', 'type': 'str'},
'revision': {'key': 'revision', 'type': 'int'}
}
def __init__(self, comment=None, definition_url=None, changed_by=None, changed_date=None, change_type=None, name=None, revision=None):
super(BuildDefinitionRevision, self).__init__()
self.comment = comment
self.definition_url = definition_url
self.changed_by = changed_by
self.changed_date = changed_date
self.change_type = change_type
self.name = name
self.revision = revision
class BuildDefinitionStep(Model):
"""
Represents a step in a build phase.
:param always_run: Indicates whether this step should run even if a previous step fails.
:type always_run: bool
:param condition: A condition that determines whether this step should run.
:type condition: str
:param continue_on_error: Indicates whether the phase should continue even if this step fails.
:type continue_on_error: bool
:param display_name: The display name for this step.
:type display_name: str
:param enabled: Indicates whether the step is enabled.
:type enabled: bool
:param environment:
:type environment: dict
:param inputs:
:type inputs: dict
:param ref_name: The reference name for this step.
:type ref_name: str
:param retry_count_on_task_failure: Number of retries.
:type retry_count_on_task_failure: int
:param task: The task associated with this step.
:type task: :class:`TaskDefinitionReference <azure.devops.v7_1.build.models.TaskDefinitionReference>`
:param timeout_in_minutes: The time, in minutes, that this step is allowed to run.
:type timeout_in_minutes: int
"""
_attribute_map = {
'always_run': {'key': 'alwaysRun', 'type': 'bool'},
'condition': {'key': 'condition', 'type': 'str'},
'continue_on_error': {'key': 'continueOnError', 'type': 'bool'},
'display_name': {'key': 'displayName', 'type': 'str'},
'enabled': {'key': 'enabled', 'type': 'bool'},
'environment': {'key': 'environment', 'type': '{str}'},
'inputs': {'key': 'inputs', 'type': '{str}'},
'ref_name': {'key': 'refName', 'type': 'str'},
'retry_count_on_task_failure': {'key': 'retryCountOnTaskFailure', 'type': 'int'},
'task': {'key': 'task', 'type': 'TaskDefinitionReference'},
'timeout_in_minutes': {'key': 'timeoutInMinutes', 'type': 'int'}
}
def __init__(self, always_run=None, condition=None, continue_on_error=None, display_name=None, enabled=None, environment=None, inputs=None, ref_name=None, retry_count_on_task_failure=None, task=None, timeout_in_minutes=None):
super(BuildDefinitionStep, self).__init__()
self.always_run = always_run
self.condition = condition
self.continue_on_error = continue_on_error
self.display_name = display_name
self.enabled = enabled
self.environment = environment
self.inputs = inputs
self.ref_name = ref_name
self.retry_count_on_task_failure = retry_count_on_task_failure
self.task = task
self.timeout_in_minutes = timeout_in_minutes
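# Illustrative sketch (not part of the generated SDK): a step that always
# runs, retries once on task failure, and references a task by ID. The GUID
# and version spec below are placeholders, not real task identifiers.
def _example_build_definition_step():
    task = TaskDefinitionReference(
        definition_type='task',
        id='00000000-0000-0000-0000-000000000000',  # placeholder GUID
        version_spec='2.*')
    return BuildDefinitionStep(
        display_name='Run tests',
        enabled=True,
        always_run=True,                 # run even if a previous step failed
        retry_count_on_task_failure=1,
        timeout_in_minutes=10,
        task=task)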
class BuildDefinitionTemplate(Model):
"""
Represents a template from which new build definitions can be created.
:param can_delete: Indicates whether the template can be deleted.
:type can_delete: bool
:param category: The template category.
:type category: str
:param default_hosted_queue: An optional hosted agent queue for the template to use by default.
:type default_hosted_queue: str
:param description: A description of the template.
:type description: str
:param icons:
:type icons: dict
:param icon_task_id: The ID of the task whose icon is used when showing this template in the UI.
:type icon_task_id: str
:param id: The ID of the template.
:type id: str
:param name: The name of the template.
:type name: str
:param template: The actual template.
:type template: :class:`BuildDefinition <azure.devops.v7_1.build.models.BuildDefinition>`
"""
_attribute_map = {
'can_delete': {'key': 'canDelete', 'type': 'bool'},
'category': {'key': 'category', 'type': 'str'},
'default_hosted_queue': {'key': 'defaultHostedQueue', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'icons': {'key': 'icons', 'type': '{str}'},
'icon_task_id': {'key': 'iconTaskId', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'template': {'key': 'template', 'type': 'BuildDefinition'}
}
def __init__(self, can_delete=None, category=None, default_hosted_queue=None, description=None, icons=None, icon_task_id=None, id=None, name=None, template=None):
super(BuildDefinitionTemplate, self).__init__()
self.can_delete = can_delete
self.category = category
self.default_hosted_queue = default_hosted_queue
self.description = description
self.icons = icons
self.icon_task_id = icon_task_id
self.id = id
self.name = name
self.template = template
class BuildDefinitionTemplate3_2(Model):
"""
For backward compatibility with extensions that use the old Steps format instead of Process and Phases.
:param can_delete:
:type can_delete: bool
:param category:
:type category: str
:param default_hosted_queue:
:type default_hosted_queue: str
:param description:
:type description: str
:param icons:
:type icons: dict
:param icon_task_id:
:type icon_task_id: str
:param id:
:type id: str
:param name:
:type name: str
:param template:
:type template: :class:`BuildDefinition3_2 <azure.devops.v7_1.build.models.BuildDefinition3_2>`
"""
_attribute_map = {
'can_delete': {'key': 'canDelete', 'type': 'bool'},
'category': {'key': 'category', 'type': 'str'},
'default_hosted_queue': {'key': 'defaultHostedQueue', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'icons': {'key': 'icons', 'type': '{str}'},
'icon_task_id': {'key': 'iconTaskId', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'template': {'key': 'template', 'type': 'BuildDefinition3_2'}
}
def __init__(self, can_delete=None, category=None, default_hosted_queue=None, description=None, icons=None, icon_task_id=None, id=None, name=None, template=None):
super(BuildDefinitionTemplate3_2, self).__init__()
self.can_delete = can_delete
self.category = category
self.default_hosted_queue = default_hosted_queue
self.description = description
self.icons = icons
self.icon_task_id = icon_task_id
self.id = id
self.name = name
self.template = template
class BuildDefinitionVariable(Model):
"""
Represents a variable used by a build definition.
:param allow_override: Indicates whether the value can be set at queue time.
:type allow_override: bool
:param is_secret: Indicates whether the variable's value is a secret.
:type is_secret: bool
:param value: The value of the variable.
:type value: str
"""
_attribute_map = {
'allow_override': {'key': 'allowOverride', 'type': 'bool'},
'is_secret': {'key': 'isSecret', 'type': 'bool'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, allow_override=None, is_secret=None, value=None):
super(BuildDefinitionVariable, self).__init__()
self.allow_override = allow_override
self.is_secret = is_secret
self.value = value
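# Illustrative sketch (not part of the generated SDK): definition variables
# are keyed by name in a definition's `variables` dictionary; the model only
# carries the value and its flags.
def _example_variables():
    return {
        'BuildConfiguration': BuildDefinitionVariable(
            allow_override=True,   # settable at queue time
            is_secret=False,
            value='Release'),
        'ApiKey': BuildDefinitionVariable(
            allow_override=False,
            is_secret=True,        # masked in logs; the service does not
            value=None),           # return secret values on reads
    }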
class BuildLogReference(Model):
"""
Represents a reference to a build log.
:param id: The ID of the log.
:type id: int
:param type: The type of the log location.
:type type: str
:param url: A full link to the log resource.
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'type': {'key': 'type', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, type=None, url=None):
super(BuildLogReference, self).__init__()
self.id = id
self.type = type
self.url = url
class BuildMetric(Model):
"""
Represents metadata about builds in the system.
:param date: The date for the scope.
:type date: datetime
:param int_value: The value.
:type int_value: int
:param name: The name of the metric.
:type name: str
:param scope: The scope.
:type scope: str
"""
_attribute_map = {
'date': {'key': 'date', 'type': 'iso-8601'},
'int_value': {'key': 'intValue', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'scope': {'key': 'scope', 'type': 'str'}
}
def __init__(self, date=None, int_value=None, name=None, scope=None):
super(BuildMetric, self).__init__()
self.date = date
self.int_value = int_value
self.name = name
self.scope = scope
class BuildOption(Model):
"""
Represents the application of an optional behavior to a build definition.
:param definition: A reference to the build option.
:type definition: :class:`BuildOptionDefinitionReference <azure.devops.v7_1.build.models.BuildOptionDefinitionReference>`
:param enabled: Indicates whether the behavior is enabled.
:type enabled: bool
:param inputs:
:type inputs: dict
"""
_attribute_map = {
'definition': {'key': 'definition', 'type': 'BuildOptionDefinitionReference'},
'enabled': {'key': 'enabled', 'type': 'bool'},
'inputs': {'key': 'inputs', 'type': '{str}'}
}
def __init__(self, definition=None, enabled=None, inputs=None):
super(BuildOption, self).__init__()
self.definition = definition
self.enabled = enabled
self.inputs = inputs
class BuildOptionDefinitionReference(Model):
"""
Represents a reference to a build option definition.
:param id: The ID of the referenced build option.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'}
}
def __init__(self, id=None):
super(BuildOptionDefinitionReference, self).__init__()
self.id = id
class BuildOptionGroupDefinition(Model):
"""
Represents a group of inputs for a build option.
:param display_name: The name of the group to display in the UI.
:type display_name: str
:param is_expanded: Indicates whether the group is initially displayed as expanded in the UI.
:type is_expanded: bool
:param name: The internal name of the group.
:type name: str
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'is_expanded': {'key': 'isExpanded', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, display_name=None, is_expanded=None, name=None):
super(BuildOptionGroupDefinition, self).__init__()
self.display_name = display_name
self.is_expanded = is_expanded
self.name = name
class BuildOptionInputDefinition(Model):
"""
Represents an input for a build option.
:param default_value: The default value.
:type default_value: str
:param group_name: The name of the input group that this input belongs to.
:type group_name: str
:param help:
:type help: dict
:param label: The label for the input.
:type label: str
:param name: The name of the input.
:type name: str
:param options:
:type options: dict
:param required: Indicates whether the input is required to have a value.
:type required: bool
:param type: Indicates the type of the input value.
:type type: object
:param visible_rule: The rule that is applied to determine whether the input is visible in the UI.
:type visible_rule: str
"""
_attribute_map = {
'default_value': {'key': 'defaultValue', 'type': 'str'},
'group_name': {'key': 'groupName', 'type': 'str'},
'help': {'key': 'help', 'type': '{str}'},
'label': {'key': 'label', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'required': {'key': 'required', 'type': 'bool'},
'type': {'key': 'type', 'type': 'object'},
'visible_rule': {'key': 'visibleRule', 'type': 'str'}
}
def __init__(self, default_value=None, group_name=None, help=None, label=None, name=None, options=None, required=None, type=None, visible_rule=None):
super(BuildOptionInputDefinition, self).__init__()
self.default_value = default_value
self.group_name = group_name
self.help = help
self.label = label
self.name = name
self.options = options
self.required = required
self.type = type
self.visible_rule = visible_rule
class BuildReportMetadata(Model):
"""
Represents information about a build report.
:param build_id: The Id of the build.
:type build_id: int
:param content: The content of the report.
:type content: str
:param type: The type of the report.
:type type: str
"""
_attribute_map = {
'build_id': {'key': 'buildId', 'type': 'int'},
'content': {'key': 'content', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, build_id=None, content=None, type=None):
super(BuildReportMetadata, self).__init__()
self.build_id = build_id
self.content = content
self.type = type
class BuildRepository(Model):
"""
Represents a repository used by a build definition.
:param clean: Indicates whether to clean the target folder when getting code from the repository.
:type clean: str
:param default_branch: The name of the default branch.
:type default_branch: str
:param checkout_submodules: Indicates whether to checkout submodules.
:type checkout_submodules: bool
:param id: The ID of the repository.
:type id: str
:param name: The friendly name of the repository.
:type name: str
:param properties:
:type properties: dict
:param root_folder: The root folder.
:type root_folder: str
:param type: The type of the repository.
:type type: str
:param url: The URL of the repository.
:type url: str
"""
_attribute_map = {
'clean': {'key': 'clean', 'type': 'str'},
'default_branch': {'key': 'defaultBranch', 'type': 'str'},
'checkout_submodules': {'key': 'checkoutSubmodules', 'type': 'bool'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'root_folder': {'key': 'rootFolder', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, clean=None, default_branch=None, checkout_submodules=None, id=None, name=None, properties=None, root_folder=None, type=None, url=None):
super(BuildRepository, self).__init__()
self.clean = clean
self.default_branch = default_branch
self.checkout_submodules = checkout_submodules
self.id = id
self.name = name
self.properties = properties
self.root_folder = root_folder
self.type = type
self.url = url
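# Illustrative sketch (not part of the generated SDK): a Git repository
# configuration for a definition. Note that `clean` is serialized as a
# string ("true"/"false"), not a bool, in this model.
def _example_build_repository():
    return BuildRepository(
        type='TfsGit',
        name='MyRepo',
        default_branch='refs/heads/main',
        checkout_submodules=False,
        clean='true')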
class BuildRequestValidationResult(Model):
"""
Represents the result of validating a build request.
:param message: The message associated with the result.
:type message: str
:param result: The result.
:type result: object
"""
_attribute_map = {
'message': {'key': 'message', 'type': 'str'},
'result': {'key': 'result', 'type': 'object'}
}
def __init__(self, message=None, result=None):
super(BuildRequestValidationResult, self).__init__()
self.message = message
self.result = result
class BuildResourceUsage(Model):
"""
Represents information about resources used by builds in the system.
:param distributed_task_agents: The number of build agents.
:type distributed_task_agents: int
:param paid_private_agent_slots: The number of paid private agent slots.
:type paid_private_agent_slots: int
:param total_usage: The total usage.
:type total_usage: int
:param xaml_controllers: The number of XAML controllers.
:type xaml_controllers: int
"""
_attribute_map = {
'distributed_task_agents': {'key': 'distributedTaskAgents', 'type': 'int'},
'paid_private_agent_slots': {'key': 'paidPrivateAgentSlots', 'type': 'int'},
'total_usage': {'key': 'totalUsage', 'type': 'int'},
'xaml_controllers': {'key': 'xamlControllers', 'type': 'int'}
}
def __init__(self, distributed_task_agents=None, paid_private_agent_slots=None, total_usage=None, xaml_controllers=None):
super(BuildResourceUsage, self).__init__()
self.distributed_task_agents = distributed_task_agents
self.paid_private_agent_slots = paid_private_agent_slots
self.total_usage = total_usage
self.xaml_controllers = xaml_controllers
class BuildRetentionHistory(Model):
"""
A historical overview of build retention information. This includes a list of snapshots of build retention usage and a list of builds that have exceeded the default 30-day retention policy.
:param build_retention_samples: A list of builds that are older than the default retention policy but are not marked as retained; something is preventing these builds from being cleaned up.
:type build_retention_samples: list of :class:`BuildRetentionSample <azure.devops.v7_1.build.models.BuildRetentionSample>`
"""
_attribute_map = {
'build_retention_samples': {'key': 'buildRetentionSamples', 'type': '[BuildRetentionSample]'}
}
def __init__(self, build_retention_samples=None):
super(BuildRetentionHistory, self).__init__()
self.build_retention_samples = build_retention_samples
class BuildRetentionSample(Model):
"""
A snapshot of build retention information. This class takes a sample at the given time. It provides information about retained builds, files associated with those retained builds, and number of files being retained.
:param builds: Summary of retention by build
:type builds: str
:param definitions: List of build definitions
:type definitions: str
:param files: Summary of files consumed by retained builds
:type files: str
:param sample_time: The date and time when the sample was taken
:type sample_time: datetime
"""
_attribute_map = {
'builds': {'key': 'builds', 'type': 'str'},
'definitions': {'key': 'definitions', 'type': 'str'},
'files': {'key': 'files', 'type': 'str'},
'sample_time': {'key': 'sampleTime', 'type': 'iso-8601'}
}
def __init__(self, builds=None, definitions=None, files=None, sample_time=None):
super(BuildRetentionSample, self).__init__()
self.builds = builds
self.definitions = definitions
self.files = files
self.sample_time = sample_time
class BuildSettings(Model):
"""
Represents system-wide build settings.
:param days_to_keep_deleted_builds_before_destroy: The number of days to keep records of deleted builds.
:type days_to_keep_deleted_builds_before_destroy: int
:param default_retention_policy: The default retention policy.
:type default_retention_policy: :class:`RetentionPolicy <azure.devops.v7_1.build.models.RetentionPolicy>`
:param maximum_retention_policy: The maximum retention policy.
:type maximum_retention_policy: :class:`RetentionPolicy <azure.devops.v7_1.build.models.RetentionPolicy>`
"""
_attribute_map = {
'days_to_keep_deleted_builds_before_destroy': {'key': 'daysToKeepDeletedBuildsBeforeDestroy', 'type': 'int'},
'default_retention_policy': {'key': 'defaultRetentionPolicy', 'type': 'RetentionPolicy'},
'maximum_retention_policy': {'key': 'maximumRetentionPolicy', 'type': 'RetentionPolicy'}
}
def __init__(self, days_to_keep_deleted_builds_before_destroy=None, default_retention_policy=None, maximum_retention_policy=None):
super(BuildSettings, self).__init__()
self.days_to_keep_deleted_builds_before_destroy = days_to_keep_deleted_builds_before_destroy
self.default_retention_policy = default_retention_policy
self.maximum_retention_policy = maximum_retention_policy
class DataSourceBindingBase(Model):
"""
Represents binding of data source for the service endpoint request.
:param callback_context_template: Pagination format supported by this data source (ContinuationToken/SkipTop).
:type callback_context_template: str
:param callback_required_template: Indicates whether subsequent calls are needed.
:type callback_required_template: str
:param data_source_name: Gets or sets the name of the data source.
:type data_source_name: str
:param endpoint_id: Gets or sets the endpoint Id.
:type endpoint_id: str
:param endpoint_url: Gets or sets the url of the service endpoint.
:type endpoint_url: str
:param headers: Gets or sets the authorization headers.
:type headers: list of :class:`AuthorizationHeader <azure.devops.v7_1.microsoft._team_foundation._distributed_task._common._contracts.models.AuthorizationHeader>`
:param initial_context_template: Defines the initial value of the query parameters.
:type initial_context_template: str
:param parameters: Gets or sets the parameters for the data source.
:type parameters: dict
:param request_content: Gets or sets the HTTP request body.
:type request_content: str
:param request_verb: Gets or sets the HTTP request verb.
:type request_verb: str
:param result_selector: Gets or sets the result selector.
:type result_selector: str
:param result_template: Gets or sets the result template.
:type result_template: str
:param target: Gets or sets the target of the data source.
:type target: str
"""
_attribute_map = {
'callback_context_template': {'key': 'callbackContextTemplate', 'type': 'str'},
'callback_required_template': {'key': 'callbackRequiredTemplate', 'type': 'str'},
'data_source_name': {'key': 'dataSourceName', 'type': 'str'},
'endpoint_id': {'key': 'endpointId', 'type': 'str'},
'endpoint_url': {'key': 'endpointUrl', 'type': 'str'},
'headers': {'key': 'headers', 'type': '[AuthorizationHeader]'},
'initial_context_template': {'key': 'initialContextTemplate', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'request_content': {'key': 'requestContent', 'type': 'str'},
'request_verb': {'key': 'requestVerb', 'type': 'str'},
'result_selector': {'key': 'resultSelector', 'type': 'str'},
'result_template': {'key': 'resultTemplate', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'}
}
def __init__(self, callback_context_template=None, callback_required_template=None, data_source_name=None, endpoint_id=None, endpoint_url=None, headers=None, initial_context_template=None, parameters=None, request_content=None, request_verb=None, result_selector=None, result_template=None, target=None):
super(DataSourceBindingBase, self).__init__()
self.callback_context_template = callback_context_template
self.callback_required_template = callback_required_template
self.data_source_name = data_source_name
self.endpoint_id = endpoint_id
self.endpoint_url = endpoint_url
self.headers = headers
self.initial_context_template = initial_context_template
self.parameters = parameters
self.request_content = request_content
self.request_verb = request_verb
self.result_selector = result_selector
self.result_template = result_template
self.target = target
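# Illustrative sketch (not part of the generated SDK): a binding that issues
# a GET against a service endpoint and selects values from the response with
# a JSONPath-style selector. The endpoint ID and URL are placeholders.
def _example_data_source_binding():
    return DataSourceBindingBase(
        data_source_name='Projects',
        endpoint_id='00000000-0000-0000-0000-000000000000',  # placeholder
        endpoint_url='{{endpoint.url}}/_apis/projects?api-version=7.1',
        request_verb='GET',
        result_selector='jsonpath:$.value[*].name')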
class DefinitionReference(Model):
"""
Represents a reference to a definition.
:param created_date: The date this version of the definition was created.
:type created_date: datetime
:param id: The ID of the referenced definition.
:type id: int
:param name: The name of the referenced definition.
:type name: str
:param path: The folder path of the definition.
:type path: str
:param project: A reference to the project.
:type project: :class:`TeamProjectReference <azure.devops.v7_1.build.models.TeamProjectReference>`
:param queue_status: A value that indicates whether builds can be queued against this definition.
:type queue_status: object
:param revision: The definition revision number.
:type revision: int
:param type: The type of the definition.
:type type: object
:param uri: The definition's URI.
:type uri: str
:param url: The REST URL of the definition.
:type url: str
"""
_attribute_map = {
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'queue_status': {'key': 'queueStatus', 'type': 'object'},
'revision': {'key': 'revision', 'type': 'int'},
'type': {'key': 'type', 'type': 'object'},
'uri': {'key': 'uri', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, created_date=None, id=None, name=None, path=None, project=None, queue_status=None, revision=None, type=None, uri=None, url=None):
super(DefinitionReference, self).__init__()
self.created_date = created_date
self.id = id
self.name = name
self.path = path
self.project = project
self.queue_status = queue_status
self.revision = revision
self.type = type
self.uri = uri
self.url = url
class DefinitionResourceReference(Model):
"""
:param authorized: Indicates whether the resource is authorized for use.
:type authorized: bool
:param id: The id of the resource.
:type id: str
:param name: A friendly name for the resource.
:type name: str
:param type: The type of the resource.
:type type: str
"""
_attribute_map = {
'authorized': {'key': 'authorized', 'type': 'bool'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, authorized=None, id=None, name=None, type=None):
super(DefinitionResourceReference, self).__init__()
self.authorized = authorized
self.id = id
self.name = name
self.type = type
class Deployment(Model):
"""
Represents the data from the build information nodes for type "DeploymentInformation" for XAML builds.
:param type:
:type type: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, type=None):
super(Deployment, self).__init__()
self.type = type
class Folder(Model):
"""
Represents a folder that contains build definitions.
:param created_by: The process or person who created the folder.
:type created_by: :class:`IdentityRef <azure.devops.v7_1.build.models.IdentityRef>`
:param created_on: The date the folder was created.
:type created_on: datetime
:param description: The description.
:type description: str
:param last_changed_by: The process or person that last changed the folder.
:type last_changed_by: :class:`IdentityRef <azure.devops.v7_1.build.models.IdentityRef>`
:param last_changed_date: The date the folder was last changed.
:type last_changed_date: datetime
:param path: The full path.
:type path: str
:param project: The project.
:type project: :class:`TeamProjectReference <azure.devops.v7_1.build.models.TeamProjectReference>`
"""
_attribute_map = {
'created_by': {'key': 'createdBy', 'type': 'IdentityRef'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'description': {'key': 'description', 'type': 'str'},
'last_changed_by': {'key': 'lastChangedBy', 'type': 'IdentityRef'},
'last_changed_date': {'key': 'lastChangedDate', 'type': 'iso-8601'},
'path': {'key': 'path', 'type': 'str'},
'project': {'key': 'project', 'type': 'TeamProjectReference'}
}
def __init__(self, created_by=None, created_on=None, description=None, last_changed_by=None, last_changed_date=None, path=None, project=None):
super(Folder, self).__init__()
self.created_by = created_by
self.created_on = created_on
self.description = description
self.last_changed_by = last_changed_by
self.last_changed_date = last_changed_date
self.path = path
self.project = project
class GraphSubjectBase(Model):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None):
super(GraphSubjectBase, self).__init__()
self._links = _links
self.descriptor = descriptor
self.display_name = display_name
self.url = url
class Change(Model):
"""
Represents a change associated with a build.
:param author: The author of the change.
:type author: :class:`IdentityRef <azure.devops.v7_1.build.models.IdentityRef>`
:param display_uri: The location of a user-friendly representation of the resource.
:type display_uri: str
:param id: The identifier for the change. For a commit, this would be the SHA1. For a TFVC changeset, this would be the changeset ID.
:type id: str
:param location: The location of the full representation of the resource.
:type location: str
:param message: The description of the change. This might be a commit message or changeset description.
:type message: str
:param message_truncated: Indicates whether the message was truncated.
:type message_truncated: bool
:param pusher: The person or process that pushed the change.
:type pusher: str
:param timestamp: The timestamp for the change.
:type timestamp: datetime
:param type: The type of change. "commit", "changeset", etc.
:type type: str
"""
_attribute_map = {
'author': {'key': 'author', 'type': 'IdentityRef'},
'display_uri': {'key': 'displayUri', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'message_truncated': {'key': 'messageTruncated', 'type': 'bool'},
'pusher': {'key': 'pusher', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, author=None, display_uri=None, id=None, location=None, message=None, message_truncated=None, pusher=None, timestamp=None, type=None):
super(Change, self).__init__()
self.author = author
self.display_uri = display_uri
self.id = id
self.location = location
self.message = message
self.message_truncated = message_truncated
self.pusher = pusher
self.timestamp = timestamp
self.type = type
class IdentityRef(GraphSubjectBase):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param directory_alias: Deprecated - Can be retrieved by querying the Graph user referenced in the "self" entry of the IdentityRef "_links" dictionary
:type directory_alias: str
:param id:
:type id: str
:param image_url: Deprecated - Available in the "avatar" entry of the IdentityRef "_links" dictionary
:type image_url: str
:param inactive: Deprecated - Can be retrieved by querying the Graph membership state referenced in the "membershipState" entry of the GraphUser "_links" dictionary
:type inactive: bool
:param is_aad_identity: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsAadUserType/Descriptor.IsAadGroupType)
:type is_aad_identity: bool
:param is_container: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsGroupType)
:type is_container: bool
:param is_deleted_in_origin:
:type is_deleted_in_origin: bool
:param profile_url: Deprecated - not in use in most preexisting implementations of ToIdentityRef
:type profile_url: str
:param unique_name: Deprecated - use Domain+PrincipalName instead
:type unique_name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'directory_alias': {'key': 'directoryAlias', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'image_url': {'key': 'imageUrl', 'type': 'str'},
'inactive': {'key': 'inactive', 'type': 'bool'},
'is_aad_identity': {'key': 'isAadIdentity', 'type': 'bool'},
'is_container': {'key': 'isContainer', 'type': 'bool'},
'is_deleted_in_origin': {'key': 'isDeletedInOrigin', 'type': 'bool'},
'profile_url': {'key': 'profileUrl', 'type': 'str'},
'unique_name': {'key': 'uniqueName', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, directory_alias=None, id=None, image_url=None, inactive=None, is_aad_identity=None, is_container=None, is_deleted_in_origin=None, profile_url=None, unique_name=None):
super(IdentityRef, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url)
self.directory_alias = directory_alias
self.id = id
self.image_url = image_url
self.inactive = inactive
self.is_aad_identity = is_aad_identity
self.is_container = is_container
self.is_deleted_in_origin = is_deleted_in_origin
self.profile_url = profile_url
self.unique_name = unique_name
class Issue(Model):
"""
Represents an issue (error, warning) associated with a build.
:param category: The category.
:type category: str
:param data:
:type data: dict
:param message: A description of the issue.
:type message: str
:param type: The type (error, warning) of the issue.
:type type: object
"""
_attribute_map = {
'category': {'key': 'category', 'type': 'str'},
'data': {'key': 'data', 'type': '{str}'},
'message': {'key': 'message', 'type': 'str'},
'type': {'key': 'type', 'type': 'object'}
}
def __init__(self, category=None, data=None, message=None, type=None):
super(Issue, self).__init__()
self.category = category
self.data = data
self.message = message
self.type = type
class JobReference(Model):
"""
A job in a pipeline. This is related to matrixing in YAML.
:param attempt: Attempt number of the job
:type attempt: int
:param job_name: Matrixing in YAML generates copies of a job with different inputs in the matrix. JobName is the name of one of those inputs. The maximum supported length for the name is 256 characters.
:type job_name: str
"""
_attribute_map = {
'attempt': {'key': 'attempt', 'type': 'int'},
'job_name': {'key': 'jobName', 'type': 'str'}
}
def __init__(self, attempt=None, job_name=None):
super(JobReference, self).__init__()
self.attempt = attempt
self.job_name = job_name
class JsonPatchOperation(Model):
"""
The JSON model for a JSON Patch operation
:param from_: The path to copy from for the Move/Copy operation.
:type from_: str
:param op: The patch operation
:type op: object
:param path: The path for the operation. In the case of an array, a zero based index can be used to specify the position in the array (e.g. /biscuits/0/name). The "-" character can be used instead of an index to insert at the end of the array (e.g. /biscuits/-).
:type path: str
:param value: The value for the operation. This is either a primitive or a JToken.
:type value: object
"""
_attribute_map = {
'from_': {'key': 'from', 'type': 'str'},
'op': {'key': 'op', 'type': 'object'},
'path': {'key': 'path', 'type': 'str'},
'value': {'key': 'value', 'type': 'object'}
}
def __init__(self, from_=None, op=None, path=None, value=None):
super(JsonPatchOperation, self).__init__()
self.from_ = from_
self.op = op
self.path = path
self.value = value
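# Illustrative sketch (not part of the generated SDK): JSON Patch operations
# using the path semantics described above; "-" appends to an array, while a
# numeric index addresses an existing element.
def _example_patch_operations():
    return [
        JsonPatchOperation(op='add', path='/biscuits/-',
                           value={'name': 'Ginger Nut'}),  # append
        JsonPatchOperation(op='replace', path='/biscuits/0/name',
                           value='Digestive'),             # replace element 0
        JsonPatchOperation(op='move', from_='/biscuits/1', path='/best_biscuit'),
    ]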
class MinimalRetentionLease(Model):
"""
:param definition_id: The pipeline definition of the run.
:type definition_id: int
:param owner_id: User-provided string that identifies the owner of a retention lease.
:type owner_id: str
:param run_id: The pipeline run to protect.
:type run_id: int
"""
_attribute_map = {
'definition_id': {'key': 'definitionId', 'type': 'int'},
'owner_id': {'key': 'ownerId', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'int'}
}
def __init__(self, definition_id=None, owner_id=None, run_id=None):
super(MinimalRetentionLease, self).__init__()
self.definition_id = definition_id
self.owner_id = owner_id
self.run_id = run_id
class NewRetentionLease(Model):
"""
Required information to create a new retention lease.
:param days_valid: The number of days to consider the lease valid. A retention lease valid for more than 100 years (36500 days) will display as retaining the build "forever".
:type days_valid: int
:param definition_id: The pipeline definition of the run.
:type definition_id: int
:param owner_id: User-provided string that identifies the owner of a retention lease.
:type owner_id: str
:param protect_pipeline: If set, this lease will also prevent the pipeline from being deleted while the lease is still valid.
:type protect_pipeline: bool
:param run_id: The pipeline run to protect.
:type run_id: int
"""
_attribute_map = {
'days_valid': {'key': 'daysValid', 'type': 'int'},
'definition_id': {'key': 'definitionId', 'type': 'int'},
'owner_id': {'key': 'ownerId', 'type': 'str'},
'protect_pipeline': {'key': 'protectPipeline', 'type': 'bool'},
'run_id': {'key': 'runId', 'type': 'int'}
}
def __init__(self, days_valid=None, definition_id=None, owner_id=None, protect_pipeline=None, run_id=None):
super(NewRetentionLease, self).__init__()
self.days_valid = days_valid
self.definition_id = definition_id
self.owner_id = owner_id
self.protect_pipeline = protect_pipeline
self.run_id = run_id
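# Illustrative sketch (not part of the generated SDK): a lease protecting a
# specific run for 30 days; the IDs and owner tag are placeholders.
def _example_new_retention_lease():
    return NewRetentionLease(
        days_valid=30,        # values above 36500 display as "forever"
        definition_id=12,
        run_id=345,
        owner_id='User:jane@example.com',  # free-form owner string
        protect_pipeline=False)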
class PhaseReference(Model):
"""
Phase in pipeline
:param attempt: Attempt number of the phase
:type attempt: int
:param phase_name: Name of the phase. The maximum supported length for the name is 256 characters.
:type phase_name: str
"""
_attribute_map = {
'attempt': {'key': 'attempt', 'type': 'int'},
'phase_name': {'key': 'phaseName', 'type': 'str'}
}
def __init__(self, attempt=None, phase_name=None):
super(PhaseReference, self).__init__()
self.attempt = attempt
self.phase_name = phase_name
class PipelineGeneralSettings(Model):
"""
Contains pipeline general settings.
:param audit_enforce_settable_var: If enabled, audit logs will be generated whenever someone queues a pipeline run and defines variables that are not marked as "Settable at queue time".
:type audit_enforce_settable_var: bool
:param disable_classic_pipeline_creation: Disable classic pipelines creation.
:type disable_classic_pipeline_creation: bool
:param enforce_job_auth_scope: If enabled, scope of access for all non-release pipelines reduces to the current project.
:type enforce_job_auth_scope: bool
:param enforce_job_auth_scope_for_releases: If enabled, scope of access for all release pipelines reduces to the current project.
:type enforce_job_auth_scope_for_releases: bool
:param enforce_referenced_repo_scoped_token: Restricts the scope of access for all pipelines to only repositories explicitly referenced by the pipeline.
:type enforce_referenced_repo_scoped_token: bool
:param enforce_settable_var: If enabled, only those variables that are explicitly marked as "Settable at queue time" can be set at queue time.
:type enforce_settable_var: bool
:param publish_pipeline_metadata: Allows pipelines to record metadata.
:type publish_pipeline_metadata: bool
:param status_badges_are_private: Anonymous users can access the status badge API for all pipelines unless this option is enabled.
:type status_badges_are_private: bool
"""
_attribute_map = {
'audit_enforce_settable_var': {'key': 'auditEnforceSettableVar', 'type': 'bool'},
'disable_classic_pipeline_creation': {'key': 'disableClassicPipelineCreation', 'type': 'bool'},
'enforce_job_auth_scope': {'key': 'enforceJobAuthScope', 'type': 'bool'},
'enforce_job_auth_scope_for_releases': {'key': 'enforceJobAuthScopeForReleases', 'type': 'bool'},
'enforce_referenced_repo_scoped_token': {'key': 'enforceReferencedRepoScopedToken', 'type': 'bool'},
'enforce_settable_var': {'key': 'enforceSettableVar', 'type': 'bool'},
'publish_pipeline_metadata': {'key': 'publishPipelineMetadata', 'type': 'bool'},
'status_badges_are_private': {'key': 'statusBadgesArePrivate', 'type': 'bool'}
}
def __init__(self, audit_enforce_settable_var=None, disable_classic_pipeline_creation=None, enforce_job_auth_scope=None, enforce_job_auth_scope_for_releases=None, enforce_referenced_repo_scoped_token=None, enforce_settable_var=None, publish_pipeline_metadata=None, status_badges_are_private=None):
super(PipelineGeneralSettings, self).__init__()
self.audit_enforce_settable_var = audit_enforce_settable_var
self.disable_classic_pipeline_creation = disable_classic_pipeline_creation
self.enforce_job_auth_scope = enforce_job_auth_scope
self.enforce_job_auth_scope_for_releases = enforce_job_auth_scope_for_releases
self.enforce_referenced_repo_scoped_token = enforce_referenced_repo_scoped_token
self.enforce_settable_var = enforce_settable_var
self.publish_pipeline_metadata = publish_pipeline_metadata
self.status_badges_are_private = status_badges_are_private
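# Illustrative sketch (not part of the generated SDK): a locked-down settings
# payload that scopes job tokens to the project and restricts queue-time
# variables to those explicitly marked settable.
def _example_general_settings():
    return PipelineGeneralSettings(
        enforce_job_auth_scope=True,
        enforce_job_auth_scope_for_releases=True,
        enforce_referenced_repo_scoped_token=True,
        enforce_settable_var=True,
        audit_enforce_settable_var=True,
        status_badges_are_private=True)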
class PipelineReference(Model):
"""
Pipeline reference
:param job_reference: Reference of the job
:type job_reference: :class:`JobReference <azure.devops.v7_1.microsoft._team_foundation._test_management._web_api.models.JobReference>`
:param phase_reference: Reference of the phase.
:type phase_reference: :class:`PhaseReference <azure.devops.v7_1.microsoft._team_foundation._test_management._web_api.models.PhaseReference>`
:param pipeline_id: Reference of the pipeline with which this pipeline instance is related.
:type pipeline_id: int
:param stage_reference: Reference of the stage.
:type stage_reference: :class:`StageReference <azure.devops.v7_1.microsoft._team_foundation._test_management._web_api.models.StageReference>`
"""
_attribute_map = {
'job_reference': {'key': 'jobReference', 'type': 'JobReference'},
'phase_reference': {'key': 'phaseReference', 'type': 'PhaseReference'},
'pipeline_id': {'key': 'pipelineId', 'type': 'int'},
'stage_reference': {'key': 'stageReference', 'type': 'StageReference'}
}
def __init__(self, job_reference=None, phase_reference=None, pipeline_id=None, stage_reference=None):
super(PipelineReference, self).__init__()
self.job_reference = job_reference
self.phase_reference = phase_reference
self.pipeline_id = pipeline_id
self.stage_reference = stage_reference
class ProcessParameters(Model):
"""
:param data_source_bindings:
:type data_source_bindings: list of :class:`DataSourceBindingBase <azure.devops.v7_1.microsoft._team_foundation._distributed_task._common._contracts.models.DataSourceBindingBase>`
:param inputs:
:type inputs: list of :class:`TaskInputDefinitionBase <azure.devops.v7_1.microsoft._team_foundation._distributed_task._common._contracts.models.TaskInputDefinitionBase>`
:param source_definitions:
:type source_definitions: list of :class:`TaskSourceDefinitionBase <azure.devops.v7_1.microsoft._team_foundation._distributed_task._common._contracts.models.TaskSourceDefinitionBase>`
"""
_attribute_map = {
'data_source_bindings': {'key': 'dataSourceBindings', 'type': '[DataSourceBindingBase]'},
'inputs': {'key': 'inputs', 'type': '[TaskInputDefinitionBase]'},
'source_definitions': {'key': 'sourceDefinitions', 'type': '[TaskSourceDefinitionBase]'}
}
def __init__(self, data_source_bindings=None, inputs=None, source_definitions=None):
super(ProcessParameters, self).__init__()
self.data_source_bindings = data_source_bindings
self.inputs = inputs
self.source_definitions = source_definitions
class ProjectRetentionSetting(Model):
"""
Contains the settings for the retention rules.
:param purge_artifacts: The rules for artifact retention. Artifacts cannot live longer than a run, so this will be overridden by a shorter run purge setting.
:type purge_artifacts: :class:`RetentionSetting <azure.devops.v7_1.build.models.RetentionSetting>`
:param purge_pull_request_runs: The rules for pull request pipeline run retention.
:type purge_pull_request_runs: :class:`RetentionSetting <azure.devops.v7_1.build.models.RetentionSetting>`
:param purge_runs: The rules for pipeline run retention.
:type purge_runs: :class:`RetentionSetting <azure.devops.v7_1.build.models.RetentionSetting>`
:param retain_runs_per_protected_branch: The rules for retaining runs per protected branch.
:type retain_runs_per_protected_branch: :class:`RetentionSetting <azure.devops.v7_1.build.models.RetentionSetting>`
"""
_attribute_map = {
'purge_artifacts': {'key': 'purgeArtifacts', 'type': 'RetentionSetting'},
'purge_pull_request_runs': {'key': 'purgePullRequestRuns', 'type': 'RetentionSetting'},
'purge_runs': {'key': 'purgeRuns', 'type': 'RetentionSetting'},
'retain_runs_per_protected_branch': {'key': 'retainRunsPerProtectedBranch', 'type': 'RetentionSetting'}
}
def __init__(self, purge_artifacts=None, purge_pull_request_runs=None, purge_runs=None, retain_runs_per_protected_branch=None):
super(ProjectRetentionSetting, self).__init__()
self.purge_artifacts = purge_artifacts
self.purge_pull_request_runs = purge_pull_request_runs
self.purge_runs = purge_runs
self.retain_runs_per_protected_branch = retain_runs_per_protected_branch
class PullRequest(Model):
"""
Represents a pull request object. These are retrieved from Source Providers.
:param _links: The links to other objects related to this object.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.build.models.ReferenceLinks>`
:param author: Author of the pull request.
:type author: :class:`IdentityRef <azure.devops.v7_1.build.models.IdentityRef>`
:param current_state: Current state of the pull request, e.g. open, merged, closed, conflicts, etc.
:type current_state: str
:param description: Description for the pull request.
:type description: str
:param draft: Indicates whether the pull request is a draft.
:type draft: bool
:param id: Unique identifier for the pull request
:type id: str
:param provider_name: The name of the provider this pull request is associated with.
:type provider_name: str
:param source_branch_ref: Source branch ref of this pull request
:type source_branch_ref: str
:param source_repository_owner: Owner of the source repository of this pull request
:type source_repository_owner: str
:param target_branch_ref: Target branch ref of this pull request
:type target_branch_ref: str
:param target_repository_owner: Owner of the target repository of this pull request
:type target_repository_owner: str
:param title: Title of the pull request.
:type title: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'author': {'key': 'author', 'type': 'IdentityRef'},
'current_state': {'key': 'currentState', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'draft': {'key': 'draft', 'type': 'bool'},
'id': {'key': 'id', 'type': 'str'},
'provider_name': {'key': 'providerName', 'type': 'str'},
'source_branch_ref': {'key': 'sourceBranchRef', 'type': 'str'},
'source_repository_owner': {'key': 'sourceRepositoryOwner', 'type': 'str'},
'target_branch_ref': {'key': 'targetBranchRef', 'type': 'str'},
'target_repository_owner': {'key': 'targetRepositoryOwner', 'type': 'str'},
'title': {'key': 'title', 'type': 'str'}
}
def __init__(self, _links=None, author=None, current_state=None, description=None, draft=None, id=None, provider_name=None, source_branch_ref=None, source_repository_owner=None, target_branch_ref=None, target_repository_owner=None, title=None):
super(PullRequest, self).__init__()
self._links = _links
self.author = author
self.current_state = current_state
self.description = description
self.draft = draft
self.id = id
self.provider_name = provider_name
self.source_branch_ref = source_branch_ref
self.source_repository_owner = source_repository_owner
self.target_branch_ref = target_branch_ref
self.target_repository_owner = target_repository_owner
self.title = title
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The read-only view of the links. Because reference links are read-only, they are only exposed as a read-only view.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
class ReleaseReference(Model):
"""
Reference to a release.
:param attempt: Number of Release Attempt.
:type attempt: int
:param creation_date: Release creation date (UTC).
:type creation_date: datetime
:param definition_id: Release definition ID.
:type definition_id: int
:param environment_creation_date: Environment creation date (UTC).
:type environment_creation_date: datetime
:param environment_definition_id: Release environment definition ID.
:type environment_definition_id: int
:param environment_definition_name: Release environment definition name.
:type environment_definition_name: str
:param environment_id: Release environment ID.
:type environment_id: int
:param environment_name: Release environment name.
:type environment_name: str
:param id: Release ID.
:type id: int
:param name: Release name.
:type name: str
"""
_attribute_map = {
'attempt': {'key': 'attempt', 'type': 'int'},
'creation_date': {'key': 'creationDate', 'type': 'iso-8601'},
'definition_id': {'key': 'definitionId', 'type': 'int'},
'environment_creation_date': {'key': 'environmentCreationDate', 'type': 'iso-8601'},
'environment_definition_id': {'key': 'environmentDefinitionId', 'type': 'int'},
'environment_definition_name': {'key': 'environmentDefinitionName', 'type': 'str'},
'environment_id': {'key': 'environmentId', 'type': 'int'},
'environment_name': {'key': 'environmentName', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, attempt=None, creation_date=None, definition_id=None, environment_creation_date=None, environment_definition_id=None, environment_definition_name=None, environment_id=None, environment_name=None, id=None, name=None):
super(ReleaseReference, self).__init__()
self.attempt = attempt
self.creation_date = creation_date
self.definition_id = definition_id
self.environment_creation_date = environment_creation_date
self.environment_definition_id = environment_definition_id
self.environment_definition_name = environment_definition_name
self.environment_id = environment_id
self.environment_name = environment_name
self.id = id
self.name = name
class RepositoryWebhook(Model):
"""
Represents a repository's webhook returned from a source provider.
:param name: The friendly name of the repository.
:type name: str
:param types:
:type types: list of DefinitionTriggerType
:param url: The URL of the repository.
:type url: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'types': {'key': 'types', 'type': '[object]'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, name=None, types=None, url=None):
super(RepositoryWebhook, self).__init__()
self.name = name
self.types = types
self.url = url
class ResourceRef(Model):
"""
:param id:
:type id: str
:param url:
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, url=None):
super(ResourceRef, self).__init__()
self.id = id
self.url = url
class RetentionLease(Model):
"""
A valid retention lease prevents automated systems from deleting a pipeline run.
:param created_on: When the lease was created.
:type created_on: datetime
:param definition_id: The pipeline definition of the run.
:type definition_id: int
:param lease_id: The unique identifier for this lease.
:type lease_id: int
:param owner_id: Non-unique string that identifies the owner of a retention lease.
:type owner_id: str
:param protect_pipeline: If set, this lease will also prevent the pipeline from being deleted while the lease is still valid.
:type protect_pipeline: bool
:param run_id: The pipeline run protected by this lease.
:type run_id: int
:param valid_until: The last day the lease is considered valid.
:type valid_until: datetime
"""
_attribute_map = {
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'definition_id': {'key': 'definitionId', 'type': 'int'},
'lease_id': {'key': 'leaseId', 'type': 'int'},
'owner_id': {'key': 'ownerId', 'type': 'str'},
'protect_pipeline': {'key': 'protectPipeline', 'type': 'bool'},
'run_id': {'key': 'runId', 'type': 'int'},
'valid_until': {'key': 'validUntil', 'type': 'iso-8601'}
}
def __init__(self, created_on=None, definition_id=None, lease_id=None, owner_id=None, protect_pipeline=None, run_id=None, valid_until=None):
super(RetentionLease, self).__init__()
self.created_on = created_on
self.definition_id = definition_id
self.lease_id = lease_id
self.owner_id = owner_id
self.protect_pipeline = protect_pipeline
self.run_id = run_id
self.valid_until = valid_until
class RetentionLeaseUpdate(Model):
"""
An update to the retention parameters of a retention lease.
:param days_valid: The number of days to consider the lease valid. A retention lease valid for more than 100 years (36500 days) will display as retaining the build "forever".
:type days_valid: int
:param protect_pipeline: If set, this lease will also prevent the pipeline from being deleted while the lease is still valid.
:type protect_pipeline: bool
"""
_attribute_map = {
'days_valid': {'key': 'daysValid', 'type': 'int'},
'protect_pipeline': {'key': 'protectPipeline', 'type': 'bool'}
}
def __init__(self, days_valid=None, protect_pipeline=None):
super(RetentionLeaseUpdate, self).__init__()
self.days_valid = days_valid
self.protect_pipeline = protect_pipeline
class RetentionPolicy(Model):
"""
Represents a retention policy for a build definition.
:param artifacts:
:type artifacts: list of str
:param artifact_types_to_delete:
:type artifact_types_to_delete: list of str
:param branches:
:type branches: list of str
:param days_to_keep: The number of days to keep builds.
:type days_to_keep: int
:param delete_build_record: Indicates whether the build record itself should be deleted.
:type delete_build_record: bool
:param delete_test_results: Indicates whether to delete test results associated with the build.
:type delete_test_results: bool
:param minimum_to_keep: The minimum number of builds to keep.
:type minimum_to_keep: int
"""
_attribute_map = {
'artifacts': {'key': 'artifacts', 'type': '[str]'},
'artifact_types_to_delete': {'key': 'artifactTypesToDelete', 'type': '[str]'},
'branches': {'key': 'branches', 'type': '[str]'},
'days_to_keep': {'key': 'daysToKeep', 'type': 'int'},
'delete_build_record': {'key': 'deleteBuildRecord', 'type': 'bool'},
'delete_test_results': {'key': 'deleteTestResults', 'type': 'bool'},
'minimum_to_keep': {'key': 'minimumToKeep', 'type': 'int'}
}
def __init__(self, artifacts=None, artifact_types_to_delete=None, branches=None, days_to_keep=None, delete_build_record=None, delete_test_results=None, minimum_to_keep=None):
super(RetentionPolicy, self).__init__()
self.artifacts = artifacts
self.artifact_types_to_delete = artifact_types_to_delete
self.branches = branches
self.days_to_keep = days_to_keep
self.delete_build_record = delete_build_record
self.delete_test_results = delete_test_results
self.minimum_to_keep = minimum_to_keep
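# Illustrative sketch (not part of the generated SDK): a policy that keeps at
# least 3 builds per branch and deletes the rest, with their test results,
# after 10 days. Branch filters conventionally use "+"/"-" prefixes.
def _example_retention_policy():
    return RetentionPolicy(
        branches=['+refs/heads/*'],
        days_to_keep=10,
        minimum_to_keep=3,
        delete_build_record=True,
        delete_test_results=True)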
class RetentionSetting(Model):
"""
Contains the minimum, maximum, and current value for a retention setting.
:param max:
:type max: int
:param min:
:type min: int
:param value:
:type value: int
"""
_attribute_map = {
'max': {'key': 'max', 'type': 'int'},
'min': {'key': 'min', 'type': 'int'},
'value': {'key': 'value', 'type': 'int'}
}
def __init__(self, max=None, min=None, value=None):
super(RetentionSetting, self).__init__()
self.max = max
self.min = min
self.value = value
class SourceProviderAttributes(Model):
"""
:param name: The name of the source provider.
:type name: str
:param supported_capabilities: The capabilities supported by this source provider.
:type supported_capabilities: dict
:param supported_triggers: The types of triggers supported by this source provider.
:type supported_triggers: list of :class:`SupportedTrigger <azure.devops.v7_1.build.models.SupportedTrigger>`
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'supported_capabilities': {'key': 'supportedCapabilities', 'type': '{bool}'},
'supported_triggers': {'key': 'supportedTriggers', 'type': '[SupportedTrigger]'}
}
def __init__(self, name=None, supported_capabilities=None, supported_triggers=None):
super(SourceProviderAttributes, self).__init__()
self.name = name
self.supported_capabilities = supported_capabilities
self.supported_triggers = supported_triggers
class SourceRepositories(Model):
"""
A set of repositories returned from the source provider.
:param continuation_token: A token used to continue this paged request; 'null' if the request is complete
:type continuation_token: str
:param page_length: The number of repositories requested for each page
:type page_length: int
:param repositories: A list of repositories
:type repositories: list of :class:`SourceRepository <azure.devops.v7_1.build.models.SourceRepository>`
:param total_page_count: The total number of pages, or '-1' if unknown
:type total_page_count: int
"""
_attribute_map = {
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'page_length': {'key': 'pageLength', 'type': 'int'},
'repositories': {'key': 'repositories', 'type': '[SourceRepository]'},
'total_page_count': {'key': 'totalPageCount', 'type': 'int'}
}
def __init__(self, continuation_token=None, page_length=None, repositories=None, total_page_count=None):
super(SourceRepositories, self).__init__()
self.continuation_token = continuation_token
self.page_length = page_length
self.repositories = repositories
self.total_page_count = total_page_count
class SourceRepository(Model):
"""
Represents a repository returned from a source provider.
:param default_branch: The name of the default branch.
:type default_branch: str
:param full_name: The full name of the repository.
:type full_name: str
:param id: The ID of the repository.
:type id: str
:param name: The friendly name of the repository.
:type name: str
:param properties:
:type properties: dict
:param source_provider_name: The name of the source provider the repository is from.
:type source_provider_name: str
:param url: The URL of the repository.
:type url: str
"""
_attribute_map = {
'default_branch': {'key': 'defaultBranch', 'type': 'str'},
'full_name': {'key': 'fullName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'source_provider_name': {'key': 'sourceProviderName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, default_branch=None, full_name=None, id=None, name=None, properties=None, source_provider_name=None, url=None):
super(SourceRepository, self).__init__()
self.default_branch = default_branch
self.full_name = full_name
self.id = id
self.name = name
self.properties = properties
self.source_provider_name = source_provider_name
self.url = url
class SourceRepositoryItem(Model):
"""
Represents an item in a repository from a source provider.
:param is_container: Whether the item is able to have sub-items (e.g., is a folder).
:type is_container: bool
:param path: The full path of the item, relative to the root of the repository.
:type path: str
:param type: The type of the item (folder, file, etc.).
:type type: str
:param url: The URL of the item.
:type url: str
"""
_attribute_map = {
'is_container': {'key': 'isContainer', 'type': 'bool'},
'path': {'key': 'path', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, is_container=None, path=None, type=None, url=None):
super(SourceRepositoryItem, self).__init__()
self.is_container = is_container
self.path = path
self.type = type
self.url = url
class StageReference(Model):
"""
Stage in pipeline
:param attempt: Attempt number of stage
:type attempt: int
:param stage_name: Name of the stage. The maximum supported length for the name is 256 characters.
:type stage_name: str
"""
_attribute_map = {
'attempt': {'key': 'attempt', 'type': 'int'},
'stage_name': {'key': 'stageName', 'type': 'str'}
}
def __init__(self, attempt=None, stage_name=None):
super(StageReference, self).__init__()
self.attempt = attempt
self.stage_name = stage_name
class SupportedTrigger(Model):
"""
:param default_polling_interval: The default interval to wait between polls (only relevant when NotificationType is Polling).
:type default_polling_interval: int
:param notification_type: How the trigger is notified of changes.
:type notification_type: str
:param supported_capabilities: The capabilities supported by this trigger.
:type supported_capabilities: dict
:param type: The type of trigger.
:type type: object
"""
_attribute_map = {
'default_polling_interval': {'key': 'defaultPollingInterval', 'type': 'int'},
'notification_type': {'key': 'notificationType', 'type': 'str'},
'supported_capabilities': {'key': 'supportedCapabilities', 'type': '{object}'},
'type': {'key': 'type', 'type': 'object'}
}
def __init__(self, default_polling_interval=None, notification_type=None, supported_capabilities=None, type=None):
super(SupportedTrigger, self).__init__()
self.default_polling_interval = default_polling_interval
self.notification_type = notification_type
self.supported_capabilities = supported_capabilities
self.type = type
class TaskAgentPoolReference(Model):
"""
Represents a reference to an agent pool.
:param id: The pool ID.
:type id: int
:param is_hosted: A value indicating whether or not this pool is managed by the service.
:type is_hosted: bool
:param name: The pool name.
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'is_hosted': {'key': 'isHosted', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, is_hosted=None, name=None):
super(TaskAgentPoolReference, self).__init__()
self.id = id
self.is_hosted = is_hosted
self.name = name
class TaskDefinitionReference(Model):
"""
A reference to a task definition.
:param definition_type: The type of task (task or task group).
:type definition_type: str
:param id: The ID of the task.
:type id: str
:param version_spec: The version of the task.
:type version_spec: str
"""
_attribute_map = {
'definition_type': {'key': 'definitionType', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'version_spec': {'key': 'versionSpec', 'type': 'str'}
}
def __init__(self, definition_type=None, id=None, version_spec=None):
super(TaskDefinitionReference, self).__init__()
self.definition_type = definition_type
self.id = id
self.version_spec = version_spec
class TaskInputDefinitionBase(Model):
"""
:param aliases:
:type aliases: list of str
:param default_value:
:type default_value: str
:param group_name:
:type group_name: str
:param help_mark_down:
:type help_mark_down: str
:param label:
:type label: str
:param name:
:type name: str
:param options:
:type options: dict
:param properties:
:type properties: dict
:param required:
:type required: bool
:param type:
:type type: str
:param validation:
:type validation: :class:`TaskInputValidation <azure.devops.v7_1.microsoft._team_foundation._distributed_task._common._contracts.models.TaskInputValidation>`
:param visible_rule:
:type visible_rule: str
"""
_attribute_map = {
'aliases': {'key': 'aliases', 'type': '[str]'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'group_name': {'key': 'groupName', 'type': 'str'},
'help_mark_down': {'key': 'helpMarkDown', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'required': {'key': 'required', 'type': 'bool'},
'type': {'key': 'type', 'type': 'str'},
'validation': {'key': 'validation', 'type': 'TaskInputValidation'},
'visible_rule': {'key': 'visibleRule', 'type': 'str'}
}
def __init__(self, aliases=None, default_value=None, group_name=None, help_mark_down=None, label=None, name=None, options=None, properties=None, required=None, type=None, validation=None, visible_rule=None):
super(TaskInputDefinitionBase, self).__init__()
self.aliases = aliases
self.default_value = default_value
self.group_name = group_name
self.help_mark_down = help_mark_down
self.label = label
self.name = name
self.options = options
self.properties = properties
self.required = required
self.type = type
self.validation = validation
self.visible_rule = visible_rule
class TaskInputValidation(Model):
"""
:param expression: Conditional expression
:type expression: str
:param message: Message explaining how user can correct if validation fails
:type message: str
"""
_attribute_map = {
'expression': {'key': 'expression', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'}
}
def __init__(self, expression=None, message=None):
super(TaskInputValidation, self).__init__()
self.expression = expression
self.message = message
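# Illustrative sketch (not part of the generated file): constructing a
# TaskInputValidation as defined above. The expression shown is a
# hypothetical conditional expression; the message is what a user would see
# when validation fails.
def _example_task_input_validation():
    return TaskInputValidation(
        expression="isMatch(value, '^[0-9]+$', 'IgnoreCase')",
        message='The value must be a whole number.')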
class TaskOrchestrationPlanReference(Model):
"""
Represents a reference to an orchestration plan.
:param orchestration_type: The type of the plan.
:type orchestration_type: int
:param plan_id: The ID of the plan.
:type plan_id: str
"""
_attribute_map = {
'orchestration_type': {'key': 'orchestrationType', 'type': 'int'},
'plan_id': {'key': 'planId', 'type': 'str'}
}
def __init__(self, orchestration_type=None, plan_id=None):
super(TaskOrchestrationPlanReference, self).__init__()
self.orchestration_type = orchestration_type
self.plan_id = plan_id
class TaskReference(Model):
"""
Represents a reference to a task.
:param id: The ID of the task definition.
:type id: str
:param name: The name of the task definition.
:type name: str
:param version: The version of the task definition.
:type version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, id=None, name=None, version=None):
super(TaskReference, self).__init__()
self.id = id
self.name = name
self.version = version
class TaskSourceDefinitionBase(Model):
"""
:param auth_key:
:type auth_key: str
:param endpoint:
:type endpoint: str
:param key_selector:
:type key_selector: str
:param selector:
:type selector: str
:param target:
:type target: str
"""
_attribute_map = {
'auth_key': {'key': 'authKey', 'type': 'str'},
'endpoint': {'key': 'endpoint', 'type': 'str'},
'key_selector': {'key': 'keySelector', 'type': 'str'},
'selector': {'key': 'selector', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'}
}
def __init__(self, auth_key=None, endpoint=None, key_selector=None, selector=None, target=None):
super(TaskSourceDefinitionBase, self).__init__()
self.auth_key = auth_key
self.endpoint = endpoint
self.key_selector = key_selector
self.selector = selector
self.target = target
class TeamProjectReference(Model):
"""
Represents a shallow reference to a TeamProject.
:param abbreviation: Project abbreviation.
:type abbreviation: str
:param default_team_image_url: URL to the default team identity image.
:type default_team_image_url: str
:param description: The project's description (if any).
:type description: str
:param id: Project identifier.
:type id: str
:param last_update_time: Project last update time.
:type last_update_time: datetime
:param name: Project name.
:type name: str
:param revision: Project revision.
:type revision: long
:param state: Project state.
:type state: object
:param url: URL to the full version of the object.
:type url: str
:param visibility: Project visibility.
:type visibility: object
"""
_attribute_map = {
'abbreviation': {'key': 'abbreviation', 'type': 'str'},
'default_team_image_url': {'key': 'defaultTeamImageUrl', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'last_update_time': {'key': 'lastUpdateTime', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'revision': {'key': 'revision', 'type': 'long'},
'state': {'key': 'state', 'type': 'object'},
'url': {'key': 'url', 'type': 'str'},
'visibility': {'key': 'visibility', 'type': 'object'}
}
def __init__(self, abbreviation=None, default_team_image_url=None, description=None, id=None, last_update_time=None, name=None, revision=None, state=None, url=None, visibility=None):
super(TeamProjectReference, self).__init__()
self.abbreviation = abbreviation
self.default_team_image_url = default_team_image_url
self.description = description
self.id = id
self.last_update_time = last_update_time
self.name = name
self.revision = revision
self.state = state
self.url = url
self.visibility = visibility
class TestResultsContext(Model):
"""
:param build:
:type build: :class:`BuildReference <azure.devops.v7_1.microsoft._team_foundation._test_management._web_api.models.BuildReference>`
:param context_type:
:type context_type: object
:param pipeline_reference:
:type pipeline_reference: :class:`PipelineReference <azure.devops.v7_1.microsoft._team_foundation._test_management._web_api.models.PipelineReference>`
:param release:
:type release: :class:`ReleaseReference <azure.devops.v7_1.microsoft._team_foundation._test_management._web_api.models.ReleaseReference>`
"""
_attribute_map = {
'build': {'key': 'build', 'type': 'BuildReference'},
'context_type': {'key': 'contextType', 'type': 'object'},
'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'},
'release': {'key': 'release', 'type': 'ReleaseReference'}
}
def __init__(self, build=None, context_type=None, pipeline_reference=None, release=None):
super(TestResultsContext, self).__init__()
self.build = build
self.context_type = context_type
self.pipeline_reference = pipeline_reference
self.release = release
class TimelineAttempt(Model):
"""
:param attempt: Gets or sets the attempt of the record.
:type attempt: int
:param record_id: Gets or sets the record identifier located within the specified timeline.
:type record_id: str
:param timeline_id: Gets or sets the timeline identifier which owns the record representing this attempt.
:type timeline_id: str
"""
_attribute_map = {
'attempt': {'key': 'attempt', 'type': 'int'},
'record_id': {'key': 'recordId', 'type': 'str'},
'timeline_id': {'key': 'timelineId', 'type': 'str'}
}
def __init__(self, attempt=None, record_id=None, timeline_id=None):
super(TimelineAttempt, self).__init__()
self.attempt = attempt
self.record_id = record_id
self.timeline_id = timeline_id
class TimelineRecord(Model):
"""
Represents an entry in a build's timeline.
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.build.models.ReferenceLinks>`
:param attempt: Attempt number of record.
:type attempt: int
:param current_operation: A string that indicates the current operation.
:type current_operation: str
:param details: A reference to a sub-timeline.
:type details: :class:`TimelineReference <azure.devops.v7_1.build.models.TimelineReference>`
:param error_count: The number of errors produced by this operation.
:type error_count: int
:param finish_time: The finish time.
:type finish_time: datetime
:param change_id: The change ID.
:type change_id: int
:param id: The ID of the record.
:type id: str
:param identifier: String identifier that is consistent across attempts.
:type identifier: str
:param issues:
:type issues: list of :class:`Issue <azure.devops.v7_1.build.models.Issue>`
:param last_modified: The time the record was last modified.
:type last_modified: datetime
:param log: A reference to the log produced by this operation.
:type log: :class:`BuildLogReference <azure.devops.v7_1.build.models.BuildLogReference>`
:param name: The name.
:type name: str
:param order: An ordinal value relative to other records.
:type order: int
:param parent_id: The ID of the record's parent.
:type parent_id: str
:param percent_complete: The current completion percentage.
:type percent_complete: int
:param previous_attempts:
:type previous_attempts: list of :class:`TimelineAttempt <azure.devops.v7_1.build.models.TimelineAttempt>`
:param queue_id: The queue ID of the queue that the operation ran on.
:type queue_id: int
:param result: The result.
:type result: object
:param result_code: The result code.
:type result_code: str
:param start_time: The start time.
:type start_time: datetime
:param state: The state of the record.
:type state: object
:param task: A reference to the task represented by this timeline record.
:type task: :class:`TaskReference <azure.devops.v7_1.build.models.TaskReference>`
:param type: The type of the record.
:type type: str
:param url: The REST URL of the timeline record.
:type url: str
:param warning_count: The number of warnings produced by this operation.
:type warning_count: int
:param worker_name: The name of the agent running the operation.
:type worker_name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'attempt': {'key': 'attempt', 'type': 'int'},
'current_operation': {'key': 'currentOperation', 'type': 'str'},
'details': {'key': 'details', 'type': 'TimelineReference'},
'error_count': {'key': 'errorCount', 'type': 'int'},
'finish_time': {'key': 'finishTime', 'type': 'iso-8601'},
'change_id': {'key': 'changeId', 'type': 'int'},
'id': {'key': 'id', 'type': 'str'},
'identifier': {'key': 'identifier', 'type': 'str'},
'issues': {'key': 'issues', 'type': '[Issue]'},
'last_modified': {'key': 'lastModified', 'type': 'iso-8601'},
'log': {'key': 'log', 'type': 'BuildLogReference'},
'name': {'key': 'name', 'type': 'str'},
'order': {'key': 'order', 'type': 'int'},
'parent_id': {'key': 'parentId', 'type': 'str'},
'percent_complete': {'key': 'percentComplete', 'type': 'int'},
'previous_attempts': {'key': 'previousAttempts', 'type': '[TimelineAttempt]'},
'queue_id': {'key': 'queueId', 'type': 'int'},
'result': {'key': 'result', 'type': 'object'},
'result_code': {'key': 'resultCode', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'state': {'key': 'state', 'type': 'object'},
'task': {'key': 'task', 'type': 'TaskReference'},
'type': {'key': 'type', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'warning_count': {'key': 'warningCount', 'type': 'int'},
'worker_name': {'key': 'workerName', 'type': 'str'}
}
def __init__(self, _links=None, attempt=None, current_operation=None, details=None, error_count=None, finish_time=None, change_id=None, id=None, identifier=None, issues=None, last_modified=None, log=None, name=None, order=None, parent_id=None, percent_complete=None, previous_attempts=None, queue_id=None, result=None, result_code=None, start_time=None, state=None, task=None, type=None, url=None, warning_count=None, worker_name=None):
super(TimelineRecord, self).__init__()
self._links = _links
self.attempt = attempt
self.current_operation = current_operation
self.details = details
self.error_count = error_count
self.finish_time = finish_time
self.change_id = change_id
self.id = id
self.identifier = identifier
self.issues = issues
self.last_modified = last_modified
self.log = log
self.name = name
self.order = order
self.parent_id = parent_id
self.percent_complete = percent_complete
self.previous_attempts = previous_attempts
self.queue_id = queue_id
self.result = result
self.result_code = result_code
self.start_time = start_time
self.state = state
self.task = task
self.type = type
self.url = url
self.warning_count = warning_count
self.worker_name = worker_name
class TimelineReference(Model):
"""
Represents a reference to a timeline.
:param change_id: The change ID.
:type change_id: int
:param id: The ID of the timeline.
:type id: str
:param url: The REST URL of the timeline.
:type url: str
"""
_attribute_map = {
'change_id': {'key': 'changeId', 'type': 'int'},
'id': {'key': 'id', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, change_id=None, id=None, url=None):
super(TimelineReference, self).__init__()
self.change_id = change_id
self.id = id
self.url = url
class UpdateProjectRetentionSettingModel(Model):
"""
Contains members for updating the retention settings values. All fields are optional.
:param artifacts_retention:
:type artifacts_retention: :class:`UpdateRetentionSettingModel <azure.devops.v7_1.build.models.UpdateRetentionSettingModel>`
:param pull_request_run_retention:
:type pull_request_run_retention: :class:`UpdateRetentionSettingModel <azure.devops.v7_1.build.models.UpdateRetentionSettingModel>`
:param retain_runs_per_protected_branch:
:type retain_runs_per_protected_branch: :class:`UpdateRetentionSettingModel <azure.devops.v7_1.build.models.UpdateRetentionSettingModel>`
:param run_retention:
:type run_retention: :class:`UpdateRetentionSettingModel <azure.devops.v7_1.build.models.UpdateRetentionSettingModel>`
"""
_attribute_map = {
'artifacts_retention': {'key': 'artifactsRetention', 'type': 'UpdateRetentionSettingModel'},
'pull_request_run_retention': {'key': 'pullRequestRunRetention', 'type': 'UpdateRetentionSettingModel'},
'retain_runs_per_protected_branch': {'key': 'retainRunsPerProtectedBranch', 'type': 'UpdateRetentionSettingModel'},
'run_retention': {'key': 'runRetention', 'type': 'UpdateRetentionSettingModel'}
}
def __init__(self, artifacts_retention=None, pull_request_run_retention=None, retain_runs_per_protected_branch=None, run_retention=None):
super(UpdateProjectRetentionSettingModel, self).__init__()
self.artifacts_retention = artifacts_retention
self.pull_request_run_retention = pull_request_run_retention
self.retain_runs_per_protected_branch = retain_runs_per_protected_branch
self.run_retention = run_retention
class UpdateRetentionSettingModel(Model):
"""
:param value:
:type value: int
"""
_attribute_map = {
'value': {'key': 'value', 'type': 'int'}
}
def __init__(self, value=None):
super(UpdateRetentionSettingModel, self).__init__()
self.value = value
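# Illustrative sketch (not part of the generated file): composing an
# UpdateProjectRetentionSettingModel from the two classes above. All fields
# are optional, so only the settings being changed need to be supplied; the
# values here are hypothetical day counts.
def _example_retention_update():
    return UpdateProjectRetentionSettingModel(
        run_retention=UpdateRetentionSettingModel(value=30),
        artifacts_retention=UpdateRetentionSettingModel(value=7))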
class UpdateStageParameters(Model):
"""
:param force_retry_all_jobs:
:type force_retry_all_jobs: bool
:param state:
:type state: object
"""
_attribute_map = {
'force_retry_all_jobs': {'key': 'forceRetryAllJobs', 'type': 'bool'},
'state': {'key': 'state', 'type': 'object'}
}
def __init__(self, force_retry_all_jobs=None, state=None):
super(UpdateStageParameters, self).__init__()
self.force_retry_all_jobs = force_retry_all_jobs
self.state = state
class UpdateTagParameters(Model):
"""
:param tags_to_add:
:type tags_to_add: list of str
:param tags_to_remove:
:type tags_to_remove: list of str
"""
_attribute_map = {
'tags_to_add': {'key': 'tagsToAdd', 'type': '[str]'},
'tags_to_remove': {'key': 'tagsToRemove', 'type': '[str]'}
}
def __init__(self, tags_to_add=None, tags_to_remove=None):
super(UpdateTagParameters, self).__init__()
self.tags_to_add = tags_to_add
self.tags_to_remove = tags_to_remove
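# Illustrative sketch (not part of the generated file): an UpdateTagParameters
# payload that adds and removes build tags in one request; the tag names are
# hypothetical.
def _example_update_tags():
    return UpdateTagParameters(
        tags_to_add=['release-candidate'],
        tags_to_remove=['work-in-progress'])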
class VariableGroupReference(Model):
"""
Represents a reference to a variable group.
:param alias: The name of the variable group.
:type alias: str
:param id: The ID of the variable group.
:type id: int
"""
_attribute_map = {
'alias': {'key': 'alias', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'}
}
def __init__(self, alias=None, id=None):
super(VariableGroupReference, self).__init__()
self.alias = alias
self.id = id
class WebApiConnectedServiceRef(Model):
"""
:param id:
:type id: str
:param url:
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, url=None):
super(WebApiConnectedServiceRef, self).__init__()
self.id = id
self.url = url
class XamlBuildControllerReference(Model):
"""
:param id: ID of the resource.
:type id: int
:param name: Name of the linked resource (definition name, controller name, etc.).
:type name: str
:param url: Full HTTP link to the resource.
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, name=None, url=None):
super(XamlBuildControllerReference, self).__init__()
self.id = id
self.name = name
self.url = url
class YamlBuild(Model):
"""
Represents a yaml build.
:param yaml: The yaml used to define the build
:type yaml: str
"""
_attribute_map = {
'yaml': {'key': 'yaml', 'type': 'str'}
}
def __init__(self, yaml=None):
super(YamlBuild, self).__init__()
self.yaml = yaml
class BuildController(XamlBuildControllerReference):
"""
:param id: ID of the resource.
:type id: int
:param name: Name of the linked resource (definition name, controller name, etc.).
:type name: str
:param url: Full HTTP link to the resource.
:type url: str
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.build.models.ReferenceLinks>`
:param created_date: The date the controller was created.
:type created_date: datetime
:param description: The description of the controller.
:type description: str
:param enabled: Indicates whether the controller is enabled.
:type enabled: bool
:param status: The status of the controller.
:type status: object
:param updated_date: The date the controller was last updated.
:type updated_date: datetime
:param uri: The controller's URI.
:type uri: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'description': {'key': 'description', 'type': 'str'},
'enabled': {'key': 'enabled', 'type': 'bool'},
'status': {'key': 'status', 'type': 'object'},
'updated_date': {'key': 'updatedDate', 'type': 'iso-8601'},
'uri': {'key': 'uri', 'type': 'str'}
}
def __init__(self, id=None, name=None, url=None, _links=None, created_date=None, description=None, enabled=None, status=None, updated_date=None, uri=None):
super(BuildController, self).__init__(id=id, name=name, url=url)
self._links = _links
self.created_date = created_date
self.description = description
self.enabled = enabled
self.status = status
self.updated_date = updated_date
self.uri = uri
class BuildDefinitionReference(DefinitionReference):
"""
Represents a reference to a build definition.
:param created_date: The date this version of the definition was created.
:type created_date: datetime
:param id: The ID of the referenced definition.
:type id: int
:param name: The name of the referenced definition.
:type name: str
:param path: The folder path of the definition.
:type path: str
:param project: A reference to the project.
:type project: :class:`TeamProjectReference <azure.devops.v7_1.build.models.TeamProjectReference>`
:param queue_status: A value that indicates whether builds can be queued against this definition.
:type queue_status: object
:param revision: The definition revision number.
:type revision: int
:param type: The type of the definition.
:type type: object
:param uri: The definition's URI.
:type uri: str
:param url: The REST URL of the definition.
:type url: str
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.build.models.ReferenceLinks>`
:param authored_by: The author of the definition.
:type authored_by: :class:`IdentityRef <azure.devops.v7_1.build.models.IdentityRef>`
:param draft_of: A reference to the definition that this definition is a draft of, if this is a draft definition.
:type draft_of: :class:`DefinitionReference <azure.devops.v7_1.build.models.DefinitionReference>`
:param drafts: The list of drafts associated with this definition, if this is not a draft definition.
:type drafts: list of :class:`DefinitionReference <azure.devops.v7_1.build.models.DefinitionReference>`
:param latest_build:
:type latest_build: :class:`Build <azure.devops.v7_1.build.models.Build>`
:param latest_completed_build:
:type latest_completed_build: :class:`Build <azure.devops.v7_1.build.models.Build>`
:param metrics:
:type metrics: list of :class:`BuildMetric <azure.devops.v7_1.build.models.BuildMetric>`
:param quality: The quality of the definition document (draft, etc.)
:type quality: object
:param queue: The default queue for builds run against this definition.
:type queue: :class:`AgentPoolQueue <azure.devops.v7_1.build.models.AgentPoolQueue>`
"""
_attribute_map = {
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'queue_status': {'key': 'queueStatus', 'type': 'object'},
'revision': {'key': 'revision', 'type': 'int'},
'type': {'key': 'type', 'type': 'object'},
'uri': {'key': 'uri', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'authored_by': {'key': 'authoredBy', 'type': 'IdentityRef'},
'draft_of': {'key': 'draftOf', 'type': 'DefinitionReference'},
'drafts': {'key': 'drafts', 'type': '[DefinitionReference]'},
'latest_build': {'key': 'latestBuild', 'type': 'Build'},
'latest_completed_build': {'key': 'latestCompletedBuild', 'type': 'Build'},
'metrics': {'key': 'metrics', 'type': '[BuildMetric]'},
'quality': {'key': 'quality', 'type': 'object'},
'queue': {'key': 'queue', 'type': 'AgentPoolQueue'}
}
def __init__(self, created_date=None, id=None, name=None, path=None, project=None, queue_status=None, revision=None, type=None, uri=None, url=None, _links=None, authored_by=None, draft_of=None, drafts=None, latest_build=None, latest_completed_build=None, metrics=None, quality=None, queue=None):
super(BuildDefinitionReference, self).__init__(created_date=created_date, id=id, name=name, path=path, project=project, queue_status=queue_status, revision=revision, type=type, uri=uri, url=url)
self._links = _links
self.authored_by = authored_by
self.draft_of = draft_of
self.drafts = drafts
self.latest_build = latest_build
self.latest_completed_build = latest_completed_build
self.metrics = metrics
self.quality = quality
self.queue = queue
class BuildDefinitionReference3_2(DefinitionReference):
"""
For back-compat with extensions that use the old Steps format instead of Process and Phases
:param created_date: The date this version of the definition was created.
:type created_date: datetime
:param id: The ID of the referenced definition.
:type id: int
:param name: The name of the referenced definition.
:type name: str
:param path: The folder path of the definition.
:type path: str
:param project: A reference to the project.
:type project: :class:`TeamProjectReference <azure.devops.v7_1.build.models.TeamProjectReference>`
:param queue_status: A value that indicates whether builds can be queued against this definition.
:type queue_status: object
:param revision: The definition revision number.
:type revision: int
:param type: The type of the definition.
:type type: object
:param uri: The definition's URI.
:type uri: str
:param url: The REST URL of the definition.
:type url: str
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.build.models.ReferenceLinks>`
:param authored_by: The author of the definition.
:type authored_by: :class:`IdentityRef <azure.devops.v7_1.build.models.IdentityRef>`
:param draft_of: A reference to the definition that this definition is a draft of, if this is a draft definition.
:type draft_of: :class:`DefinitionReference <azure.devops.v7_1.build.models.DefinitionReference>`
:param drafts: The list of drafts associated with this definition, if this is not a draft definition.
:type drafts: list of :class:`DefinitionReference <azure.devops.v7_1.build.models.DefinitionReference>`
:param metrics:
:type metrics: list of :class:`BuildMetric <azure.devops.v7_1.build.models.BuildMetric>`
:param quality: The quality of the definition document (draft, etc.)
:type quality: object
:param queue: The default queue for builds run against this definition.
:type queue: :class:`AgentPoolQueue <azure.devops.v7_1.build.models.AgentPoolQueue>`
"""
_attribute_map = {
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'queue_status': {'key': 'queueStatus', 'type': 'object'},
'revision': {'key': 'revision', 'type': 'int'},
'type': {'key': 'type', 'type': 'object'},
'uri': {'key': 'uri', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'authored_by': {'key': 'authoredBy', 'type': 'IdentityRef'},
'draft_of': {'key': 'draftOf', 'type': 'DefinitionReference'},
'drafts': {'key': 'drafts', 'type': '[DefinitionReference]'},
'metrics': {'key': 'metrics', 'type': '[BuildMetric]'},
'quality': {'key': 'quality', 'type': 'object'},
'queue': {'key': 'queue', 'type': 'AgentPoolQueue'}
}
def __init__(self, created_date=None, id=None, name=None, path=None, project=None, queue_status=None, revision=None, type=None, uri=None, url=None, _links=None, authored_by=None, draft_of=None, drafts=None, metrics=None, quality=None, queue=None):
super(BuildDefinitionReference3_2, self).__init__(created_date=created_date, id=id, name=name, path=path, project=project, queue_status=queue_status, revision=revision, type=type, uri=uri, url=url)
self._links = _links
self.authored_by = authored_by
self.draft_of = draft_of
self.drafts = drafts
self.metrics = metrics
self.quality = quality
self.queue = queue
class BuildLog(BuildLogReference):
"""
Represents a build log.
:param id: The ID of the log.
:type id: int
:param type: The type of the log location.
:type type: str
:param url: A full link to the log resource.
:type url: str
:param created_on: The date and time the log was created.
:type created_on: datetime
:param last_changed_on: The date and time the log was last changed.
:type last_changed_on: datetime
:param line_count: The number of lines in the log.
:type line_count: long
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'type': {'key': 'type', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'last_changed_on': {'key': 'lastChangedOn', 'type': 'iso-8601'},
'line_count': {'key': 'lineCount', 'type': 'long'}
}
def __init__(self, id=None, type=None, url=None, created_on=None, last_changed_on=None, line_count=None):
super(BuildLog, self).__init__(id=id, type=type, url=url)
self.created_on = created_on
self.last_changed_on = last_changed_on
self.line_count = line_count
class BuildOptionDefinition(BuildOptionDefinitionReference):
"""
Represents an optional behavior that can be applied to a build definition.
:param id: The ID of the referenced build option.
:type id: str
:param description: The description.
:type description: str
:param groups: The list of input groups defined for the build option.
:type groups: list of :class:`BuildOptionGroupDefinition <azure.devops.v7_1.build.models.BuildOptionGroupDefinition>`
:param inputs: The list of inputs defined for the build option.
:type inputs: list of :class:`BuildOptionInputDefinition <azure.devops.v7_1.build.models.BuildOptionInputDefinition>`
:param name: The name of the build option.
:type name: str
:param ordinal: A value that indicates the relative order in which the behavior should be applied.
:type ordinal: int
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'groups': {'key': 'groups', 'type': '[BuildOptionGroupDefinition]'},
'inputs': {'key': 'inputs', 'type': '[BuildOptionInputDefinition]'},
'name': {'key': 'name', 'type': 'str'},
'ordinal': {'key': 'ordinal', 'type': 'int'}
}
def __init__(self, id=None, description=None, groups=None, inputs=None, name=None, ordinal=None):
super(BuildOptionDefinition, self).__init__(id=id)
self.description = description
self.groups = groups
self.inputs = inputs
self.name = name
self.ordinal = ordinal
class Timeline(TimelineReference):
"""
Represents the timeline of a build.
:param change_id: The change ID.
:type change_id: int
:param id: The ID of the timeline.
:type id: str
:param url: The REST URL of the timeline.
:type url: str
:param last_changed_by: The process or person that last changed the timeline.
:type last_changed_by: str
:param last_changed_on: The time the timeline was last changed.
:type last_changed_on: datetime
:param records:
:type records: list of :class:`TimelineRecord <azure.devops.v7_1.build.models.TimelineRecord>`
"""
_attribute_map = {
'change_id': {'key': 'changeId', 'type': 'int'},
'id': {'key': 'id', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'last_changed_by': {'key': 'lastChangedBy', 'type': 'str'},
'last_changed_on': {'key': 'lastChangedOn', 'type': 'iso-8601'},
'records': {'key': 'records', 'type': '[TimelineRecord]'}
}
def __init__(self, change_id=None, id=None, url=None, last_changed_by=None, last_changed_on=None, records=None):
super(Timeline, self).__init__(change_id=change_id, id=id, url=url)
self.last_changed_by = last_changed_by
self.last_changed_on = last_changed_on
self.records = records
class VariableGroup(VariableGroupReference):
"""
Represents a variable group.
:param alias: The name of the variable group.
:type alias: str
:param id: The ID of the variable group.
:type id: int
:param description: The description.
:type description: str
:param name: The name of the variable group.
:type name: str
:param type: The type of the variable group.
:type type: str
:param variables:
:type variables: dict
"""
_attribute_map = {
'alias': {'key': 'alias', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'description': {'key': 'description', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'variables': {'key': 'variables', 'type': '{BuildDefinitionVariable}'}
}
def __init__(self, alias=None, id=None, description=None, name=None, type=None, variables=None):
super(VariableGroup, self).__init__(alias=alias, id=id)
self.description = description
self.name = name
self.type = type
self.variables = variables
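# Illustrative sketch (not part of the generated file): VariableGroup extends
# VariableGroupReference, so alias and id are forwarded to the base __init__
# above. All values shown are hypothetical; per the _attribute_map, variables
# would map names to BuildDefinitionVariable instances.
def _example_variable_group():
    return VariableGroup(
        id=12,
        alias='shared-settings',
        name='Shared Settings',
        type='Vsts',
        description='Variables shared across build definitions')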
class BuildDefinition(BuildDefinitionReference):
"""
Represents a build definition.
:param created_date: The date this version of the definition was created.
:type created_date: datetime
:param id: The ID of the referenced definition.
:type id: int
:param name: The name of the referenced definition.
:type name: str
:param path: The folder path of the definition.
:type path: str
:param project: A reference to the project.
:type project: :class:`TeamProjectReference <azure.devops.v7_1.build.models.TeamProjectReference>`
:param queue_status: A value that indicates whether builds can be queued against this definition.
:type queue_status: object
:param revision: The definition revision number.
:type revision: int
:param type: The type of the definition.
:type type: object
:param uri: The definition's URI.
:type uri: str
:param url: The REST URL of the definition.
:type url: str
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.build.models.ReferenceLinks>`
:param authored_by: The author of the definition.
:type authored_by: :class:`IdentityRef <azure.devops.v7_1.build.models.IdentityRef>`
:param draft_of: A reference to the definition that this definition is a draft of, if this is a draft definition.
:type draft_of: :class:`DefinitionReference <azure.devops.v7_1.build.models.DefinitionReference>`
:param drafts: The list of drafts associated with this definition, if this is not a draft definition.
:type drafts: list of :class:`DefinitionReference <azure.devops.v7_1.build.models.DefinitionReference>`
:param latest_build:
:type latest_build: :class:`Build <azure.devops.v7_1.build.models.Build>`
:param latest_completed_build:
:type latest_completed_build: :class:`Build <azure.devops.v7_1.build.models.Build>`
:param metrics:
:type metrics: list of :class:`BuildMetric <azure.devops.v7_1.build.models.BuildMetric>`
:param quality: The quality of the definition document (draft, etc.)
:type quality: object
:param queue: The default queue for builds run against this definition.
:type queue: :class:`AgentPoolQueue <azure.devops.v7_1.build.models.AgentPoolQueue>`
:param badge_enabled: Indicates whether badges are enabled for this definition.
:type badge_enabled: bool
:param build_number_format: The build number format.
:type build_number_format: str
:param comment: A save-time comment for the definition.
:type comment: str
:param demands:
:type demands: list of :class:`object <azure.devops.v7_1.build.models.object>`
:param description: The description.
:type description: str
:param drop_location: The drop location for the definition.
:type drop_location: str
:param job_authorization_scope: The job authorization scope for builds queued against this definition.
:type job_authorization_scope: object
:param job_cancel_timeout_in_minutes: The job cancel timeout (in minutes) for builds cancelled by user for this definition.
:type job_cancel_timeout_in_minutes: int
:param job_timeout_in_minutes: The job execution timeout (in minutes) for builds queued against this definition.
:type job_timeout_in_minutes: int
:param options:
:type options: list of :class:`BuildOption <azure.devops.v7_1.build.models.BuildOption>`
:param process: The build process.
:type process: :class:`object <azure.devops.v7_1.build.models.object>`
:param process_parameters: The process parameters for this definition.
:type process_parameters: :class:`ProcessParameters <azure.devops.v7_1.build.models.ProcessParameters>`
:param properties:
:type properties: :class:`object <azure.devops.v7_1.build.models.object>`
:param repository: The repository.
:type repository: :class:`BuildRepository <azure.devops.v7_1.build.models.BuildRepository>`
:param retention_rules:
:type retention_rules: list of :class:`RetentionPolicy <azure.devops.v7_1.build.models.RetentionPolicy>`
:param tags:
:type tags: list of str
:param triggers:
:type triggers: list of :class:`object <azure.devops.v7_1.build.models.object>`
:param variable_groups:
:type variable_groups: list of :class:`VariableGroup <azure.devops.v7_1.build.models.VariableGroup>`
:param variables:
:type variables: dict
"""
_attribute_map = {
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'queue_status': {'key': 'queueStatus', 'type': 'object'},
'revision': {'key': 'revision', 'type': 'int'},
'type': {'key': 'type', 'type': 'object'},
'uri': {'key': 'uri', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'authored_by': {'key': 'authoredBy', 'type': 'IdentityRef'},
'draft_of': {'key': 'draftOf', 'type': 'DefinitionReference'},
'drafts': {'key': 'drafts', 'type': '[DefinitionReference]'},
'latest_build': {'key': 'latestBuild', 'type': 'Build'},
'latest_completed_build': {'key': 'latestCompletedBuild', 'type': 'Build'},
'metrics': {'key': 'metrics', 'type': '[BuildMetric]'},
'quality': {'key': 'quality', 'type': 'object'},
'queue': {'key': 'queue', 'type': 'AgentPoolQueue'},
'badge_enabled': {'key': 'badgeEnabled', 'type': 'bool'},
'build_number_format': {'key': 'buildNumberFormat', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'demands': {'key': 'demands', 'type': '[object]'},
'description': {'key': 'description', 'type': 'str'},
'drop_location': {'key': 'dropLocation', 'type': 'str'},
'job_authorization_scope': {'key': 'jobAuthorizationScope', 'type': 'object'},
'job_cancel_timeout_in_minutes': {'key': 'jobCancelTimeoutInMinutes', 'type': 'int'},
'job_timeout_in_minutes': {'key': 'jobTimeoutInMinutes', 'type': 'int'},
'options': {'key': 'options', 'type': '[BuildOption]'},
'process': {'key': 'process', 'type': 'object'},
'process_parameters': {'key': 'processParameters', 'type': 'ProcessParameters'},
'properties': {'key': 'properties', 'type': 'object'},
'repository': {'key': 'repository', 'type': 'BuildRepository'},
'retention_rules': {'key': 'retentionRules', 'type': '[RetentionPolicy]'},
'tags': {'key': 'tags', 'type': '[str]'},
'triggers': {'key': 'triggers', 'type': '[object]'},
'variable_groups': {'key': 'variableGroups', 'type': '[VariableGroup]'},
'variables': {'key': 'variables', 'type': '{BuildDefinitionVariable}'}
}
def __init__(self, created_date=None, id=None, name=None, path=None, project=None, queue_status=None, revision=None, type=None, uri=None, url=None, _links=None, authored_by=None, draft_of=None, drafts=None, latest_build=None, latest_completed_build=None, metrics=None, quality=None, queue=None, badge_enabled=None, build_number_format=None, comment=None, demands=None, description=None, drop_location=None, job_authorization_scope=None, job_cancel_timeout_in_minutes=None, job_timeout_in_minutes=None, options=None, process=None, process_parameters=None, properties=None, repository=None, retention_rules=None, tags=None, triggers=None, variable_groups=None, variables=None):
super(BuildDefinition, self).__init__(created_date=created_date, id=id, name=name, path=path, project=project, queue_status=queue_status, revision=revision, type=type, uri=uri, url=url, _links=_links, authored_by=authored_by, draft_of=draft_of, drafts=drafts, latest_build=latest_build, latest_completed_build=latest_completed_build, metrics=metrics, quality=quality, queue=queue)
self.badge_enabled = badge_enabled
self.build_number_format = build_number_format
self.comment = comment
self.demands = demands
self.description = description
self.drop_location = drop_location
self.job_authorization_scope = job_authorization_scope
self.job_cancel_timeout_in_minutes = job_cancel_timeout_in_minutes
self.job_timeout_in_minutes = job_timeout_in_minutes
self.options = options
self.process = process
self.process_parameters = process_parameters
self.properties = properties
self.repository = repository
self.retention_rules = retention_rules
self.tags = tags
self.triggers = triggers
self.variable_groups = variable_groups
self.variables = variables
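# Illustrative sketch (not part of the generated file): a minimal
# BuildDefinition showing the subclass-specific fields layered on top of the
# BuildDefinitionReference fields that __init__ forwards to super(). All
# values are hypothetical.
def _example_build_definition():
    return BuildDefinition(
        name='nightly-build',
        path='\\release',
        badge_enabled=True,
        build_number_format='$(date:yyyyMMdd)$(rev:.r)',
        job_timeout_in_minutes=60,
        tags=['nightly'])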
class BuildDefinition3_2(BuildDefinitionReference3_2):
"""
For back-compat with extensions that use the old Steps format instead of Process and Phases
:param created_date: The date this version of the definition was created.
:type created_date: datetime
:param id: The ID of the referenced definition.
:type id: int
:param name: The name of the referenced definition.
:type name: str
:param path: The folder path of the definition.
:type path: str
:param project: A reference to the project.
:type project: :class:`TeamProjectReference <azure.devops.v7_1.build.models.TeamProjectReference>`
:param queue_status: A value that indicates whether builds can be queued against this definition.
:type queue_status: object
:param revision: The definition revision number.
:type revision: int
:param type: The type of the definition.
:type type: object
:param uri: The definition's URI.
:type uri: str
:param url: The REST URL of the definition.
:type url: str
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.build.models.ReferenceLinks>`
:param authored_by: The author of the definition.
:type authored_by: :class:`IdentityRef <azure.devops.v7_1.build.models.IdentityRef>`
:param draft_of: A reference to the definition that this definition is a draft of, if this is a draft definition.
:type draft_of: :class:`DefinitionReference <azure.devops.v7_1.build.models.DefinitionReference>`
:param drafts: The list of drafts associated with this definition, if this is not a draft definition.
:type drafts: list of :class:`DefinitionReference <azure.devops.v7_1.build.models.DefinitionReference>`
:param metrics:
:type metrics: list of :class:`BuildMetric <azure.devops.v7_1.build.models.BuildMetric>`
:param quality: The quality of the definition document (draft, etc.)
:type quality: object
:param queue: The default queue for builds run against this definition.
:type queue: :class:`AgentPoolQueue <azure.devops.v7_1.build.models.AgentPoolQueue>`
:param badge_enabled: Indicates whether badges are enabled for this definition
:type badge_enabled: bool
:param build:
:type build: list of :class:`BuildDefinitionStep <azure.devops.v7_1.build.models.BuildDefinitionStep>`
:param build_number_format: The build number format
:type build_number_format: str
:param comment: The comment entered when saving the definition
:type comment: str
:param demands:
:type demands: list of :class:`object <azure.devops.v7_1.build.models.object>`
:param description: The description
:type description: str
:param drop_location: The drop location for the definition
:type drop_location: str
:param job_authorization_scope: The job authorization scope for builds which are queued against this definition
:type job_authorization_scope: object
:param job_cancel_timeout_in_minutes: The job cancel timeout in minutes for builds which are cancelled by user for this definition
:type job_cancel_timeout_in_minutes: int
:param job_timeout_in_minutes: The job execution timeout in minutes for builds which are queued against this definition
:type job_timeout_in_minutes: int
:param latest_build:
:type latest_build: :class:`Build <azure.devops.v7_1.build.models.Build>`
:param latest_completed_build:
:type latest_completed_build: :class:`Build <azure.devops.v7_1.build.models.Build>`
:param options:
:type options: list of :class:`BuildOption <azure.devops.v7_1.build.models.BuildOption>`
:param process_parameters: Process Parameters
:type process_parameters: :class:`ProcessParameters <azure.devops.v7_1.build.models.ProcessParameters>`
:param properties:
:type properties: :class:`object <azure.devops.v7_1.build.models.object>`
:param repository: The repository
:type repository: :class:`BuildRepository <azure.devops.v7_1.build.models.BuildRepository>`
:param retention_rules:
:type retention_rules: list of :class:`RetentionPolicy <azure.devops.v7_1.build.models.RetentionPolicy>`
:param tags:
:type tags: list of str
:param triggers:
:type triggers: list of :class:`object <azure.devops.v7_1.build.models.object>`
:param variables:
:type variables: dict
"""
_attribute_map = {
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'queue_status': {'key': 'queueStatus', 'type': 'object'},
'revision': {'key': 'revision', 'type': 'int'},
'type': {'key': 'type', 'type': 'object'},
'uri': {'key': 'uri', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'authored_by': {'key': 'authoredBy', 'type': 'IdentityRef'},
'draft_of': {'key': 'draftOf', 'type': 'DefinitionReference'},
'drafts': {'key': 'drafts', 'type': '[DefinitionReference]'},
'metrics': {'key': 'metrics', 'type': '[BuildMetric]'},
'quality': {'key': 'quality', 'type': 'object'},
'queue': {'key': 'queue', 'type': 'AgentPoolQueue'},
'badge_enabled': {'key': 'badgeEnabled', 'type': 'bool'},
'build': {'key': 'build', 'type': '[BuildDefinitionStep]'},
'build_number_format': {'key': 'buildNumberFormat', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'demands': {'key': 'demands', 'type': '[object]'},
'description': {'key': 'description', 'type': 'str'},
'drop_location': {'key': 'dropLocation', 'type': 'str'},
'job_authorization_scope': {'key': 'jobAuthorizationScope', 'type': 'object'},
'job_cancel_timeout_in_minutes': {'key': 'jobCancelTimeoutInMinutes', 'type': 'int'},
'job_timeout_in_minutes': {'key': 'jobTimeoutInMinutes', 'type': 'int'},
'latest_build': {'key': 'latestBuild', 'type': 'Build'},
'latest_completed_build': {'key': 'latestCompletedBuild', 'type': 'Build'},
'options': {'key': 'options', 'type': '[BuildOption]'},
'process_parameters': {'key': 'processParameters', 'type': 'ProcessParameters'},
'properties': {'key': 'properties', 'type': 'object'},
'repository': {'key': 'repository', 'type': 'BuildRepository'},
'retention_rules': {'key': 'retentionRules', 'type': '[RetentionPolicy]'},
'tags': {'key': 'tags', 'type': '[str]'},
'triggers': {'key': 'triggers', 'type': '[object]'},
'variables': {'key': 'variables', 'type': '{BuildDefinitionVariable}'}
}
def __init__(self, created_date=None, id=None, name=None, path=None, project=None, queue_status=None, revision=None, type=None, uri=None, url=None, _links=None, authored_by=None, draft_of=None, drafts=None, metrics=None, quality=None, queue=None, badge_enabled=None, build=None, build_number_format=None, comment=None, demands=None, description=None, drop_location=None, job_authorization_scope=None, job_cancel_timeout_in_minutes=None, job_timeout_in_minutes=None, latest_build=None, latest_completed_build=None, options=None, process_parameters=None, properties=None, repository=None, retention_rules=None, tags=None, triggers=None, variables=None):
super(BuildDefinition3_2, self).__init__(created_date=created_date, id=id, name=name, path=path, project=project, queue_status=queue_status, revision=revision, type=type, uri=uri, url=url, _links=_links, authored_by=authored_by, draft_of=draft_of, drafts=drafts, metrics=metrics, quality=quality, queue=queue)
self.badge_enabled = badge_enabled
self.build = build
self.build_number_format = build_number_format
self.comment = comment
self.demands = demands
self.description = description
self.drop_location = drop_location
self.job_authorization_scope = job_authorization_scope
self.job_cancel_timeout_in_minutes = job_cancel_timeout_in_minutes
self.job_timeout_in_minutes = job_timeout_in_minutes
self.latest_build = latest_build
self.latest_completed_build = latest_completed_build
self.options = options
self.process_parameters = process_parameters
self.properties = properties
self.repository = repository
self.retention_rules = retention_rules
self.tags = tags
self.triggers = triggers
self.variables = variables
__all__ = [
'AgentPoolQueue',
'AgentSpecification',
'AggregatedResultsAnalysis',
'AggregatedResultsByOutcome',
'AggregatedResultsDifference',
'AggregatedRunsByOutcome',
'AggregatedRunsByState',
'ArtifactResource',
'AssociatedWorkItem',
'Attachment',
'AuthorizationHeader',
'Build',
'BuildArtifact',
'BuildBadge',
'BuildDefinitionRevision',
'BuildDefinitionStep',
'BuildDefinitionTemplate',
'BuildDefinitionTemplate3_2',
'BuildDefinitionVariable',
'BuildLogReference',
'BuildMetric',
'BuildOption',
'BuildOptionDefinitionReference',
'BuildOptionGroupDefinition',
'BuildOptionInputDefinition',
'BuildReportMetadata',
'BuildRepository',
'BuildRequestValidationResult',
'BuildResourceUsage',
'BuildRetentionHistory',
'BuildRetentionSample',
'BuildSettings',
'DataSourceBindingBase',
'DefinitionReference',
'DefinitionResourceReference',
'Deployment',
'Folder',
'GraphSubjectBase',
'Change',
'IdentityRef',
'Issue',
'JobReference',
'JsonPatchOperation',
'MinimalRetentionLease',
'NewRetentionLease',
'PhaseReference',
'PipelineGeneralSettings',
'PipelineReference',
'ProcessParameters',
'ProjectRetentionSetting',
'PullRequest',
'ReferenceLinks',
'ReleaseReference',
'RepositoryWebhook',
'ResourceRef',
'RetentionLease',
'RetentionLeaseUpdate',
'RetentionPolicy',
'RetentionSetting',
'SourceProviderAttributes',
'SourceRepositories',
'SourceRepository',
'SourceRepositoryItem',
'StageReference',
'SupportedTrigger',
'TaskAgentPoolReference',
'TaskDefinitionReference',
'TaskInputDefinitionBase',
'TaskInputValidation',
'TaskOrchestrationPlanReference',
'TaskReference',
'TaskSourceDefinitionBase',
'TeamProjectReference',
'TestResultsContext',
'TimelineAttempt',
'TimelineRecord',
'TimelineReference',
'UpdateProjectRetentionSettingModel',
'UpdateRetentionSettingModel',
'UpdateStageParameters',
'UpdateTagParameters',
'VariableGroupReference',
'WebApiConnectedServiceRef',
'XamlBuildControllerReference',
'YamlBuild',
'BuildController',
'BuildDefinitionReference',
'BuildDefinitionReference3_2',
'BuildLog',
'BuildOptionDefinition',
'Timeline',
'VariableGroup',
'BuildDefinition',
'BuildDefinition3_2',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/build/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/build/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 57486
}
| 407 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class CustomerIntelligenceEvent(Model):
"""
:param area:
:type area: str
:param feature:
:type feature: str
:param properties:
:type properties: dict
"""
_attribute_map = {
'area': {'key': 'area', 'type': 'str'},
'feature': {'key': 'feature', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'}
}
def __init__(self, area=None, feature=None, properties=None):
super(CustomerIntelligenceEvent, self).__init__()
self.area = area
self.feature = feature
self.properties = properties
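# Illustrative sketch (not part of the generated file): a
# CustomerIntelligenceEvent carrying a free-form properties dict; the area,
# feature and property names are hypothetical.
def _example_customer_intelligence_event():
    return CustomerIntelligenceEvent(
        area='CLI',
        feature='BuildQueue',
        properties={'durationMs': 1234})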
__all__ = [
'CustomerIntelligenceEvent',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/customer_intelligence/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/customer_intelligence/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 353
}
| 408 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .models import *
from .identity_client import IdentityClient
__all__ = [
'AccessTokenResult',
'AuthorizationGrant',
'CreateScopeInfo',
'FrameworkIdentityInfo',
'GroupMembership',
'ChangedIdentities',
'ChangedIdentitiesContext',
'Identity',
'IdentityBase',
'IdentityBatchInfo',
'IdentityRightsTransferData',
'IdentityScope',
'IdentitySelf',
'IdentitySnapshot',
'IdentityUpdateData',
'JsonPatchOperation',
'JsonWebToken',
'PagedIdentities',
'RefreshTokenGrant',
'SwapIdentityInfo',
'TenantInfo',
'IdentityClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/identity/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/identity/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 317
}
| 409 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class PipelinesChecksClient(Client):
"""PipelinesChecks
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(PipelinesChecksClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = '4a933897-0488-45af-bd82-6fd3ad33f46a'
def add_check_configuration(self, configuration, project):
"""AddCheckConfiguration.
[Preview API] Add a check configuration
:param :class:`<CheckConfiguration> <azure.devops.v7_1.pipelines_checks.models.CheckConfiguration>` configuration:
:param str project: Project ID or project name
:rtype: :class:`<CheckConfiguration> <azure.devops.v7_1.pipelines_checks.models.CheckConfiguration>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(configuration, 'CheckConfiguration')
response = self._send(http_method='POST',
location_id='86c8381e-5aee-4cde-8ae4-25c0c7f5eaea',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('CheckConfiguration', response)
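# Illustrative usage sketch (not part of the generated client), assuming a
# hypothetical organization URL, personal access token and project name;
# BasicAuthentication comes from msrest, which this client already depends on:
#
#     from msrest.authentication import BasicAuthentication
#     creds = BasicAuthentication('', 'personal-access-token')
#     client = PipelinesChecksClient(
#         base_url='https://dev.azure.com/myorg', creds=creds)
#     configuration = models.CheckConfiguration()  # populate as needed
#     created = client.add_check_configuration(configuration,
#                                              project='MyProject')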
def delete_check_configuration(self, project, id):
"""DeleteCheckConfiguration.
[Preview API] Delete check configuration by id
:param str project: Project ID or project name
:param int id: check configuration id
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
self._send(http_method='DELETE',
location_id='86c8381e-5aee-4cde-8ae4-25c0c7f5eaea',
version='7.1-preview.1',
route_values=route_values)
def get_check_configuration(self, project, id, expand=None):
"""GetCheckConfiguration.
[Preview API] Get Check configuration by Id
:param str project: Project ID or project name
        :param int id: check configuration id
        :param str expand: The properties that should be expanded in the check configuration.
:rtype: :class:`<CheckConfiguration> <azure.devops.v7_1.pipelines_checks.models.CheckConfiguration>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='86c8381e-5aee-4cde-8ae4-25c0c7f5eaea',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('CheckConfiguration', response)
def get_check_configurations_on_resource(self, project, resource_type=None, resource_id=None, expand=None):
"""GetCheckConfigurationsOnResource.
[Preview API] Get Check configuration by resource type and id
:param str project: Project ID or project name
:param str resource_type: resource type
:param str resource_id: resource id
:param str expand:
:rtype: [CheckConfiguration]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if resource_type is not None:
query_parameters['resourceType'] = self._serialize.query('resource_type', resource_type, 'str')
if resource_id is not None:
query_parameters['resourceId'] = self._serialize.query('resource_id', resource_id, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='86c8381e-5aee-4cde-8ae4-25c0c7f5eaea',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[CheckConfiguration]', self._unwrap_collection(response))
def update_check_configuration(self, configuration, project, id):
"""UpdateCheckConfiguration.
[Preview API] Update check configuration
:param :class:`<CheckConfiguration> <azure.devops.v7_1.pipelines_checks.models.CheckConfiguration>` configuration: check configuration
:param str project: Project ID or project name
:param int id: check configuration id
:rtype: :class:`<CheckConfiguration> <azure.devops.v7_1.pipelines_checks.models.CheckConfiguration>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
content = self._serialize.body(configuration, 'CheckConfiguration')
response = self._send(http_method='PATCH',
location_id='86c8381e-5aee-4cde-8ae4-25c0c7f5eaea',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('CheckConfiguration', response)
def query_check_configurations_on_resources(self, resources, project, expand=None):
"""QueryCheckConfigurationsOnResources.
[Preview API] Get check configurations for multiple resources by resource type and id.
:param [Resource] resources: List of resources.
:param str project: Project ID or project name
:param str expand: The properties that should be expanded in the list of check configurations.
:rtype: [CheckConfiguration]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
content = self._serialize.body(resources, '[Resource]')
response = self._send(http_method='POST',
location_id='5f3d0e64-f943-4584-8811-77eb495e831e',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('[CheckConfiguration]', self._unwrap_collection(response))
def evaluate_check_suite(self, request, project, expand=None):
"""EvaluateCheckSuite.
[Preview API] Initiate an evaluation for a check in a pipeline
:param :class:`<CheckSuiteRequest> <azure.devops.v7_1.pipelines_checks.models.CheckSuiteRequest>` request:
:param str project: Project ID or project name
:param str expand:
:rtype: :class:`<CheckSuite> <azure.devops.v7_1.pipelines_checks.models.CheckSuite>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
content = self._serialize.body(request, 'CheckSuiteRequest')
response = self._send(http_method='POST',
location_id='91282c1d-c183-444f-9554-1485bfb3879d',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('CheckSuite', response)
def get_check_suite(self, project, check_suite_id, expand=None):
"""GetCheckSuite.
[Preview API] Get details for a specific check evaluation
:param str project: Project ID or project name
        :param str check_suite_id: id of the check suite
        :param str expand: The properties that should be expanded.
:rtype: :class:`<CheckSuite> <azure.devops.v7_1.pipelines_checks.models.CheckSuite>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if check_suite_id is not None:
route_values['checkSuiteId'] = self._serialize.url('check_suite_id', check_suite_id, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='91282c1d-c183-444f-9554-1485bfb3879d',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('CheckSuite', response)
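# Usage sketch (editorial addition, not emitted by the code generator): a minimal example of
# driving the client above. The organization URL, personal access token, project name and
# resource id below are hypothetical placeholders; BasicAuthentication is msrest's basic-auth
# helper, which this package already depends on.
if __name__ == '__main__':
    from msrest.authentication import BasicAuthentication

    creds = BasicAuthentication('', 'my-personal-access-token')  # hypothetical PAT
    client = PipelinesChecksClient(base_url='https://dev.azure.com/my-org', creds=creds)

    # List every check configured on an environment-type resource of the project.
    configs = client.get_check_configurations_on_resource(
        project='MyProject', resource_type='environment', resource_id='42')

    # Fetch one configuration by id; 'settings' is assumed to be a valid $expand value here.
    if configs:
        config = client.get_check_configuration(
            project='MyProject', id=configs[0].id, expand='settings')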
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/pipelines_checks/pipelines_checks_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/pipelines_checks/pipelines_checks_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 4538
}
| 410 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .models import *
from .py_pi_api_client import PyPiApiClient
__all__ = [
'BatchOperationData',
'JsonPatchOperation',
'MinimalPackageDetails',
'Package',
'PackageVersionDetails',
'PyPiPackagesBatchRequest',
'PyPiPackageVersionDeletionState',
'PyPiRecycleBinPackageVersionDetails',
'ReferenceLinks',
'UpstreamingBehavior',
'UpstreamSourceInfo',
'PyPiApiClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/py_pi_api/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/py_pi_api/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 238
}
| 411 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class AuthenticationSchemeReference(Model):
"""
Specifies the authentication scheme to be used for authentication.
:param inputs: Gets or sets the key and value of the fields used for authentication.
:type inputs: dict
:param type: Gets or sets the type of authentication scheme of an endpoint.
:type type: str
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '{str}'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, inputs=None, type=None):
super(AuthenticationSchemeReference, self).__init__()
self.inputs = inputs
self.type = type
class AuthorizationHeader(Model):
"""
Represents the header of the REST request.
:param name: Gets or sets the name of authorization header.
:type name: str
:param value: Gets or sets the value of authorization header.
:type value: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, name=None, value=None):
super(AuthorizationHeader, self).__init__()
self.name = name
self.value = value
class AzureManagementGroup(Model):
"""
Azure Management Group
:param display_name: Display name of azure management group
:type display_name: str
:param id: Id of azure management group
:type id: str
:param name: Azure management group name
:type name: str
    :param tenant_id: Id of tenant to which azure management group belongs
:type tenant_id: str
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'}
}
def __init__(self, display_name=None, id=None, name=None, tenant_id=None):
super(AzureManagementGroup, self).__init__()
self.display_name = display_name
self.id = id
self.name = name
self.tenant_id = tenant_id
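# Editorial sketch: every model's _attribute_map tells msrest's Serializer/Deserializer how to
# rename fields between Python snake_case attributes and the camelCase wire format. Serializing
# the class above (a self-contained example; the management group values are invented):
if __name__ == '__main__':
    from msrest import Serializer

    group = AzureManagementGroup(display_name='Contoso Root', id='mg-1', name='contoso-root',
                                 tenant_id='00000000-0000-0000-0000-000000000000')
    serializer = Serializer({'AzureManagementGroup': AzureManagementGroup})
    print(serializer.body(group, 'AzureManagementGroup'))
    # -> {'displayName': 'Contoso Root', 'id': 'mg-1', 'name': 'contoso-root',
    #     'tenantId': '00000000-0000-0000-0000-000000000000'}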
class AzureManagementGroupQueryResult(Model):
"""
Azure management group query result
:param error_message: Error message in case of an exception
:type error_message: str
:param value: List of azure management groups
:type value: list of :class:`AzureManagementGroup <azure.devops.v7_1.service_endpoint.models.AzureManagementGroup>`
"""
_attribute_map = {
'error_message': {'key': 'errorMessage', 'type': 'str'},
'value': {'key': 'value', 'type': '[AzureManagementGroup]'}
}
def __init__(self, error_message=None, value=None):
super(AzureManagementGroupQueryResult, self).__init__()
self.error_message = error_message
self.value = value
class AzureSubscription(Model):
"""
:param display_name:
:type display_name: str
:param subscription_id:
:type subscription_id: str
:param subscription_tenant_id:
:type subscription_tenant_id: str
:param subscription_tenant_name:
:type subscription_tenant_name: str
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'subscription_tenant_id': {'key': 'subscriptionTenantId', 'type': 'str'},
'subscription_tenant_name': {'key': 'subscriptionTenantName', 'type': 'str'}
}
def __init__(self, display_name=None, subscription_id=None, subscription_tenant_id=None, subscription_tenant_name=None):
super(AzureSubscription, self).__init__()
self.display_name = display_name
self.subscription_id = subscription_id
self.subscription_tenant_id = subscription_tenant_id
self.subscription_tenant_name = subscription_tenant_name
class AzureSubscriptionQueryResult(Model):
"""
:param error_message:
:type error_message: str
:param value:
:type value: list of :class:`AzureSubscription <azure.devops.v7_1.service_endpoint.models.AzureSubscription>`
"""
_attribute_map = {
'error_message': {'key': 'errorMessage', 'type': 'str'},
'value': {'key': 'value', 'type': '[AzureSubscription]'}
}
def __init__(self, error_message=None, value=None):
super(AzureSubscriptionQueryResult, self).__init__()
self.error_message = error_message
self.value = value
class ClientCertificate(Model):
"""
Specifies the client certificate to be used for the endpoint request.
:param value: Gets or sets the value of client certificate.
:type value: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, value=None):
super(ClientCertificate, self).__init__()
self.value = value
class DataSource(Model):
"""
Specifies the data sources for this endpoint.
:param authentication_scheme: Gets or sets the authentication scheme for the endpoint request.
:type authentication_scheme: :class:`AuthenticationSchemeReference <azure.devops.v7_1.service_endpoint.models.AuthenticationSchemeReference>`
    :param callback_context_template: Gets or sets the pagination format supported by this data source (ContinuationToken/SkipTop).
:type callback_context_template: str
:param callback_required_template: Gets or sets the template to check if subsequent call is needed.
:type callback_required_template: str
:param endpoint_url: Gets or sets the endpoint url of the data source.
:type endpoint_url: str
:param headers: Gets or sets the authorization headers of the request.
:type headers: list of :class:`AuthorizationHeader <azure.devops.v7_1.service_endpoint.models.AuthorizationHeader>`
:param initial_context_template: Gets or sets the initial value of the query params.
:type initial_context_template: str
:param name: Gets or sets the name of the data source.
:type name: str
:param request_content: Gets or sets the request content of the endpoint request.
:type request_content: str
:param request_verb: Gets or sets the request method of the endpoint request.
:type request_verb: str
:param resource_url: Gets or sets the resource url of the endpoint request.
:type resource_url: str
:param result_selector: Gets or sets the result selector to filter the response of the endpoint request.
:type result_selector: str
"""
_attribute_map = {
'authentication_scheme': {'key': 'authenticationScheme', 'type': 'AuthenticationSchemeReference'},
'callback_context_template': {'key': 'callbackContextTemplate', 'type': 'str'},
'callback_required_template': {'key': 'callbackRequiredTemplate', 'type': 'str'},
'endpoint_url': {'key': 'endpointUrl', 'type': 'str'},
'headers': {'key': 'headers', 'type': '[AuthorizationHeader]'},
'initial_context_template': {'key': 'initialContextTemplate', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'request_content': {'key': 'requestContent', 'type': 'str'},
'request_verb': {'key': 'requestVerb', 'type': 'str'},
'resource_url': {'key': 'resourceUrl', 'type': 'str'},
'result_selector': {'key': 'resultSelector', 'type': 'str'}
}
def __init__(self, authentication_scheme=None, callback_context_template=None, callback_required_template=None, endpoint_url=None, headers=None, initial_context_template=None, name=None, request_content=None, request_verb=None, resource_url=None, result_selector=None):
super(DataSource, self).__init__()
self.authentication_scheme = authentication_scheme
self.callback_context_template = callback_context_template
self.callback_required_template = callback_required_template
self.endpoint_url = endpoint_url
self.headers = headers
self.initial_context_template = initial_context_template
self.name = name
self.request_content = request_content
self.request_verb = request_verb
self.resource_url = resource_url
self.result_selector = result_selector
class DataSourceBindingBase(Model):
"""
Represents binding of data source for the service endpoint request.
    :param callback_context_template: Pagination format supported by this data source (ContinuationToken/SkipTop).
:type callback_context_template: str
    :param callback_required_template: Gets or sets the template to check if a subsequent call is needed.
:type callback_required_template: str
:param data_source_name: Gets or sets the name of the data source.
:type data_source_name: str
:param endpoint_id: Gets or sets the endpoint Id.
:type endpoint_id: str
:param endpoint_url: Gets or sets the url of the service endpoint.
:type endpoint_url: str
:param headers: Gets or sets the authorization headers.
:type headers: list of :class:`AuthorizationHeader <azure.devops.v7_1.microsoft._team_foundation._distributed_task._common._contracts.models.AuthorizationHeader>`
:param initial_context_template: Defines the initial value of the query params
:type initial_context_template: str
:param parameters: Gets or sets the parameters for the data source.
:type parameters: dict
:param request_content: Gets or sets http request body
:type request_content: str
:param request_verb: Gets or sets http request verb
:type request_verb: str
:param result_selector: Gets or sets the result selector.
:type result_selector: str
:param result_template: Gets or sets the result template.
:type result_template: str
:param target: Gets or sets the target of the data source.
:type target: str
"""
_attribute_map = {
'callback_context_template': {'key': 'callbackContextTemplate', 'type': 'str'},
'callback_required_template': {'key': 'callbackRequiredTemplate', 'type': 'str'},
'data_source_name': {'key': 'dataSourceName', 'type': 'str'},
'endpoint_id': {'key': 'endpointId', 'type': 'str'},
'endpoint_url': {'key': 'endpointUrl', 'type': 'str'},
'headers': {'key': 'headers', 'type': '[AuthorizationHeader]'},
'initial_context_template': {'key': 'initialContextTemplate', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'request_content': {'key': 'requestContent', 'type': 'str'},
'request_verb': {'key': 'requestVerb', 'type': 'str'},
'result_selector': {'key': 'resultSelector', 'type': 'str'},
'result_template': {'key': 'resultTemplate', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'}
}
def __init__(self, callback_context_template=None, callback_required_template=None, data_source_name=None, endpoint_id=None, endpoint_url=None, headers=None, initial_context_template=None, parameters=None, request_content=None, request_verb=None, result_selector=None, result_template=None, target=None):
super(DataSourceBindingBase, self).__init__()
self.callback_context_template = callback_context_template
self.callback_required_template = callback_required_template
self.data_source_name = data_source_name
self.endpoint_id = endpoint_id
self.endpoint_url = endpoint_url
self.headers = headers
self.initial_context_template = initial_context_template
self.parameters = parameters
self.request_content = request_content
self.request_verb = request_verb
self.result_selector = result_selector
self.result_template = result_template
self.target = target
class DataSourceDetails(Model):
"""
Represents details of the service endpoint data source.
:param data_source_name: Gets or sets the data source name.
:type data_source_name: str
:param data_source_url: Gets or sets the data source url.
:type data_source_url: str
:param headers: Gets or sets the request headers.
:type headers: list of :class:`AuthorizationHeader <azure.devops.v7_1.service_endpoint.models.AuthorizationHeader>`
:param initial_context_template: Gets or sets the initialization context used for the initial call to the data source
:type initial_context_template: str
:param parameters: Gets the parameters of data source.
:type parameters: dict
:param request_content: Gets or sets the data source request content.
:type request_content: str
:param request_verb: Gets or sets the data source request verb. Get/Post are the only implemented types
:type request_verb: str
:param resource_url: Gets or sets the resource url of data source.
:type resource_url: str
:param result_selector: Gets or sets the result selector.
:type result_selector: str
"""
_attribute_map = {
'data_source_name': {'key': 'dataSourceName', 'type': 'str'},
'data_source_url': {'key': 'dataSourceUrl', 'type': 'str'},
'headers': {'key': 'headers', 'type': '[AuthorizationHeader]'},
'initial_context_template': {'key': 'initialContextTemplate', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'request_content': {'key': 'requestContent', 'type': 'str'},
'request_verb': {'key': 'requestVerb', 'type': 'str'},
'resource_url': {'key': 'resourceUrl', 'type': 'str'},
'result_selector': {'key': 'resultSelector', 'type': 'str'}
}
def __init__(self, data_source_name=None, data_source_url=None, headers=None, initial_context_template=None, parameters=None, request_content=None, request_verb=None, resource_url=None, result_selector=None):
super(DataSourceDetails, self).__init__()
self.data_source_name = data_source_name
self.data_source_url = data_source_url
self.headers = headers
self.initial_context_template = initial_context_template
self.parameters = parameters
self.request_content = request_content
self.request_verb = request_verb
self.resource_url = resource_url
self.result_selector = result_selector
class DependencyBinding(Model):
"""
Represents the details of the input on which a given input is dependent.
:param key: Gets or sets the value of the field on which url is dependent.
:type key: str
:param value: Gets or sets the corresponding value of url.
:type value: str
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, key=None, value=None):
super(DependencyBinding, self).__init__()
self.key = key
self.value = value
class DependencyData(Model):
"""
Represents the dependency data for the endpoint inputs.
:param input: Gets or sets the category of dependency data.
:type input: str
:param map: Gets or sets the key-value pair to specify properties and their values.
:type map: list of { key: str; value: [{ key: str; value: str }] }
"""
_attribute_map = {
'input': {'key': 'input', 'type': 'str'},
'map': {'key': 'map', 'type': '[{ key: str; value: [{ key: str; value: str }] }]'}
}
def __init__(self, input=None, map=None):
super(DependencyData, self).__init__()
self.input = input
self.map = map
class DependsOn(Model):
"""
Represents the inputs on which any given input is dependent.
:param input: Gets or sets the ID of the field on which URL's value is dependent.
:type input: str
:param map: Gets or sets key-value pair containing other's field value and corresponding url value.
:type map: list of :class:`DependencyBinding <azure.devops.v7_1.service_endpoint.models.DependencyBinding>`
"""
_attribute_map = {
'input': {'key': 'input', 'type': 'str'},
'map': {'key': 'map', 'type': '[DependencyBinding]'}
}
def __init__(self, input=None, map=None):
super(DependsOn, self).__init__()
self.input = input
self.map = map
class EndpointAuthorization(Model):
"""
Represents the authorization used for service endpoint.
:param parameters: Gets or sets the parameters for the selected authorization scheme.
:type parameters: dict
:param scheme: Gets or sets the scheme used for service endpoint authentication.
:type scheme: str
"""
_attribute_map = {
'parameters': {'key': 'parameters', 'type': '{str}'},
'scheme': {'key': 'scheme', 'type': 'str'}
}
def __init__(self, parameters=None, scheme=None):
super(EndpointAuthorization, self).__init__()
self.parameters = parameters
self.scheme = scheme
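# Editorial sketch: EndpointAuthorization is the piece of a ServiceEndpoint that carries
# credentials. The scheme name and parameter keys below are illustrative; the keys a real
# endpoint expects depend on the endpoint type's authentication scheme.
if __name__ == '__main__':
    token_auth = EndpointAuthorization(
        scheme='Token',                     # scheme understood by the target endpoint type
        parameters={'apitoken': 'secret'})  # parameter names vary per scheme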
class EndpointUrl(Model):
"""
Represents url of the service endpoint.
:param depends_on: Gets or sets the dependency bindings.
:type depends_on: :class:`DependsOn <azure.devops.v7_1.service_endpoint.models.DependsOn>`
:param display_name: Gets or sets the display name of service endpoint url.
:type display_name: str
:param format: Gets or sets the format of the url.
:type format: str
:param help_text: Gets or sets the help text of service endpoint url.
:type help_text: str
:param is_visible: Gets or sets the visibility of service endpoint url.
:type is_visible: str
:param value: Gets or sets the value of service endpoint url.
:type value: str
"""
_attribute_map = {
'depends_on': {'key': 'dependsOn', 'type': 'DependsOn'},
'display_name': {'key': 'displayName', 'type': 'str'},
'format': {'key': 'format', 'type': 'str'},
'help_text': {'key': 'helpText', 'type': 'str'},
'is_visible': {'key': 'isVisible', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, depends_on=None, display_name=None, format=None, help_text=None, is_visible=None, value=None):
super(EndpointUrl, self).__init__()
self.depends_on = depends_on
self.display_name = display_name
self.format = format
self.help_text = help_text
self.is_visible = is_visible
self.value = value
class GraphSubjectBase(Model):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None):
super(GraphSubjectBase, self).__init__()
self._links = _links
self.descriptor = descriptor
self.display_name = display_name
self.url = url
class HelpLink(Model):
"""
Specifies the public url of the help documentation.
:param text: Gets or sets the help text.
:type text: str
:param url: Gets or sets the public url of the help documentation.
:type url: str
"""
_attribute_map = {
'text': {'key': 'text', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, text=None, url=None):
super(HelpLink, self).__init__()
self.text = text
self.url = url
class IdentityRef(GraphSubjectBase):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param directory_alias: Deprecated - Can be retrieved by querying the Graph user referenced in the "self" entry of the IdentityRef "_links" dictionary
:type directory_alias: str
:param id:
:type id: str
:param image_url: Deprecated - Available in the "avatar" entry of the IdentityRef "_links" dictionary
:type image_url: str
:param inactive: Deprecated - Can be retrieved by querying the Graph membership state referenced in the "membershipState" entry of the GraphUser "_links" dictionary
:type inactive: bool
:param is_aad_identity: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsAadUserType/Descriptor.IsAadGroupType)
:type is_aad_identity: bool
:param is_container: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsGroupType)
:type is_container: bool
:param is_deleted_in_origin:
:type is_deleted_in_origin: bool
:param profile_url: Deprecated - not in use in most preexisting implementations of ToIdentityRef
:type profile_url: str
:param unique_name: Deprecated - use Domain+PrincipalName instead
:type unique_name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'directory_alias': {'key': 'directoryAlias', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'image_url': {'key': 'imageUrl', 'type': 'str'},
'inactive': {'key': 'inactive', 'type': 'bool'},
'is_aad_identity': {'key': 'isAadIdentity', 'type': 'bool'},
'is_container': {'key': 'isContainer', 'type': 'bool'},
'is_deleted_in_origin': {'key': 'isDeletedInOrigin', 'type': 'bool'},
'profile_url': {'key': 'profileUrl', 'type': 'str'},
'unique_name': {'key': 'uniqueName', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, directory_alias=None, id=None, image_url=None, inactive=None, is_aad_identity=None, is_container=None, is_deleted_in_origin=None, profile_url=None, unique_name=None):
super(IdentityRef, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url)
self.directory_alias = directory_alias
self.id = id
self.image_url = image_url
self.inactive = inactive
self.is_aad_identity = is_aad_identity
self.is_container = is_container
self.is_deleted_in_origin = is_deleted_in_origin
self.profile_url = profile_url
self.unique_name = unique_name
class InputDescriptor(Model):
"""
Describes an input for subscriptions.
:param dependency_input_ids: The ids of all inputs that the value of this input is dependent on.
:type dependency_input_ids: list of str
:param description: Description of what this input is used for
:type description: str
:param group_name: The group localized name to which this input belongs and can be shown as a header for the container that will include all the inputs in the group.
:type group_name: str
:param has_dynamic_value_information: If true, the value information for this input is dynamic and should be fetched when the value of dependency inputs change.
:type has_dynamic_value_information: bool
:param id: Identifier for the subscription input
:type id: str
:param input_mode: Mode in which the value of this input should be entered
:type input_mode: object
:param is_confidential: Gets whether this input is confidential, such as for a password or application key
:type is_confidential: bool
:param name: Localized name which can be shown as a label for the subscription input
:type name: str
:param properties: Custom properties for the input which can be used by the service provider
:type properties: dict
:param type: Underlying data type for the input value. When this value is specified, InputMode, Validation and Values are optional.
:type type: str
:param use_in_default_description: Gets whether this input is included in the default generated action description.
:type use_in_default_description: bool
:param validation: Information to use to validate this input's value
:type validation: :class:`InputValidation <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.InputValidation>`
:param value_hint: A hint for input value. It can be used in the UI as the input placeholder.
:type value_hint: str
:param values: Information about possible values for this input
:type values: :class:`InputValues <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.InputValues>`
"""
_attribute_map = {
'dependency_input_ids': {'key': 'dependencyInputIds', 'type': '[str]'},
'description': {'key': 'description', 'type': 'str'},
'group_name': {'key': 'groupName', 'type': 'str'},
'has_dynamic_value_information': {'key': 'hasDynamicValueInformation', 'type': 'bool'},
'id': {'key': 'id', 'type': 'str'},
'input_mode': {'key': 'inputMode', 'type': 'object'},
'is_confidential': {'key': 'isConfidential', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'use_in_default_description': {'key': 'useInDefaultDescription', 'type': 'bool'},
'validation': {'key': 'validation', 'type': 'InputValidation'},
'value_hint': {'key': 'valueHint', 'type': 'str'},
'values': {'key': 'values', 'type': 'InputValues'}
}
def __init__(self, dependency_input_ids=None, description=None, group_name=None, has_dynamic_value_information=None, id=None, input_mode=None, is_confidential=None, name=None, properties=None, type=None, use_in_default_description=None, validation=None, value_hint=None, values=None):
super(InputDescriptor, self).__init__()
self.dependency_input_ids = dependency_input_ids
self.description = description
self.group_name = group_name
self.has_dynamic_value_information = has_dynamic_value_information
self.id = id
self.input_mode = input_mode
self.is_confidential = is_confidential
self.name = name
self.properties = properties
self.type = type
self.use_in_default_description = use_in_default_description
self.validation = validation
self.value_hint = value_hint
self.values = values
class InputValidation(Model):
"""
Describes what values are valid for a subscription input
:param data_type: Gets or sets the data type to validate.
:type data_type: object
:param is_required: Gets or sets if this is a required field.
:type is_required: bool
:param max_length: Gets or sets the maximum length of this descriptor.
:type max_length: int
    :param max_value: Gets or sets the maximum value for this descriptor.
:type max_value: decimal
:param min_length: Gets or sets the minimum length of this descriptor.
:type min_length: int
:param min_value: Gets or sets the minimum value for this descriptor.
:type min_value: decimal
:param pattern: Gets or sets the pattern to validate.
:type pattern: str
:param pattern_mismatch_error_message: Gets or sets the error on pattern mismatch.
:type pattern_mismatch_error_message: str
"""
_attribute_map = {
'data_type': {'key': 'dataType', 'type': 'object'},
'is_required': {'key': 'isRequired', 'type': 'bool'},
'max_length': {'key': 'maxLength', 'type': 'int'},
'max_value': {'key': 'maxValue', 'type': 'decimal'},
'min_length': {'key': 'minLength', 'type': 'int'},
'min_value': {'key': 'minValue', 'type': 'decimal'},
'pattern': {'key': 'pattern', 'type': 'str'},
'pattern_mismatch_error_message': {'key': 'patternMismatchErrorMessage', 'type': 'str'}
}
def __init__(self, data_type=None, is_required=None, max_length=None, max_value=None, min_length=None, min_value=None, pattern=None, pattern_mismatch_error_message=None):
super(InputValidation, self).__init__()
self.data_type = data_type
self.is_required = is_required
self.max_length = max_length
self.max_value = max_value
self.min_length = min_length
self.min_value = min_value
self.pattern = pattern
self.pattern_mismatch_error_message = pattern_mismatch_error_message
class InputValue(Model):
"""
Information about a single value for an input
:param data: Any other data about this input
:type data: dict
:param display_value: The text to show for the display of this value
:type display_value: str
:param value: The value to store for this input
:type value: str
"""
_attribute_map = {
'data': {'key': 'data', 'type': '{object}'},
'display_value': {'key': 'displayValue', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, data=None, display_value=None, value=None):
super(InputValue, self).__init__()
self.data = data
self.display_value = display_value
self.value = value
class InputValues(Model):
"""
Information about the possible/allowed values for a given subscription input
:param default_value: The default value to use for this input
:type default_value: str
:param error: Errors encountered while computing dynamic values.
:type error: :class:`InputValuesError <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.InputValuesError>`
:param input_id: The id of the input
:type input_id: str
:param is_disabled: Should this input be disabled
:type is_disabled: bool
:param is_limited_to_possible_values: Should the value be restricted to one of the values in the PossibleValues (True) or are the values in PossibleValues just a suggestion (False)
:type is_limited_to_possible_values: bool
:param is_read_only: Should this input be made read-only
:type is_read_only: bool
:param possible_values: Possible values that this input can take
:type possible_values: list of :class:`InputValue <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.InputValue>`
"""
_attribute_map = {
'default_value': {'key': 'defaultValue', 'type': 'str'},
'error': {'key': 'error', 'type': 'InputValuesError'},
'input_id': {'key': 'inputId', 'type': 'str'},
'is_disabled': {'key': 'isDisabled', 'type': 'bool'},
'is_limited_to_possible_values': {'key': 'isLimitedToPossibleValues', 'type': 'bool'},
'is_read_only': {'key': 'isReadOnly', 'type': 'bool'},
'possible_values': {'key': 'possibleValues', 'type': '[InputValue]'}
}
def __init__(self, default_value=None, error=None, input_id=None, is_disabled=None, is_limited_to_possible_values=None, is_read_only=None, possible_values=None):
super(InputValues, self).__init__()
self.default_value = default_value
self.error = error
self.input_id = input_id
self.is_disabled = is_disabled
self.is_limited_to_possible_values = is_limited_to_possible_values
self.is_read_only = is_read_only
self.possible_values = possible_values
class InputValuesError(Model):
"""
Error information related to a subscription input value.
:param message: The error message.
:type message: str
"""
_attribute_map = {
'message': {'key': 'message', 'type': 'str'}
}
def __init__(self, message=None):
super(InputValuesError, self).__init__()
self.message = message
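# Editorial sketch tying the input model classes together: an InputDescriptor for a
# hypothetical "region" endpoint input, validated as a required string and restricted to two
# possible values. All ids, labels and limits are invented for illustration.
if __name__ == '__main__':
    region_values = InputValues(
        input_id='region',
        is_limited_to_possible_values=True,
        possible_values=[InputValue(value='eastus', display_value='East US'),
                         InputValue(value='westus', display_value='West US')])
    region_input = InputDescriptor(
        id='region',
        name='Region',
        description='Azure region the endpoint talks to',
        is_confidential=False,
        validation=InputValidation(is_required=True, max_length=32),
        values=region_values)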
class OAuthConfiguration(Model):
"""
:param client_id: Gets or sets the ClientId
:type client_id: str
:param client_secret: Gets or sets the ClientSecret
:type client_secret: str
:param created_by: Gets or sets the identity who created the config.
:type created_by: :class:`IdentityRef <azure.devops.v7_1.service_endpoint.models.IdentityRef>`
:param created_on: Gets or sets the time when config was created.
:type created_on: datetime
:param endpoint_type: Gets or sets the type of the endpoint.
:type endpoint_type: str
:param id: Gets or sets the unique identifier of this field
:type id: str
:param modified_by: Gets or sets the identity who modified the config.
:type modified_by: :class:`IdentityRef <azure.devops.v7_1.service_endpoint.models.IdentityRef>`
:param modified_on: Gets or sets the time when variable group was modified
:type modified_on: datetime
:param name: Gets or sets the name
:type name: str
:param url: Gets or sets the Url
:type url: str
"""
_attribute_map = {
'client_id': {'key': 'clientId', 'type': 'str'},
'client_secret': {'key': 'clientSecret', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'IdentityRef'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'endpoint_type': {'key': 'endpointType', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'modified_by': {'key': 'modifiedBy', 'type': 'IdentityRef'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, client_id=None, client_secret=None, created_by=None, created_on=None, endpoint_type=None, id=None, modified_by=None, modified_on=None, name=None, url=None):
super(OAuthConfiguration, self).__init__()
self.client_id = client_id
self.client_secret = client_secret
self.created_by = created_by
self.created_on = created_on
self.endpoint_type = endpoint_type
self.id = id
self.modified_by = modified_by
self.modified_on = modified_on
self.name = name
self.url = url
class OAuthConfigurationParams(Model):
"""
:param client_id: Gets or sets the ClientId
:type client_id: str
:param client_secret: Gets or sets the ClientSecret
:type client_secret: str
:param endpoint_type: Gets or sets the type of the endpoint.
:type endpoint_type: str
:param name: Gets or sets the name
:type name: str
:param url: Gets or sets the Url
:type url: str
"""
_attribute_map = {
'client_id': {'key': 'clientId', 'type': 'str'},
'client_secret': {'key': 'clientSecret', 'type': 'str'},
'endpoint_type': {'key': 'endpointType', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, client_id=None, client_secret=None, endpoint_type=None, name=None, url=None):
super(OAuthConfigurationParams, self).__init__()
self.client_id = client_id
self.client_secret = client_secret
self.endpoint_type = endpoint_type
self.name = name
self.url = url
class ProjectReference(Model):
"""
:param id:
:type id: str
:param name:
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, name=None):
super(ProjectReference, self).__init__()
self.id = id
self.name = name
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The readonly view of the links. Because Reference links are readonly, we only want to expose them as read only.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
class RefreshAuthenticationParameters(Model):
"""
Specify the properties for refreshing the endpoint authentication object being queried
:param endpoint_id: EndpointId which needs new authentication params
:type endpoint_id: str
:param scope: Scope of the token requested. For GitHub marketplace apps, scope contains repository Ids
:type scope: list of int
    :param token_validity_in_minutes: The requested endpoint authentication should be valid for the given number of minutes. Authentication parameters will not be refreshed if the token contained in the endpoint is still active.
:type token_validity_in_minutes: int
"""
_attribute_map = {
'endpoint_id': {'key': 'endpointId', 'type': 'str'},
'scope': {'key': 'scope', 'type': '[int]'},
'token_validity_in_minutes': {'key': 'tokenValidityInMinutes', 'type': 'int'}
}
def __init__(self, endpoint_id=None, scope=None, token_validity_in_minutes=None):
super(RefreshAuthenticationParameters, self).__init__()
self.endpoint_id = endpoint_id
self.scope = scope
self.token_validity_in_minutes = token_validity_in_minutes
class ResultTransformationDetails(Model):
"""
Represents template to transform the result data.
:param callback_context_template: Gets or sets the template for callback parameters
:type callback_context_template: str
:param callback_required_template: Gets or sets the template to decide whether to callback or not
:type callback_required_template: str
:param result_template: Gets or sets the template for result transformation.
:type result_template: str
"""
_attribute_map = {
'callback_context_template': {'key': 'callbackContextTemplate', 'type': 'str'},
'callback_required_template': {'key': 'callbackRequiredTemplate', 'type': 'str'},
'result_template': {'key': 'resultTemplate', 'type': 'str'}
}
def __init__(self, callback_context_template=None, callback_required_template=None, result_template=None):
super(ResultTransformationDetails, self).__init__()
self.callback_context_template = callback_context_template
self.callback_required_template = callback_required_template
self.result_template = result_template
class ServiceEndpoint(Model):
"""
Represents an endpoint which may be used by an orchestration job.
:param administrators_group: This is a deprecated field.
:type administrators_group: :class:`IdentityRef <azure.devops.v7_1.service_endpoint.models.IdentityRef>`
:param authorization: Gets or sets the authorization data for talking to the endpoint.
:type authorization: :class:`EndpointAuthorization <azure.devops.v7_1.service_endpoint.models.EndpointAuthorization>`
:param created_by: Gets or sets the identity reference for the user who created the Service endpoint.
:type created_by: :class:`IdentityRef <azure.devops.v7_1.service_endpoint.models.IdentityRef>`
:param data:
:type data: dict
:param description: Gets or sets the description of endpoint.
:type description: str
:param group_scope_id: This is a deprecated field.
:type group_scope_id: str
:param id: Gets or sets the identifier of this endpoint.
:type id: str
:param is_ready: EndPoint state indicator
:type is_ready: bool
:param is_shared: Indicates whether service endpoint is shared with other projects or not.
:type is_shared: bool
:param name: Gets or sets the friendly name of the endpoint.
:type name: str
:param operation_status: Error message during creation/deletion of endpoint
    :type operation_status: object
    :param owner: Owner of the endpoint. Supported values are "library", "agentcloud"
:type owner: str
:param readers_group: Gets or sets the identity reference for the readers group of the service endpoint.
:type readers_group: :class:`IdentityRef <azure.devops.v7_1.service_endpoint.models.IdentityRef>`
:param service_endpoint_project_references: All other project references where the service endpoint is shared.
:type service_endpoint_project_references: list of :class:`ServiceEndpointProjectReference <azure.devops.v7_1.service_endpoint.models.ServiceEndpointProjectReference>`
:param type: Gets or sets the type of the endpoint.
:type type: str
:param url: Gets or sets the url of the endpoint.
:type url: str
"""
_attribute_map = {
'administrators_group': {'key': 'administratorsGroup', 'type': 'IdentityRef'},
'authorization': {'key': 'authorization', 'type': 'EndpointAuthorization'},
'created_by': {'key': 'createdBy', 'type': 'IdentityRef'},
'data': {'key': 'data', 'type': '{str}'},
'description': {'key': 'description', 'type': 'str'},
'group_scope_id': {'key': 'groupScopeId', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_ready': {'key': 'isReady', 'type': 'bool'},
'is_shared': {'key': 'isShared', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'operation_status': {'key': 'operationStatus', 'type': 'object'},
'owner': {'key': 'owner', 'type': 'str'},
'readers_group': {'key': 'readersGroup', 'type': 'IdentityRef'},
'service_endpoint_project_references': {'key': 'serviceEndpointProjectReferences', 'type': '[ServiceEndpointProjectReference]'},
'type': {'key': 'type', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, administrators_group=None, authorization=None, created_by=None, data=None, description=None, group_scope_id=None, id=None, is_ready=None, is_shared=None, name=None, operation_status=None, owner=None, readers_group=None, service_endpoint_project_references=None, type=None, url=None):
super(ServiceEndpoint, self).__init__()
self.administrators_group = administrators_group
self.authorization = authorization
self.created_by = created_by
self.data = data
self.description = description
self.group_scope_id = group_scope_id
self.id = id
self.is_ready = is_ready
self.is_shared = is_shared
self.name = name
self.operation_status = operation_status
self.owner = owner
self.readers_group = readers_group
self.service_endpoint_project_references = service_endpoint_project_references
self.type = type
self.url = url
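# Editorial sketch: assembling a ServiceEndpoint payload like the one a client would send to
# create a service connection. The name, type, url and credential keys are invented; real
# parameter keys depend on the chosen authentication scheme.
if __name__ == '__main__':
    endpoint = ServiceEndpoint(
        name='my-generic-connection',
        type='generic',
        url='https://api.example.com',
        description='sketch of a service connection payload',
        authorization=EndpointAuthorization(
            scheme='UsernamePassword',
            parameters={'username': 'alice', 'password': 'secret'}))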
class ServiceEndpointAuthenticationScheme(Model):
"""
Represents the authentication scheme used to authenticate the endpoint.
:param authorization_headers: Gets or sets the authorization headers of service endpoint authentication scheme.
:type authorization_headers: list of :class:`AuthorizationHeader <azure.devops.v7_1.service_endpoint.models.AuthorizationHeader>`
:param authorization_url: Gets or sets the Authorization url required to authenticate using OAuth2
:type authorization_url: str
:param client_certificates: Gets or sets the certificates of service endpoint authentication scheme.
:type client_certificates: list of :class:`ClientCertificate <azure.devops.v7_1.service_endpoint.models.ClientCertificate>`
:param data_source_bindings: Gets or sets the data source bindings of the endpoint.
:type data_source_bindings: list of :class:`DataSourceBinding <azure.devops.v7_1.service_endpoint.models.DataSourceBinding>`
:param display_name: Gets or sets the display name for the service endpoint authentication scheme.
:type display_name: str
:param input_descriptors: Gets or sets the input descriptors for the service endpoint authentication scheme.
:type input_descriptors: list of :class:`InputDescriptor <azure.devops.v7_1.service_endpoint.models.InputDescriptor>`
:param properties: Gets or sets the properties of service endpoint authentication scheme.
:type properties: dict
:param requires_oAuth2_configuration: Gets or sets whether this auth scheme requires OAuth2 configuration or not.
:type requires_oAuth2_configuration: bool
:param scheme: Gets or sets the scheme for service endpoint authentication.
:type scheme: str
"""
_attribute_map = {
'authorization_headers': {'key': 'authorizationHeaders', 'type': '[AuthorizationHeader]'},
'authorization_url': {'key': 'authorizationUrl', 'type': 'str'},
'client_certificates': {'key': 'clientCertificates', 'type': '[ClientCertificate]'},
'data_source_bindings': {'key': 'dataSourceBindings', 'type': '[DataSourceBinding]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'input_descriptors': {'key': 'inputDescriptors', 'type': '[InputDescriptor]'},
'properties': {'key': 'properties', 'type': '{str}'},
'requires_oAuth2_configuration': {'key': 'requiresOAuth2Configuration', 'type': 'bool'},
'scheme': {'key': 'scheme', 'type': 'str'}
}
def __init__(self, authorization_headers=None, authorization_url=None, client_certificates=None, data_source_bindings=None, display_name=None, input_descriptors=None, properties=None, requires_oAuth2_configuration=None, scheme=None):
super(ServiceEndpointAuthenticationScheme, self).__init__()
self.authorization_headers = authorization_headers
self.authorization_url = authorization_url
self.client_certificates = client_certificates
self.data_source_bindings = data_source_bindings
self.display_name = display_name
self.input_descriptors = input_descriptors
self.properties = properties
self.requires_oAuth2_configuration = requires_oAuth2_configuration
self.scheme = scheme
class ServiceEndpointDetails(Model):
"""
Represents details of the service endpoint.
:param authorization: Gets or sets the authorization of service endpoint.
:type authorization: :class:`EndpointAuthorization <azure.devops.v7_1.service_endpoint.models.EndpointAuthorization>`
:param data: Gets or sets the data of service endpoint.
:type data: dict
:param type: Gets or sets the type of service endpoint.
:type type: str
:param url: Gets or sets the connection url of service endpoint.
:type url: str
"""
_attribute_map = {
'authorization': {'key': 'authorization', 'type': 'EndpointAuthorization'},
'data': {'key': 'data', 'type': '{str}'},
'type': {'key': 'type', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, authorization=None, data=None, type=None, url=None):
super(ServiceEndpointDetails, self).__init__()
self.authorization = authorization
self.data = data
self.type = type
self.url = url
class ServiceEndpointExecutionData(Model):
"""
Represents service endpoint execution data.
:param definition: Gets the definition of service endpoint execution owner.
:type definition: :class:`ServiceEndpointExecutionOwner <azure.devops.v7_1.service_endpoint.models.ServiceEndpointExecutionOwner>`
:param finish_time: Gets the finish time of service endpoint execution.
:type finish_time: datetime
:param id: Gets the Id of service endpoint execution data.
:type id: long
:param owner: Gets the owner of service endpoint execution data.
:type owner: :class:`ServiceEndpointExecutionOwner <azure.devops.v7_1.service_endpoint.models.ServiceEndpointExecutionOwner>`
:param owner_details: Gets the additional details about the instance that used the service endpoint.
:type owner_details: str
:param plan_type: Gets the plan type of service endpoint execution data.
:type plan_type: str
:param result: Gets the result of service endpoint execution.
:type result: object
:param start_time: Gets the start time of service endpoint execution.
:type start_time: datetime
"""
_attribute_map = {
'definition': {'key': 'definition', 'type': 'ServiceEndpointExecutionOwner'},
'finish_time': {'key': 'finishTime', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'long'},
'owner': {'key': 'owner', 'type': 'ServiceEndpointExecutionOwner'},
'owner_details': {'key': 'ownerDetails', 'type': 'str'},
'plan_type': {'key': 'planType', 'type': 'str'},
'result': {'key': 'result', 'type': 'object'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'}
}
def __init__(self, definition=None, finish_time=None, id=None, owner=None, owner_details=None, plan_type=None, result=None, start_time=None):
super(ServiceEndpointExecutionData, self).__init__()
self.definition = definition
self.finish_time = finish_time
self.id = id
self.owner = owner
self.owner_details = owner_details
self.plan_type = plan_type
self.result = result
self.start_time = start_time
class ServiceEndpointExecutionOwner(Model):
"""
Represents execution owner of the service endpoint.
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.service_endpoint.models.ReferenceLinks>`
:param id: Gets or sets the Id of service endpoint execution owner.
:type id: int
:param name: Gets or sets the name of service endpoint execution owner.
:type name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, _links=None, id=None, name=None):
super(ServiceEndpointExecutionOwner, self).__init__()
self._links = _links
self.id = id
self.name = name
class ServiceEndpointExecutionRecord(Model):
"""
Represents the details of service endpoint execution.
:param data: Gets the execution data of service endpoint execution.
:type data: :class:`ServiceEndpointExecutionData <azure.devops.v7_1.service_endpoint.models.ServiceEndpointExecutionData>`
:param endpoint_id: Gets the Id of service endpoint.
:type endpoint_id: str
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'ServiceEndpointExecutionData'},
'endpoint_id': {'key': 'endpointId', 'type': 'str'}
}
def __init__(self, data=None, endpoint_id=None):
super(ServiceEndpointExecutionRecord, self).__init__()
self.data = data
self.endpoint_id = endpoint_id
class ServiceEndpointExecutionRecordsInput(Model):
"""
:param data:
:type data: :class:`ServiceEndpointExecutionData <azure.devops.v7_1.service_endpoint.models.ServiceEndpointExecutionData>`
:param endpoint_ids:
:type endpoint_ids: list of str
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'ServiceEndpointExecutionData'},
'endpoint_ids': {'key': 'endpointIds', 'type': '[str]'}
}
def __init__(self, data=None, endpoint_ids=None):
super(ServiceEndpointExecutionRecordsInput, self).__init__()
self.data = data
self.endpoint_ids = endpoint_ids
class ServiceEndpointProjectReference(Model):
"""
:param description: Gets or sets description of the service endpoint.
:type description: str
:param name: Gets or sets name of the service endpoint.
:type name: str
:param project_reference: Gets or sets project reference of the service endpoint.
:type project_reference: :class:`ProjectReference <azure.devops.v7_1.service_endpoint.models.ProjectReference>`
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'project_reference': {'key': 'projectReference', 'type': 'ProjectReference'}
}
def __init__(self, description=None, name=None, project_reference=None):
super(ServiceEndpointProjectReference, self).__init__()
self.description = description
self.name = name
self.project_reference = project_reference
class ServiceEndpointRequest(Model):
"""
:param data_source_details: Gets or sets the data source details for the service endpoint request.
:type data_source_details: :class:`DataSourceDetails <azure.devops.v7_1.service_endpoint.models.DataSourceDetails>`
:param result_transformation_details: Gets or sets the result transformation details for the service endpoint request.
:type result_transformation_details: :class:`ResultTransformationDetails <azure.devops.v7_1.service_endpoint.models.ResultTransformationDetails>`
:param service_endpoint_details: Gets or sets the service endpoint details for the service endpoint request.
:type service_endpoint_details: :class:`ServiceEndpointDetails <azure.devops.v7_1.service_endpoint.models.ServiceEndpointDetails>`
"""
_attribute_map = {
'data_source_details': {'key': 'dataSourceDetails', 'type': 'DataSourceDetails'},
'result_transformation_details': {'key': 'resultTransformationDetails', 'type': 'ResultTransformationDetails'},
'service_endpoint_details': {'key': 'serviceEndpointDetails', 'type': 'ServiceEndpointDetails'}
}
def __init__(self, data_source_details=None, result_transformation_details=None, service_endpoint_details=None):
super(ServiceEndpointRequest, self).__init__()
self.data_source_details = data_source_details
self.result_transformation_details = result_transformation_details
self.service_endpoint_details = service_endpoint_details
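# Editorial sketch: a ServiceEndpointRequest as a caller might build one to proxy a data
# source query through an endpoint. The data source name, endpoint url and token value are
# invented placeholders.
if __name__ == '__main__':
    request = ServiceEndpointRequest(
        data_source_details=DataSourceDetails(data_source_name='TestConnection', parameters={}),
        result_transformation_details=ResultTransformationDetails(),
        service_endpoint_details=ServiceEndpointDetails(
            type='generic',
            url='https://api.example.com',
            authorization=EndpointAuthorization(scheme='Token',
                                                parameters={'apitoken': 'secret'})))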
class ServiceEndpointRequestResult(Model):
"""
Represents result of the service endpoint request.
    :param callback_context_parameters: Gets or sets the parameters used to make subsequent calls to the data source.
    :type callback_context_parameters: dict
    :param callback_required: Gets or sets the flag that decides if another call to the data source is to be made.
    :type callback_required: bool
:param error_message: Gets or sets the error message of the service endpoint request result.
:type error_message: str
:param result: Gets or sets the result of service endpoint request.
:type result: :class:`object <azure.devops.v7_1.service_endpoint.models.object>`
:param status_code: Gets or sets the status code of the service endpoint request result.
:type status_code: object
"""
_attribute_map = {
'callback_context_parameters': {'key': 'callbackContextParameters', 'type': '{str}'},
'callback_required': {'key': 'callbackRequired', 'type': 'bool'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'result': {'key': 'result', 'type': 'object'},
'status_code': {'key': 'statusCode', 'type': 'object'}
}
def __init__(self, callback_context_parameters=None, callback_required=None, error_message=None, result=None, status_code=None):
super(ServiceEndpointRequestResult, self).__init__()
self.callback_context_parameters = callback_context_parameters
self.callback_required = callback_required
self.error_message = error_message
self.result = result
self.status_code = status_code
class ServiceEndpointType(Model):
"""
Represents type of the service endpoint.
:param authentication_schemes: Authentication scheme of service endpoint type.
:type authentication_schemes: list of :class:`ServiceEndpointAuthenticationScheme <azure.devops.v7_1.service_endpoint.models.ServiceEndpointAuthenticationScheme>`
:param data_sources: Data sources of service endpoint type.
:type data_sources: list of :class:`DataSource <azure.devops.v7_1.service_endpoint.models.DataSource>`
:param dependency_data: Dependency data of service endpoint type.
:type dependency_data: list of :class:`DependencyData <azure.devops.v7_1.service_endpoint.models.DependencyData>`
:param description: Gets or sets the description of service endpoint type.
:type description: str
:param display_name: Gets or sets the display name of service endpoint type.
:type display_name: str
:param endpoint_url: Gets or sets the endpoint url of service endpoint type.
:type endpoint_url: :class:`EndpointUrl <azure.devops.v7_1.service_endpoint.models.EndpointUrl>`
:param help_link: Gets or sets the help link of service endpoint type.
:type help_link: :class:`HelpLink <azure.devops.v7_1.service_endpoint.models.HelpLink>`
:param help_mark_down: Gets or sets the help text shown at the endpoint create dialog.
:type help_mark_down: str
:param icon_url: Gets or sets the icon url of service endpoint type.
:type icon_url: str
:param input_descriptors: Input descriptor of service endpoint type.
:type input_descriptors: list of :class:`InputDescriptor <azure.devops.v7_1.service_endpoint.models.InputDescriptor>`
:param name: Gets or sets the name of service endpoint type.
:type name: str
:param trusted_hosts: Trusted hosts of a service endpoint type.
:type trusted_hosts: list of str
:param ui_contribution_id: Gets or sets the ui contribution id of service endpoint type.
:type ui_contribution_id: str
"""
_attribute_map = {
'authentication_schemes': {'key': 'authenticationSchemes', 'type': '[ServiceEndpointAuthenticationScheme]'},
'data_sources': {'key': 'dataSources', 'type': '[DataSource]'},
'dependency_data': {'key': 'dependencyData', 'type': '[DependencyData]'},
'description': {'key': 'description', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'endpoint_url': {'key': 'endpointUrl', 'type': 'EndpointUrl'},
'help_link': {'key': 'helpLink', 'type': 'HelpLink'},
'help_mark_down': {'key': 'helpMarkDown', 'type': 'str'},
'icon_url': {'key': 'iconUrl', 'type': 'str'},
'input_descriptors': {'key': 'inputDescriptors', 'type': '[InputDescriptor]'},
'name': {'key': 'name', 'type': 'str'},
'trusted_hosts': {'key': 'trustedHosts', 'type': '[str]'},
'ui_contribution_id': {'key': 'uiContributionId', 'type': 'str'}
}
def __init__(self, authentication_schemes=None, data_sources=None, dependency_data=None, description=None, display_name=None, endpoint_url=None, help_link=None, help_mark_down=None, icon_url=None, input_descriptors=None, name=None, trusted_hosts=None, ui_contribution_id=None):
super(ServiceEndpointType, self).__init__()
self.authentication_schemes = authentication_schemes
self.data_sources = data_sources
self.dependency_data = dependency_data
self.description = description
self.display_name = display_name
self.endpoint_url = endpoint_url
self.help_link = help_link
self.help_mark_down = help_mark_down
self.icon_url = icon_url
self.input_descriptors = input_descriptors
self.name = name
self.trusted_hosts = trusted_hosts
self.ui_contribution_id = ui_contribution_id
class DataSourceBinding(DataSourceBindingBase):
"""
Represents the data source binding of the endpoint.
    :param callback_context_template: Pagination format supported by this data source (ContinuationToken/SkipTop).
:type callback_context_template: str
:param callback_required_template: Subsequent calls needed?
:type callback_required_template: str
:param data_source_name: Gets or sets the name of the data source.
:type data_source_name: str
:param endpoint_id: Gets or sets the endpoint Id.
:type endpoint_id: str
:param endpoint_url: Gets or sets the url of the service endpoint.
:type endpoint_url: str
:param headers: Gets or sets the authorization headers.
:type headers: list of :class:`AuthorizationHeader <azure.devops.v7_1.service_endpoint.models.AuthorizationHeader>`
:param initial_context_template: Defines the initial value of the query params
:type initial_context_template: str
:param parameters: Gets or sets the parameters for the data source.
:type parameters: dict
:param request_content: Gets or sets http request body
:type request_content: str
:param request_verb: Gets or sets http request verb
:type request_verb: str
:param result_selector: Gets or sets the result selector.
:type result_selector: str
:param result_template: Gets or sets the result template.
:type result_template: str
:param target: Gets or sets the target of the data source.
:type target: str
"""
_attribute_map = {
'callback_context_template': {'key': 'callbackContextTemplate', 'type': 'str'},
'callback_required_template': {'key': 'callbackRequiredTemplate', 'type': 'str'},
'data_source_name': {'key': 'dataSourceName', 'type': 'str'},
'endpoint_id': {'key': 'endpointId', 'type': 'str'},
'endpoint_url': {'key': 'endpointUrl', 'type': 'str'},
'headers': {'key': 'headers', 'type': '[AuthorizationHeader]'},
'initial_context_template': {'key': 'initialContextTemplate', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'request_content': {'key': 'requestContent', 'type': 'str'},
'request_verb': {'key': 'requestVerb', 'type': 'str'},
'result_selector': {'key': 'resultSelector', 'type': 'str'},
'result_template': {'key': 'resultTemplate', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
}
def __init__(self, callback_context_template=None, callback_required_template=None, data_source_name=None, endpoint_id=None, endpoint_url=None, headers=None, initial_context_template=None, parameters=None, request_content=None, request_verb=None, result_selector=None, result_template=None, target=None):
super(DataSourceBinding, self).__init__(callback_context_template=callback_context_template, callback_required_template=callback_required_template, data_source_name=data_source_name, endpoint_id=endpoint_id, endpoint_url=endpoint_url, headers=headers, initial_context_template=initial_context_template, parameters=parameters, request_content=request_content, request_verb=request_verb, result_selector=result_selector, result_template=result_template, target=target)
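# Illustrative sketch (not part of the generated file): a minimal
# DataSourceBinding that queries an endpoint with GET and projects the
# response with a JsonPath-style result selector. All field values below are
# assumptions for demonstration only.
#
# binding = DataSourceBinding(
#     data_source_name='TestConnection',
#     endpoint_id='<endpoint-guid>',
#     endpoint_url='{{endpoint.url}}/api/items',
#     request_verb='GET',
#     result_selector='jsonpath:$.value[*]')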
__all__ = [
'AuthenticationSchemeReference',
'AuthorizationHeader',
'AzureManagementGroup',
'AzureManagementGroupQueryResult',
'AzureSubscription',
'AzureSubscriptionQueryResult',
'ClientCertificate',
'DataSource',
'DataSourceBindingBase',
'DataSourceDetails',
'DependencyBinding',
'DependencyData',
'DependsOn',
'EndpointAuthorization',
'EndpointUrl',
'GraphSubjectBase',
'HelpLink',
'IdentityRef',
'InputDescriptor',
'InputValidation',
'InputValue',
'InputValues',
'InputValuesError',
'OAuthConfiguration',
'OAuthConfigurationParams',
'ProjectReference',
'ReferenceLinks',
'RefreshAuthenticationParameters',
'ResultTransformationDetails',
'ServiceEndpoint',
'ServiceEndpointAuthenticationScheme',
'ServiceEndpointDetails',
'ServiceEndpointExecutionData',
'ServiceEndpointExecutionOwner',
'ServiceEndpointExecutionRecord',
'ServiceEndpointExecutionRecordsInput',
'ServiceEndpointProjectReference',
'ServiceEndpointRequest',
'ServiceEndpointRequestResult',
'ServiceEndpointType',
'DataSourceBinding',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/service_endpoint/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/service_endpoint/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 23307
}
| 412 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .models import *
from .test_client import TestClient
__all__ = [
'AggregatedDataForResultTrend',
'AggregatedResultsAnalysis',
'AggregatedResultsByOutcome',
'AggregatedResultsDifference',
'AggregatedRunsByOutcome',
'AggregatedRunsByState',
'BuildConfiguration',
'BuildCoverage',
'BuildReference',
'CloneOperationInformation',
'CloneOptions',
'CloneStatistics',
'CodeCoverageData',
'CodeCoverageStatistics',
'CodeCoverageSummary',
'CoverageStatistics',
'CustomTestField',
'CustomTestFieldDefinition',
'DtlEnvironmentDetails',
'FailingSince',
'FieldDetailsForTestResults',
'FunctionCoverage',
'GraphSubjectBase',
'IdentityRef',
'JobReference',
'LastResultDetails',
'LinkedWorkItemsQuery',
'LinkedWorkItemsQueryResult',
'ModuleCoverage',
'NameValuePair',
'PhaseReference',
'PipelineReference',
'PlanUpdateModel',
'PointAssignment',
'PointsFilter',
'PointUpdateModel',
'PropertyBag',
'QueryModel',
'ReferenceLinks',
'ReleaseEnvironmentDefinitionReference',
'ReleaseReference',
'ResultRetentionSettings',
'ResultsFilter',
'RunCreateModel',
'RunFilter',
'RunStatistic',
'RunSummaryModel',
'RunUpdateModel',
'ShallowReference',
'ShallowTestCaseResult',
'SharedStepModel',
'StageReference',
'SuiteCreateModel',
'SuiteEntry',
'SuiteEntryUpdateModel',
'SuiteTestCase',
'SuiteTestCaseUpdateModel',
'SuiteUpdateModel',
'TeamContext',
'TeamProjectReference',
'TestActionResultModel',
'TestAttachment',
'TestAttachmentReference',
'TestAttachmentRequestModel',
'TestCaseResult',
'TestCaseResultAttachmentModel',
'TestCaseResultIdentifier',
'TestCaseResultUpdateModel',
'TestConfiguration',
'TestEnvironment',
'TestFailureDetails',
'TestFailuresAnalysis',
'TestFlakyIdentifier',
'TestHistoryQuery',
'TestIterationDetailsModel',
'TestMessageLogDetails',
'TestMethod',
'TestOperationReference',
'TestOutcomeSettings',
'TestPlan',
'TestPlanCloneRequest',
'TestPoint',
'TestPointsQuery',
'TestResolutionState',
'TestResultCreateModel',
'TestResultDocument',
'TestResultHistory',
'TestResultHistoryDetailsForGroup',
'TestResultHistoryForGroup',
'TestResultMetaData',
'TestResultModelBase',
'TestResultParameterModel',
'TestResultPayload',
'TestResultsContext',
'TestResultsDetails',
'TestResultsDetailsForGroup',
'TestResultsGroupsForBuild',
'TestResultsGroupsForRelease',
'TestResultsQuery',
'TestResultSummary',
'TestResultTrendFilter',
'TestRun',
'TestRunCoverage',
'TestRunStatistic',
'TestSession',
'TestSettings',
'TestSubResult',
'TestSuite',
'TestSuiteCloneRequest',
'TestSummaryForWorkItem',
'TestTag',
'TestToWorkItemLinks',
'TestVariable',
'WorkItemReference',
'WorkItemToTestLinks',
'TestClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/test/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/test/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 1225
}
| 413 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class BatchOperationData(Model):
"""
Do not attempt to use this type to create a new BatchOperationData. This type does not contain sufficient fields to create a new batch operation data.
"""
_attribute_map = {
}
def __init__(self):
super(BatchOperationData, self).__init__()
class JsonPatchOperation(Model):
"""
The JSON model for a JSON Patch operation
:param from_: The path to copy from for the Move/Copy operation.
:type from_: str
:param op: The patch operation
:type op: object
:param path: The path for the operation. In the case of an array, a zero based index can be used to specify the position in the array (e.g. /biscuits/0/name). The "-" character can be used instead of an index to insert at the end of the array (e.g. /biscuits/-).
:type path: str
:param value: The value for the operation. This is either a primitive or a JToken.
:type value: object
"""
_attribute_map = {
'from_': {'key': 'from', 'type': 'str'},
'op': {'key': 'op', 'type': 'object'},
'path': {'key': 'path', 'type': 'str'},
'value': {'key': 'value', 'type': 'object'}
}
def __init__(self, from_=None, op=None, path=None, value=None):
super(JsonPatchOperation, self).__init__()
self.from_ = from_
self.op = op
self.path = path
self.value = value
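# Illustrative sketch (not part of the generated file): a JSON Patch "add"
# operation following RFC 6902 semantics, where the "-" index appends to the
# end of an array. The path and value are assumptions for demonstration only.
#
# patch = JsonPatchOperation(op='add', path='/views/-', value='Release')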
class MinimalPackageDetails(Model):
"""
Minimal package details required to identify a package within a protocol.
:param id: Package name.
:type id: str
:param version: Package version.
:type version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, id=None, version=None):
super(MinimalPackageDetails, self).__init__()
self.id = id
self.version = version
class Package(Model):
"""
Package version metadata for a Universal package
:param _links: Related REST links.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.upack.models.ReferenceLinks>`
:param deleted_date: If and when the package was deleted.
:type deleted_date: datetime
:param id: Package Id.
:type id: str
:param name: The display name of the package.
:type name: str
:param permanently_deleted_date: If and when the package was permanently deleted.
:type permanently_deleted_date: datetime
:param version: The version of the package.
:type version: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'deleted_date': {'key': 'deletedDate', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'permanently_deleted_date': {'key': 'permanentlyDeletedDate', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, _links=None, deleted_date=None, id=None, name=None, permanently_deleted_date=None, version=None):
super(Package, self).__init__()
self._links = _links
self.deleted_date = deleted_date
self.id = id
self.name = name
self.permanently_deleted_date = permanently_deleted_date
self.version = version
class PackageVersionDetails(Model):
"""
:param views: The view to which the package version will be added
:type views: :class:`JsonPatchOperation <azure.devops.v7_1.upack.models.JsonPatchOperation>`
"""
_attribute_map = {
'views': {'key': 'views', 'type': 'JsonPatchOperation'}
}
def __init__(self, views=None):
super(PackageVersionDetails, self).__init__()
self.views = views
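# Illustrative sketch (not part of the generated file): promoting a package
# version to a feed view by wrapping a JsonPatchOperation in
# PackageVersionDetails. The view name 'Release' is an assumption.
#
# details = PackageVersionDetails(
#     views=JsonPatchOperation(op='add', path='/views/-', value='Release'))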
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The readonly view of the links. Because Reference links are readonly, we only want to expose them as read only.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
class UPackPackagesBatchRequest(Model):
"""
A batch of operations to apply to package versions.
:param data: Data required to perform the operation. This is optional based on the type of the operation. Use BatchPromoteData if performing a promote operation.
:type data: :class:`BatchOperationData <azure.devops.v7_1.upack.models.BatchOperationData>`
:param operation: Type of operation that needs to be performed on packages.
:type operation: object
:param packages: The packages onto which the operation will be performed.
:type packages: list of :class:`MinimalPackageDetails <azure.devops.v7_1.upack.models.MinimalPackageDetails>`
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'BatchOperationData'},
'operation': {'key': 'operation', 'type': 'object'},
'packages': {'key': 'packages', 'type': '[MinimalPackageDetails]'}
}
def __init__(self, data=None, operation=None, packages=None):
super(UPackPackagesBatchRequest, self).__init__()
self.data = data
self.operation = operation
self.packages = packages
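# Illustrative sketch (not part of the generated file): a batch request that
# applies one operation to several package versions at once. The operation
# name and the package coordinates are assumptions for demonstration only.
#
# batch = UPackPackagesBatchRequest(
#     operation='promote',
#     packages=[
#         MinimalPackageDetails(id='my-package', version='1.0.0'),
#         MinimalPackageDetails(id='my-package', version='1.1.0'),
#     ])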
class UPackPackageVersionDeletionState(Model):
"""
Deletion state of a Universal package.
:param deleted_date: UTC date the package was deleted.
:type deleted_date: datetime
:param name: Name of the package.
:type name: str
:param version: Version of the package.
:type version: str
"""
_attribute_map = {
'deleted_date': {'key': 'deletedDate', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, deleted_date=None, name=None, version=None):
super(UPackPackageVersionDeletionState, self).__init__()
self.deleted_date = deleted_date
self.name = name
self.version = version
class UPackRecycleBinPackageVersionDetails(Model):
"""
    :param deleted: Setting to false will undo earlier deletion and restore the package to the feed.
:type deleted: bool
"""
_attribute_map = {
'deleted': {'key': 'deleted', 'type': 'bool'}
}
def __init__(self, deleted=None):
super(UPackRecycleBinPackageVersionDetails, self).__init__()
self.deleted = deleted
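# Illustrative sketch (not part of the generated file): per the docstring
# above, setting deleted=False restores a previously deleted package version
# from the recycle bin.
#
# restore = UPackRecycleBinPackageVersionDetails(deleted=False)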
__all__ = [
'BatchOperationData',
'JsonPatchOperation',
'MinimalPackageDetails',
'Package',
'PackageVersionDetails',
'ReferenceLinks',
'UPackPackagesBatchRequest',
'UPackPackageVersionDeletionState',
'UPackRecycleBinPackageVersionDetails',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/upack_api/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/upack_api/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 2655
}
| 414 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class WorkItemTrackingProcessClient(Client):
"""WorkItemTrackingProcess
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(WorkItemTrackingProcessClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = '5264459e-e5e0-4bd8-b118-0985e68a4ec5'
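# Illustrative sketch (not part of the generated client): obtaining this
# client through the top-level Connection object. The organization URL and
# personal access token are placeholders, and the versioned factory accessor
# name is an assumption following the azure-devops package convention of
# get_<area>_client().
#
# from azure.devops.connection import Connection
# from msrest.authentication import BasicAuthentication
#
# connection = Connection(
#     base_url='https://dev.azure.com/<organization>',
#     creds=BasicAuthentication('', '<personal-access-token>'))
# client = connection.clients_v7_1.get_work_item_tracking_process_client()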
def create_process_behavior(self, behavior, process_id):
"""CreateProcessBehavior.
[Preview API] Creates a single behavior in the given process.
:param :class:`<ProcessBehaviorCreateRequest> <azure.devops.v7_1.work_item_tracking_process.models.ProcessBehaviorCreateRequest>` behavior:
:param str process_id: The ID of the process
:rtype: :class:`<ProcessBehavior> <azure.devops.v7_1.work_item_tracking_process.models.ProcessBehavior>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
content = self._serialize.body(behavior, 'ProcessBehaviorCreateRequest')
response = self._send(http_method='POST',
location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('ProcessBehavior', response)
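# Illustrative usage sketch (not part of the generated client); the behavior
# name and the process GUID are assumptions for demonstration only.
#
# behavior = client.create_process_behavior(
#     models.ProcessBehaviorCreateRequest(name='Custom Behavior'),
#     process_id='<process-guid>')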
def delete_process_behavior(self, process_id, behavior_ref_name):
"""DeleteProcessBehavior.
[Preview API] Removes a behavior in the process.
:param str process_id: The ID of the process
:param str behavior_ref_name: The reference name of the behavior
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if behavior_ref_name is not None:
route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str')
self._send(http_method='DELETE',
location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e',
version='7.1-preview.2',
route_values=route_values)
def get_process_behavior(self, process_id, behavior_ref_name, expand=None):
"""GetProcessBehavior.
[Preview API] Returns a behavior of the process.
:param str process_id: The ID of the process
:param str behavior_ref_name: The reference name of the behavior
:param str expand:
:rtype: :class:`<ProcessBehavior> <azure.devops.v7_1.work_item_tracking_process.models.ProcessBehavior>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if behavior_ref_name is not None:
route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e',
version='7.1-preview.2',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('ProcessBehavior', response)
def get_process_behaviors(self, process_id, expand=None):
"""GetProcessBehaviors.
[Preview API] Returns a list of all behaviors in the process.
:param str process_id: The ID of the process
:param str expand:
:rtype: [ProcessBehavior]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e',
version='7.1-preview.2',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[ProcessBehavior]', self._unwrap_collection(response))
def update_process_behavior(self, behavior_data, process_id, behavior_ref_name):
"""UpdateProcessBehavior.
[Preview API] Replaces a behavior in the process.
:param :class:`<ProcessBehaviorUpdateRequest> <azure.devops.v7_1.work_item_tracking_process.models.ProcessBehaviorUpdateRequest>` behavior_data:
:param str process_id: The ID of the process
:param str behavior_ref_name: The reference name of the behavior
:rtype: :class:`<ProcessBehavior> <azure.devops.v7_1.work_item_tracking_process.models.ProcessBehavior>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if behavior_ref_name is not None:
route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str')
content = self._serialize.body(behavior_data, 'ProcessBehaviorUpdateRequest')
response = self._send(http_method='PUT',
location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('ProcessBehavior', response)
def create_control_in_group(self, control, process_id, wit_ref_name, group_id):
"""CreateControlInGroup.
[Preview API] Creates a control in a group.
:param :class:`<Control> <azure.devops.v7_1.work_item_tracking_process.models.Control>` control: The control.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:param str group_id: The ID of the group to add the control to.
:rtype: :class:`<Control> <azure.devops.v7_1.work_item_tracking_process.models.Control>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
content = self._serialize.body(control, 'Control')
response = self._send(http_method='POST',
location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('Control', response)
def move_control_to_group(self, control, process_id, wit_ref_name, group_id, control_id, remove_from_group_id=None):
"""MoveControlToGroup.
[Preview API] Moves a control to a specified group.
:param :class:`<Control> <azure.devops.v7_1.work_item_tracking_process.models.Control>` control: The control.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:param str group_id: The ID of the group to move the control to.
:param str control_id: The ID of the control.
:param str remove_from_group_id: The group ID to remove the control from.
:rtype: :class:`<Control> <azure.devops.v7_1.work_item_tracking_process.models.Control>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
if control_id is not None:
route_values['controlId'] = self._serialize.url('control_id', control_id, 'str')
query_parameters = {}
if remove_from_group_id is not None:
query_parameters['removeFromGroupId'] = self._serialize.query('remove_from_group_id', remove_from_group_id, 'str')
content = self._serialize.body(control, 'Control')
response = self._send(http_method='PUT',
location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('Control', response)
def remove_control_from_group(self, process_id, wit_ref_name, group_id, control_id):
"""RemoveControlFromGroup.
[Preview API] Removes a control from the work item form.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:param str group_id: The ID of the group.
:param str control_id: The ID of the control to remove.
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
if control_id is not None:
route_values['controlId'] = self._serialize.url('control_id', control_id, 'str')
self._send(http_method='DELETE',
location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58',
version='7.1-preview.1',
route_values=route_values)
def update_control(self, control, process_id, wit_ref_name, group_id, control_id):
"""UpdateControl.
[Preview API] Updates a control on the work item form.
:param :class:`<Control> <azure.devops.v7_1.work_item_tracking_process.models.Control>` control: The updated control.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:param str group_id: The ID of the group.
:param str control_id: The ID of the control.
:rtype: :class:`<Control> <azure.devops.v7_1.work_item_tracking_process.models.Control>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
if control_id is not None:
route_values['controlId'] = self._serialize.url('control_id', control_id, 'str')
content = self._serialize.body(control, 'Control')
response = self._send(http_method='PATCH',
location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('Control', response)
def add_field_to_work_item_type(self, field, process_id, wit_ref_name):
"""AddFieldToWorkItemType.
[Preview API] Adds a field to a work item type.
:param :class:`<AddProcessWorkItemTypeFieldRequest> <azure.devops.v7_1.work_item_tracking_process.models.AddProcessWorkItemTypeFieldRequest>` field:
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:rtype: :class:`<ProcessWorkItemTypeField> <azure.devops.v7_1.work_item_tracking_process.models.ProcessWorkItemTypeField>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
content = self._serialize.body(field, 'AddProcessWorkItemTypeFieldRequest')
response = self._send(http_method='POST',
location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('ProcessWorkItemTypeField', response)
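# Illustrative usage sketch (not part of the generated client): adding an
# existing collection field to a custom work item type. The field reference
# name and the '<prefix>.<wit>' naming of the work item type are assumptions.
#
# field = client.add_field_to_work_item_type(
#     models.AddProcessWorkItemTypeFieldRequest(
#         reference_name='Custom.Severity', required=False),
#     process_id='<process-guid>',
#     wit_ref_name='MyAgile.Bug')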
def get_all_work_item_type_fields(self, process_id, wit_ref_name):
"""GetAllWorkItemTypeFields.
[Preview API] Returns a list of all fields in a work item type.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:rtype: [ProcessWorkItemTypeField]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
response = self._send(http_method='GET',
location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196',
version='7.1-preview.2',
route_values=route_values)
return self._deserialize('[ProcessWorkItemTypeField]', self._unwrap_collection(response))
def get_work_item_type_field(self, process_id, wit_ref_name, field_ref_name, expand=None):
"""GetWorkItemTypeField.
[Preview API] Returns a field in a work item type.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:param str field_ref_name: The reference name of the field.
:param str expand:
:rtype: :class:`<ProcessWorkItemTypeField> <azure.devops.v7_1.work_item_tracking_process.models.ProcessWorkItemTypeField>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if field_ref_name is not None:
route_values['fieldRefName'] = self._serialize.url('field_ref_name', field_ref_name, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196',
version='7.1-preview.2',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('ProcessWorkItemTypeField', response)
def remove_work_item_type_field(self, process_id, wit_ref_name, field_ref_name):
"""RemoveWorkItemTypeField.
[Preview API] Removes a field from a work item type. Does not permanently delete the field.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:param str field_ref_name: The reference name of the field.
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if field_ref_name is not None:
route_values['fieldRefName'] = self._serialize.url('field_ref_name', field_ref_name, 'str')
self._send(http_method='DELETE',
location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196',
version='7.1-preview.2',
route_values=route_values)
def update_work_item_type_field(self, field, process_id, wit_ref_name, field_ref_name):
"""UpdateWorkItemTypeField.
[Preview API] Updates a field in a work item type.
:param :class:`<UpdateProcessWorkItemTypeFieldRequest> <azure.devops.v7_1.work_item_tracking_process.models.UpdateProcessWorkItemTypeFieldRequest>` field:
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:param str field_ref_name: The reference name of the field.
:rtype: :class:`<ProcessWorkItemTypeField> <azure.devops.v7_1.work_item_tracking_process.models.ProcessWorkItemTypeField>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if field_ref_name is not None:
route_values['fieldRefName'] = self._serialize.url('field_ref_name', field_ref_name, 'str')
content = self._serialize.body(field, 'UpdateProcessWorkItemTypeFieldRequest')
response = self._send(http_method='PATCH',
location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('ProcessWorkItemTypeField', response)
def add_group(self, group, process_id, wit_ref_name, page_id, section_id):
"""AddGroup.
[Preview API] Adds a group to the work item form.
:param :class:`<Group> <azure.devops.v7_1.work_item_tracking_process.models.Group>` group: The group.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:param str page_id: The ID of the page to add the group to.
:param str section_id: The ID of the section to add the group to.
:rtype: :class:`<Group> <azure.devops.v7_1.work_item_tracking_process.models.Group>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if page_id is not None:
route_values['pageId'] = self._serialize.url('page_id', page_id, 'str')
if section_id is not None:
route_values['sectionId'] = self._serialize.url('section_id', section_id, 'str')
content = self._serialize.body(group, 'Group')
response = self._send(http_method='POST',
location_id='766e44e1-36a8-41d7-9050-c343ff02f7a5',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('Group', response)
def move_group_to_page(self, group, process_id, wit_ref_name, page_id, section_id, group_id, remove_from_page_id, remove_from_section_id):
"""MoveGroupToPage.
[Preview API] Moves a group to a different page and section.
:param :class:`<Group> <azure.devops.v7_1.work_item_tracking_process.models.Group>` group: The updated group.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:param str page_id: The ID of the page the group is in.
    :param str section_id: The ID of the section the group is in.
:param str group_id: The ID of the group.
:param str remove_from_page_id: ID of the page to remove the group from.
:param str remove_from_section_id: ID of the section to remove the group from.
:rtype: :class:`<Group> <azure.devops.v7_1.work_item_tracking_process.models.Group>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if page_id is not None:
route_values['pageId'] = self._serialize.url('page_id', page_id, 'str')
if section_id is not None:
route_values['sectionId'] = self._serialize.url('section_id', section_id, 'str')
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
query_parameters = {}
if remove_from_page_id is not None:
query_parameters['removeFromPageId'] = self._serialize.query('remove_from_page_id', remove_from_page_id, 'str')
if remove_from_section_id is not None:
query_parameters['removeFromSectionId'] = self._serialize.query('remove_from_section_id', remove_from_section_id, 'str')
content = self._serialize.body(group, 'Group')
response = self._send(http_method='PUT',
location_id='766e44e1-36a8-41d7-9050-c343ff02f7a5',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('Group', response)
def move_group_to_section(self, group, process_id, wit_ref_name, page_id, section_id, group_id, remove_from_section_id):
"""MoveGroupToSection.
[Preview API] Moves a group to a different section.
:param :class:`<Group> <azure.devops.v7_1.work_item_tracking_process.models.Group>` group: The updated group.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:param str page_id: The ID of the page the group is in.
:param str section_id: The ID of the section the group is in.
:param str group_id: The ID of the group.
:param str remove_from_section_id: ID of the section to remove the group from.
:rtype: :class:`<Group> <azure.devops.v7_1.work_item_tracking_process.models.Group>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if page_id is not None:
route_values['pageId'] = self._serialize.url('page_id', page_id, 'str')
if section_id is not None:
route_values['sectionId'] = self._serialize.url('section_id', section_id, 'str')
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
query_parameters = {}
if remove_from_section_id is not None:
query_parameters['removeFromSectionId'] = self._serialize.query('remove_from_section_id', remove_from_section_id, 'str')
content = self._serialize.body(group, 'Group')
response = self._send(http_method='PUT',
location_id='766e44e1-36a8-41d7-9050-c343ff02f7a5',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('Group', response)
def remove_group(self, process_id, wit_ref_name, page_id, section_id, group_id):
"""RemoveGroup.
[Preview API] Removes a group from the work item form.
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str page_id: The ID of the page the group is in
    :param str section_id: The ID of the section the group is in
:param str group_id: The ID of the group
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if page_id is not None:
route_values['pageId'] = self._serialize.url('page_id', page_id, 'str')
if section_id is not None:
route_values['sectionId'] = self._serialize.url('section_id', section_id, 'str')
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
self._send(http_method='DELETE',
location_id='766e44e1-36a8-41d7-9050-c343ff02f7a5',
version='7.1-preview.1',
route_values=route_values)
def update_group(self, group, process_id, wit_ref_name, page_id, section_id, group_id):
"""UpdateGroup.
[Preview API] Updates a group in the work item form.
:param :class:`<Group> <azure.devops.v7_1.work_item_tracking_process.models.Group>` group: The updated group.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:param str page_id: The ID of the page the group is in.
:param str section_id: The ID of the section the group is in.
:param str group_id: The ID of the group.
:rtype: :class:`<Group> <azure.devops.v7_1.work_item_tracking_process.models.Group>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if page_id is not None:
route_values['pageId'] = self._serialize.url('page_id', page_id, 'str')
if section_id is not None:
route_values['sectionId'] = self._serialize.url('section_id', section_id, 'str')
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
content = self._serialize.body(group, 'Group')
response = self._send(http_method='PATCH',
location_id='766e44e1-36a8-41d7-9050-c343ff02f7a5',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('Group', response)
def get_form_layout(self, process_id, wit_ref_name):
"""GetFormLayout.
[Preview API] Gets the form layout.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:rtype: :class:`<FormLayout> <azure.devops.v7_1.work_item_tracking_process.models.FormLayout>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
response = self._send(http_method='GET',
location_id='fa8646eb-43cd-4b71-9564-40106fd63e40',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('FormLayout', response)
def create_list(self, picklist):
"""CreateList.
[Preview API] Creates a picklist.
:param :class:`<PickList> <azure.devops.v7_1.work_item_tracking_process.models.PickList>` picklist: Picklist
:rtype: :class:`<PickList> <azure.devops.v7_1.work_item_tracking_process.models.PickList>`
"""
content = self._serialize.body(picklist, 'PickList')
response = self._send(http_method='POST',
location_id='01e15468-e27c-4e20-a974-bd957dcccebc',
version='7.1-preview.1',
content=content)
return self._deserialize('PickList', response)
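# Illustrative usage sketch (not part of the generated client): creating a
# string picklist. The PickList constructor arguments and item values shown
# are assumptions for demonstration only.
#
# picklist = client.create_list(models.PickList(
#     name='Severities',
#     type='String',
#     items=['Low', 'Medium', 'High']))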
def delete_list(self, list_id):
"""DeleteList.
[Preview API] Removes a picklist.
:param str list_id: The ID of the list
"""
route_values = {}
if list_id is not None:
route_values['listId'] = self._serialize.url('list_id', list_id, 'str')
self._send(http_method='DELETE',
location_id='01e15468-e27c-4e20-a974-bd957dcccebc',
version='7.1-preview.1',
route_values=route_values)
def get_list(self, list_id):
"""GetList.
[Preview API] Returns a picklist.
:param str list_id: The ID of the list
:rtype: :class:`<PickList> <azure.devops.v7_1.work_item_tracking_process.models.PickList>`
"""
route_values = {}
if list_id is not None:
route_values['listId'] = self._serialize.url('list_id', list_id, 'str')
response = self._send(http_method='GET',
location_id='01e15468-e27c-4e20-a974-bd957dcccebc',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('PickList', response)
def get_lists_metadata(self):
"""GetListsMetadata.
        [Preview API] Returns metadata of the picklists.
:rtype: [PickListMetadata]
"""
response = self._send(http_method='GET',
location_id='01e15468-e27c-4e20-a974-bd957dcccebc',
version='7.1-preview.1')
return self._deserialize('[PickListMetadata]', self._unwrap_collection(response))
def update_list(self, picklist, list_id):
"""UpdateList.
[Preview API] Updates a list.
:param :class:`<PickList> <azure.devops.v7_1.work_item_tracking_process.models.PickList>` picklist:
:param str list_id: The ID of the list
:rtype: :class:`<PickList> <azure.devops.v7_1.work_item_tracking_process.models.PickList>`
"""
route_values = {}
if list_id is not None:
route_values['listId'] = self._serialize.url('list_id', list_id, 'str')
content = self._serialize.body(picklist, 'PickList')
response = self._send(http_method='PUT',
location_id='01e15468-e27c-4e20-a974-bd957dcccebc',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('PickList', response)
def add_page(self, page, process_id, wit_ref_name):
"""AddPage.
[Preview API] Adds a page to the work item form.
:param :class:`<Page> <azure.devops.v7_1.work_item_tracking_process.models.Page>` page: The page.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:rtype: :class:`<Page> <azure.devops.v7_1.work_item_tracking_process.models.Page>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
content = self._serialize.body(page, 'Page')
response = self._send(http_method='POST',
location_id='1cc7b29f-6697-4d9d-b0a1-2650d3e1d584',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('Page', response)
def remove_page(self, process_id, wit_ref_name, page_id):
"""RemovePage.
[Preview API] Removes a page from the work item form
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str page_id: The ID of the page
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if page_id is not None:
route_values['pageId'] = self._serialize.url('page_id', page_id, 'str')
self._send(http_method='DELETE',
location_id='1cc7b29f-6697-4d9d-b0a1-2650d3e1d584',
version='7.1-preview.1',
route_values=route_values)
def update_page(self, page, process_id, wit_ref_name):
"""UpdatePage.
[Preview API] Updates a page on the work item form
:param :class:`<Page> <azure.devops.v7_1.work_item_tracking_process.models.Page>` page: The page
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:rtype: :class:`<Page> <azure.devops.v7_1.work_item_tracking_process.models.Page>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
content = self._serialize.body(page, 'Page')
response = self._send(http_method='PATCH',
location_id='1cc7b29f-6697-4d9d-b0a1-2650d3e1d584',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('Page', response)
def create_new_process(self, create_request):
"""CreateNewProcess.
[Preview API] Creates a process.
:param :class:`<CreateProcessModel> <azure.devops.v7_1.work_item_tracking_process.models.CreateProcessModel>` create_request: CreateProcessModel.
:rtype: :class:`<ProcessInfo> <azure.devops.v7_1.work_item_tracking_process.models.ProcessInfo>`
"""
content = self._serialize.body(create_request, 'CreateProcessModel')
response = self._send(http_method='POST',
location_id='02cc6a73-5cfb-427d-8c8e-b49fb086e8af',
version='7.1-preview.2',
content=content)
return self._deserialize('ProcessInfo', response)
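# Illustrative usage sketch (not part of the generated client): creating an
# inherited process. The parent process GUID shown is the well-known Agile
# system process ID; treat it as an assumption and look it up for your
# organization via get_list_of_processes().
#
# process = client.create_new_process(models.CreateProcessModel(
#     name='MyAgile',
#     parent_process_type_id='adcc42ab-9882-485e-a3ed-7678f01f66bc'))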
def delete_process_by_id(self, process_type_id):
"""DeleteProcessById.
[Preview API] Removes a process of a specific ID.
:param str process_type_id:
"""
route_values = {}
if process_type_id is not None:
route_values['processTypeId'] = self._serialize.url('process_type_id', process_type_id, 'str')
self._send(http_method='DELETE',
location_id='02cc6a73-5cfb-427d-8c8e-b49fb086e8af',
version='7.1-preview.2',
route_values=route_values)
def edit_process(self, update_request, process_type_id):
"""EditProcess.
        [Preview API] Edits a process of a specific ID.
:param :class:`<UpdateProcessModel> <azure.devops.v7_1.work_item_tracking_process.models.UpdateProcessModel>` update_request:
:param str process_type_id:
:rtype: :class:`<ProcessInfo> <azure.devops.v7_1.work_item_tracking_process.models.ProcessInfo>`
"""
route_values = {}
if process_type_id is not None:
route_values['processTypeId'] = self._serialize.url('process_type_id', process_type_id, 'str')
content = self._serialize.body(update_request, 'UpdateProcessModel')
response = self._send(http_method='PATCH',
location_id='02cc6a73-5cfb-427d-8c8e-b49fb086e8af',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('ProcessInfo', response)
def get_list_of_processes(self, expand=None):
"""GetListOfProcesses.
[Preview API] Get list of all processes including system and inherited.
:param str expand:
:rtype: [ProcessInfo]
"""
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='02cc6a73-5cfb-427d-8c8e-b49fb086e8af',
version='7.1-preview.2',
query_parameters=query_parameters)
return self._deserialize('[ProcessInfo]', self._unwrap_collection(response))
def get_process_by_its_id(self, process_type_id, expand=None):
"""GetProcessByItsId.
[Preview API] Get a single process of a specified ID.
:param str process_type_id:
:param str expand:
:rtype: :class:`<ProcessInfo> <azure.devops.v7_1.work_item_tracking_process.models.ProcessInfo>`
"""
route_values = {}
if process_type_id is not None:
route_values['processTypeId'] = self._serialize.url('process_type_id', process_type_id, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='02cc6a73-5cfb-427d-8c8e-b49fb086e8af',
version='7.1-preview.2',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('ProcessInfo', response)
def add_process_work_item_type_rule(self, process_rule_create, process_id, wit_ref_name):
"""AddProcessWorkItemTypeRule.
[Preview API] Adds a rule to work item type in the process.
:param :class:`<CreateProcessRuleRequest> <azure.devops.v7_1.work_item_tracking_process.models.CreateProcessRuleRequest>` process_rule_create:
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:rtype: :class:`<ProcessRule> <azure.devops.v7_1.work_item_tracking_process.models.ProcessRule>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
content = self._serialize.body(process_rule_create, 'CreateProcessRuleRequest')
response = self._send(http_method='POST',
location_id='76fe3432-d825-479d-a5f6-983bbb78b4f3',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('ProcessRule', response)
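# Illustrative usage sketch (not part of the generated client): the
# condition/action shapes below follow the CreateProcessRuleRequest contract,
# but the specific field names and values are assumptions.
#
# rule = client.add_process_work_item_type_rule(
#     models.CreateProcessRuleRequest(
#         name='Default severity',
#         conditions=[models.RuleCondition(
#             condition_type='when', field='System.State', value='New')],
#         actions=[models.RuleAction(
#             action_type='setDefaultValue',
#             target_field='Custom.Severity', value='Low')]),
#     process_id='<process-guid>',
#     wit_ref_name='MyAgile.Bug')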
def delete_process_work_item_type_rule(self, process_id, wit_ref_name, rule_id):
"""DeleteProcessWorkItemTypeRule.
[Preview API] Removes a rule from the work item type in the process.
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str rule_id: The ID of the rule
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if rule_id is not None:
route_values['ruleId'] = self._serialize.url('rule_id', rule_id, 'str')
self._send(http_method='DELETE',
location_id='76fe3432-d825-479d-a5f6-983bbb78b4f3',
version='7.1-preview.2',
route_values=route_values)
def get_process_work_item_type_rule(self, process_id, wit_ref_name, rule_id):
"""GetProcessWorkItemTypeRule.
[Preview API] Returns a single rule in the work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str rule_id: The ID of the rule
:rtype: :class:`<ProcessRule> <azure.devops.v7_1.work_item_tracking_process.models.ProcessRule>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if rule_id is not None:
route_values['ruleId'] = self._serialize.url('rule_id', rule_id, 'str')
response = self._send(http_method='GET',
location_id='76fe3432-d825-479d-a5f6-983bbb78b4f3',
version='7.1-preview.2',
route_values=route_values)
return self._deserialize('ProcessRule', response)
def get_process_work_item_type_rules(self, process_id, wit_ref_name):
"""GetProcessWorkItemTypeRules.
[Preview API] Returns a list of all rules in the work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:rtype: [ProcessRule]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
response = self._send(http_method='GET',
location_id='76fe3432-d825-479d-a5f6-983bbb78b4f3',
version='7.1-preview.2',
route_values=route_values)
return self._deserialize('[ProcessRule]', self._unwrap_collection(response))
def update_process_work_item_type_rule(self, process_rule, process_id, wit_ref_name, rule_id):
"""UpdateProcessWorkItemTypeRule.
[Preview API] Updates a rule in the work item type of the process.
:param :class:`<UpdateProcessRuleRequest> <azure.devops.v7_1.work_item_tracking_process.models.UpdateProcessRuleRequest>` process_rule:
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str rule_id: The ID of the rule
:rtype: :class:`<ProcessRule> <azure.devops.v7_1.work_item_tracking_process.models.ProcessRule>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if rule_id is not None:
route_values['ruleId'] = self._serialize.url('rule_id', rule_id, 'str')
content = self._serialize.body(process_rule, 'UpdateProcessRuleRequest')
response = self._send(http_method='PUT',
location_id='76fe3432-d825-479d-a5f6-983bbb78b4f3',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('ProcessRule', response)
def create_state_definition(self, state_model, process_id, wit_ref_name):
"""CreateStateDefinition.
[Preview API] Creates a state definition in the work item type of the process.
:param :class:`<WorkItemStateInputModel> <azure.devops.v7_1.work_item_tracking_process.models.WorkItemStateInputModel>` state_model:
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:rtype: :class:`<WorkItemStateResultModel> <azure.devops.v7_1.work_item_tracking_process.models.WorkItemStateResultModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
content = self._serialize.body(state_model, 'WorkItemStateInputModel')
response = self._send(http_method='POST',
location_id='31015d57-2dff-4a46-adb3-2fb4ee3dcec9',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('WorkItemStateResultModel', response)
def delete_state_definition(self, process_id, wit_ref_name, state_id):
"""DeleteStateDefinition.
[Preview API] Removes a state definition in the work item type of the process.
:param str process_id: ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str state_id: ID of the state
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if state_id is not None:
route_values['stateId'] = self._serialize.url('state_id', state_id, 'str')
self._send(http_method='DELETE',
location_id='31015d57-2dff-4a46-adb3-2fb4ee3dcec9',
version='7.1-preview.1',
route_values=route_values)
def get_state_definition(self, process_id, wit_ref_name, state_id):
"""GetStateDefinition.
[Preview API] Returns a single state definition in a work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str state_id: The ID of the state
:rtype: :class:`<WorkItemStateResultModel> <azure.devops.v7_1.work_item_tracking_process.models.WorkItemStateResultModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if state_id is not None:
route_values['stateId'] = self._serialize.url('state_id', state_id, 'str')
response = self._send(http_method='GET',
location_id='31015d57-2dff-4a46-adb3-2fb4ee3dcec9',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('WorkItemStateResultModel', response)
def get_state_definitions(self, process_id, wit_ref_name):
"""GetStateDefinitions.
[Preview API] Returns a list of all state definitions in a work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:rtype: [WorkItemStateResultModel]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
response = self._send(http_method='GET',
location_id='31015d57-2dff-4a46-adb3-2fb4ee3dcec9',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('[WorkItemStateResultModel]', self._unwrap_collection(response))
def hide_state_definition(self, hide_state_model, process_id, wit_ref_name, state_id):
"""HideStateDefinition.
        [Preview API] Hides a state definition in the work item type of the process. Only states with customizationType:System can be hidden.
        :param :class:`<HideStateModel> <azure.devops.v7_1.work_item_tracking_process.models.HideStateModel>` hide_state_model: Model indicating whether to hide the state.
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str state_id: The ID of the state
:rtype: :class:`<WorkItemStateResultModel> <azure.devops.v7_1.work_item_tracking_process.models.WorkItemStateResultModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if state_id is not None:
route_values['stateId'] = self._serialize.url('state_id', state_id, 'str')
content = self._serialize.body(hide_state_model, 'HideStateModel')
response = self._send(http_method='PUT',
location_id='31015d57-2dff-4a46-adb3-2fb4ee3dcec9',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('WorkItemStateResultModel', response)
def update_state_definition(self, state_model, process_id, wit_ref_name, state_id):
"""UpdateStateDefinition.
[Preview API] Updates a given state definition in the work item type of the process.
        :param :class:`<WorkItemStateInputModel> <azure.devops.v7_1.work_item_tracking_process.models.WorkItemStateInputModel>` state_model: The updated state definition.
:param str process_id: ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str state_id: ID of the state
:rtype: :class:`<WorkItemStateResultModel> <azure.devops.v7_1.work_item_tracking_process.models.WorkItemStateResultModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if state_id is not None:
route_values['stateId'] = self._serialize.url('state_id', state_id, 'str')
content = self._serialize.body(state_model, 'WorkItemStateInputModel')
response = self._send(http_method='PATCH',
location_id='31015d57-2dff-4a46-adb3-2fb4ee3dcec9',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('WorkItemStateResultModel', response)
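    # Usage sketch (illustrative, not part of the generated client): a state
    # definition lifecycle. `client`, `process_id` and `wit_ref_name` are
    # assumed to exist; WorkItemStateInputModel (from this package's models)
    # is assumed to accept name/color/state_category.
    #
    #     state = client.create_state_definition(
    #         WorkItemStateInputModel(name='Triaged', color='b2b2b2', state_category='Proposed'),
    #         process_id, wit_ref_name)
    #     client.update_state_definition(
    #         WorkItemStateInputModel(color='007acc'), process_id, wit_ref_name, state.id)
    #     client.delete_state_definition(process_id, wit_ref_name, state.id)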
def delete_system_control(self, process_id, wit_ref_name, control_id):
"""DeleteSystemControl.
[Preview API] Deletes a system control modification on the work item form.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:param str control_id: The ID of the control.
:rtype: [Control]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if control_id is not None:
route_values['controlId'] = self._serialize.url('control_id', control_id, 'str')
response = self._send(http_method='DELETE',
location_id='ff9a3d2c-32b7-4c6c-991c-d5a251fb9098',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('[Control]', self._unwrap_collection(response))
def get_system_controls(self, process_id, wit_ref_name):
"""GetSystemControls.
        [Preview API] Gets edited system controls for a work item type in a process. To get all system controls (base + edited), use the layout API(s).
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:rtype: [Control]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
response = self._send(http_method='GET',
location_id='ff9a3d2c-32b7-4c6c-991c-d5a251fb9098',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('[Control]', self._unwrap_collection(response))
def update_system_control(self, control, process_id, wit_ref_name, control_id):
"""UpdateSystemControl.
[Preview API] Updates/adds a system control on the work item form.
        :param :class:`<Control> <azure.devops.v7_1.work_item_tracking_process.models.Control>` control: The updated control.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:param str control_id: The ID of the control.
:rtype: :class:`<Control> <azure.devops.v7_1.work_item_tracking_process.models.Control>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if control_id is not None:
route_values['controlId'] = self._serialize.url('control_id', control_id, 'str')
content = self._serialize.body(control, 'Control')
response = self._send(http_method='PATCH',
location_id='ff9a3d2c-32b7-4c6c-991c-d5a251fb9098',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('Control', response)
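    # Usage sketch (illustrative, not part of the generated client): hide an
    # inherited system control on the form, then drop the edit to restore the
    # default. The `visible` field on Control is an assumption.
    #
    #     client.update_system_control(Control(visible=False),
    #                                  process_id, wit_ref_name, 'System.Description')
    #     client.delete_system_control(process_id, wit_ref_name, 'System.Description')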
def create_process_work_item_type(self, work_item_type, process_id):
"""CreateProcessWorkItemType.
[Preview API] Creates a work item type in the process.
        :param :class:`<CreateProcessWorkItemTypeRequest> <azure.devops.v7_1.work_item_tracking_process.models.CreateProcessWorkItemTypeRequest>` work_item_type: The work item type to create.
        :param str process_id: The ID of the process on which to create the work item type.
:rtype: :class:`<ProcessWorkItemType> <azure.devops.v7_1.work_item_tracking_process.models.ProcessWorkItemType>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
content = self._serialize.body(work_item_type, 'CreateProcessWorkItemTypeRequest')
response = self._send(http_method='POST',
location_id='e2e9d1a6-432d-4062-8870-bfcb8c324ad7',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('ProcessWorkItemType', response)
def delete_process_work_item_type(self, process_id, wit_ref_name):
"""DeleteProcessWorkItemType.
[Preview API] Removes a work item type in the process.
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
self._send(http_method='DELETE',
location_id='e2e9d1a6-432d-4062-8870-bfcb8c324ad7',
version='7.1-preview.2',
route_values=route_values)
def get_process_work_item_type(self, process_id, wit_ref_name, expand=None):
"""GetProcessWorkItemType.
[Preview API] Returns a single work item type in a process.
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
        :param str expand: Flag to determine what properties of the work item type to return
:rtype: :class:`<ProcessWorkItemType> <azure.devops.v7_1.work_item_tracking_process.models.ProcessWorkItemType>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='e2e9d1a6-432d-4062-8870-bfcb8c324ad7',
version='7.1-preview.2',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('ProcessWorkItemType', response)
def get_process_work_item_types(self, process_id, expand=None):
"""GetProcessWorkItemTypes.
[Preview API] Returns a list of all work item types in a process.
:param str process_id: The ID of the process
        :param str expand: Flag to determine what properties of the work item type to return
:rtype: [ProcessWorkItemType]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='e2e9d1a6-432d-4062-8870-bfcb8c324ad7',
version='7.1-preview.2',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[ProcessWorkItemType]', self._unwrap_collection(response))
def update_process_work_item_type(self, work_item_type_update, process_id, wit_ref_name):
"""UpdateProcessWorkItemType.
[Preview API] Updates a work item type of the process.
        :param :class:`<UpdateProcessWorkItemTypeRequest> <azure.devops.v7_1.work_item_tracking_process.models.UpdateProcessWorkItemTypeRequest>` work_item_type_update: Request containing the updated properties of the work item type.
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:rtype: :class:`<ProcessWorkItemType> <azure.devops.v7_1.work_item_tracking_process.models.ProcessWorkItemType>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
content = self._serialize.body(work_item_type_update, 'UpdateProcessWorkItemTypeRequest')
response = self._send(http_method='PATCH',
location_id='e2e9d1a6-432d-4062-8870-bfcb8c324ad7',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('ProcessWorkItemType', response)
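    # Usage sketch (illustrative, not part of the generated client): create,
    # inspect and remove a custom work item type. The request fields and the
    # 'layout' expand value are assumptions.
    #
    #     wit = client.create_process_work_item_type(
    #         CreateProcessWorkItemTypeRequest(name='Ticket', color='f6546a', icon='icon_clipboard'),
    #         process_id)
    #     wit = client.get_process_work_item_type(process_id, wit.reference_name, expand='layout')
    #     client.delete_process_work_item_type(process_id, wit.reference_name)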
def add_behavior_to_work_item_type(self, behavior, process_id, wit_ref_name_for_behaviors):
"""AddBehaviorToWorkItemType.
[Preview API] Adds a behavior to the work item type of the process.
        :param :class:`<WorkItemTypeBehavior> <azure.devops.v7_1.work_item_tracking_process.models.WorkItemTypeBehavior>` behavior: The behavior to add.
:param str process_id: The ID of the process
:param str wit_ref_name_for_behaviors: Work item type reference name for the behavior
:rtype: :class:`<WorkItemTypeBehavior> <azure.devops.v7_1.work_item_tracking_process.models.WorkItemTypeBehavior>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_behaviors is not None:
route_values['witRefNameForBehaviors'] = self._serialize.url('wit_ref_name_for_behaviors', wit_ref_name_for_behaviors, 'str')
content = self._serialize.body(behavior, 'WorkItemTypeBehavior')
response = self._send(http_method='POST',
location_id='6d765a2e-4e1b-4b11-be93-f953be676024',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('WorkItemTypeBehavior', response)
def get_behavior_for_work_item_type(self, process_id, wit_ref_name_for_behaviors, behavior_ref_name):
"""GetBehaviorForWorkItemType.
[Preview API] Returns a behavior for the work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name_for_behaviors: Work item type reference name for the behavior
:param str behavior_ref_name: The reference name of the behavior
:rtype: :class:`<WorkItemTypeBehavior> <azure.devops.v7_1.work_item_tracking_process.models.WorkItemTypeBehavior>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_behaviors is not None:
route_values['witRefNameForBehaviors'] = self._serialize.url('wit_ref_name_for_behaviors', wit_ref_name_for_behaviors, 'str')
if behavior_ref_name is not None:
route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str')
response = self._send(http_method='GET',
location_id='6d765a2e-4e1b-4b11-be93-f953be676024',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('WorkItemTypeBehavior', response)
def get_behaviors_for_work_item_type(self, process_id, wit_ref_name_for_behaviors):
"""GetBehaviorsForWorkItemType.
[Preview API] Returns a list of all behaviors for the work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name_for_behaviors: Work item type reference name for the behavior
:rtype: [WorkItemTypeBehavior]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_behaviors is not None:
route_values['witRefNameForBehaviors'] = self._serialize.url('wit_ref_name_for_behaviors', wit_ref_name_for_behaviors, 'str')
response = self._send(http_method='GET',
location_id='6d765a2e-4e1b-4b11-be93-f953be676024',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('[WorkItemTypeBehavior]', self._unwrap_collection(response))
def remove_behavior_from_work_item_type(self, process_id, wit_ref_name_for_behaviors, behavior_ref_name):
"""RemoveBehaviorFromWorkItemType.
[Preview API] Removes a behavior for the work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name_for_behaviors: Work item type reference name for the behavior
:param str behavior_ref_name: The reference name of the behavior
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_behaviors is not None:
route_values['witRefNameForBehaviors'] = self._serialize.url('wit_ref_name_for_behaviors', wit_ref_name_for_behaviors, 'str')
if behavior_ref_name is not None:
route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str')
self._send(http_method='DELETE',
location_id='6d765a2e-4e1b-4b11-be93-f953be676024',
version='7.1-preview.1',
route_values=route_values)
def update_behavior_to_work_item_type(self, behavior, process_id, wit_ref_name_for_behaviors):
"""UpdateBehaviorToWorkItemType.
[Preview API] Updates a behavior for the work item type of the process.
        :param :class:`<WorkItemTypeBehavior> <azure.devops.v7_1.work_item_tracking_process.models.WorkItemTypeBehavior>` behavior: The updated behavior.
:param str process_id: The ID of the process
:param str wit_ref_name_for_behaviors: Work item type reference name for the behavior
:rtype: :class:`<WorkItemTypeBehavior> <azure.devops.v7_1.work_item_tracking_process.models.WorkItemTypeBehavior>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_behaviors is not None:
route_values['witRefNameForBehaviors'] = self._serialize.url('wit_ref_name_for_behaviors', wit_ref_name_for_behaviors, 'str')
content = self._serialize.body(behavior, 'WorkItemTypeBehavior')
response = self._send(http_method='PATCH',
location_id='6d765a2e-4e1b-4b11-be93-f953be676024',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('WorkItemTypeBehavior', response)
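# Usage sketch (illustrative, not part of the generated client): obtaining this
# client from an azure-devops Connection. The organization URL and PAT below
# are placeholders, and `clients_v7_1` is assumed to be the v7.1 client factory
# on Connection.
#
#     from azure.devops.connection import Connection
#     from msrest.authentication import BasicAuthentication
#
#     connection = Connection(base_url='https://dev.azure.com/myorg',
#                             creds=BasicAuthentication('', 'MY_PAT'))
#     client = connection.clients_v7_1.get_work_item_tracking_process_client()
#     for wit in client.get_process_work_item_types(process_id):
#         print(wit.name, wit.reference_name)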
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/work_item_tracking_process/work_item_tracking_process_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/work_item_tracking_process/work_item_tracking_process_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 31805
}
| 415 |
##
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
##
"""Defines interfaces for interacting with Azure Quantum"""
import logging
from .version import __version__
from .job.job import *
from .job.session import *
from .workspace import *
from ._client.models._enums import JobStatus, SessionStatus, SessionJobFailurePolicy, ItemType
logger = logging.getLogger(__name__)
logger.info(f"version: {__version__}")
__all__ = [ "Workspace" ]
|
azure-quantum-python/azure-quantum/azure/quantum/__init__.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/azure/quantum/__init__.py",
"repo_id": "azure-quantum-python",
"token_count": 139
}
| 416 |
# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from .. import models as _models
from .._serialization import Serializer
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_jobs_list_request(
subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_jobs_get_request(
job_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
"jobId": _SERIALIZER.url(
"job_id",
job_id,
"str",
max_length=36,
pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$",
),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_jobs_create_request(
job_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
"jobId": _SERIALIZER.url(
"job_id",
job_id,
"str",
max_length=36,
pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$",
),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_jobs_cancel_request(
job_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
"jobId": _SERIALIZER.url(
"job_id",
job_id,
"str",
max_length=36,
pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$",
),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_jobs_patch_request(
job_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
"jobId": _SERIALIZER.url(
"job_id",
job_id,
"str",
max_length=36,
pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$",
),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
def build_providers_get_status_request(
subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/providerStatus" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_storage_sas_uri_request(
subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/storage/sasUri" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_quotas_list_request(
subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/quotas" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_sessions_list_request(
subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_sessions_get_request(
session_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
"sessionId": _SERIALIZER.url(
"session_id",
session_id,
"str",
max_length=36,
pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$",
),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_sessions_open_request(
session_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
"sessionId": _SERIALIZER.url(
"session_id",
session_id,
"str",
max_length=36,
pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$",
),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_sessions_close_request(
session_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}:close" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
"sessionId": _SERIALIZER.url(
"session_id",
session_id,
"str",
max_length=36,
pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$",
),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_sessions_jobs_list_request(
session_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}/jobs" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
"sessionId": _SERIALIZER.url(
"session_id",
session_id,
"str",
max_length=36,
pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$",
),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_top_level_items_list_request(
subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/topLevelItems" # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
}
_url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
class JobsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.quantum._client.QuantumClient`'s
:attr:`jobs` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list(self, **kwargs: Any) -> Iterable["_models.JobDetails"]:
"""List jobs.
:return: An iterator like instance of JobDetails
:rtype: ~azure.core.paging.ItemPaged[~azure.quantum._client.models.JobDetails]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[_models._models.JobDetailsList] = kwargs.pop("cls", None) # pylint: disable=protected-access
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
_request = build_jobs_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
return _request
def extract_data(pipeline_response):
deserialized = self._deserialize(
_models._models.JobDetailsList, pipeline_response # pylint: disable=protected-access
)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
_request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return ItemPaged(get_next, extract_data)
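    # Usage sketch (illustrative, not part of the generated code): `list`
    # returns a lazy ItemPaged that follows `next_link` transparently, so
    # callers simply iterate. `client` is an assumed QuantumClient instance.
    #
    #     for job in client.jobs.list():
    #         print(job.id, job.status)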
@distributed_trace
def get(self, job_id: str, **kwargs: Any) -> _models.JobDetails:
"""Get job by id.
:param job_id: Id of the job. Required.
:type job_id: str
:return: JobDetails
:rtype: ~azure.quantum._client.models.JobDetails
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[_models.JobDetails] = kwargs.pop("cls", None)
_request = build_jobs_get_request(
job_id=job_id,
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize("JobDetails", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
@overload
def create(
self, job_id: str, job: _models.JobDetails, *, content_type: str = "application/json", **kwargs: Any
) -> _models.JobDetails:
"""Create a job.
:param job_id: Id of the job. Required.
:type job_id: str
:param job: The complete metadata of the job to submit. Required.
:type job: ~azure.quantum._client.models.JobDetails
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:return: JobDetails
:rtype: ~azure.quantum._client.models.JobDetails
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create(
self, job_id: str, job: IO[bytes], *, content_type: str = "application/json", **kwargs: Any
) -> _models.JobDetails:
"""Create a job.
:param job_id: Id of the job. Required.
:type job_id: str
:param job: The complete metadata of the job to submit. Required.
:type job: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:return: JobDetails
:rtype: ~azure.quantum._client.models.JobDetails
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create(self, job_id: str, job: Union[_models.JobDetails, IO[bytes]], **kwargs: Any) -> _models.JobDetails:
"""Create a job.
:param job_id: Id of the job. Required.
:type job_id: str
        :param job: The complete metadata of the job to submit. Is either a JobDetails type or an
         IO[bytes] type. Required.
:type job: ~azure.quantum._client.models.JobDetails or IO[bytes]
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:return: JobDetails
:rtype: ~azure.quantum._client.models.JobDetails
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.JobDetails] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(job, (IOBase, bytes)):
_content = job
else:
_json = self._serialize.body(job, "JobDetails")
_request = build_jobs_create_request(
job_id=job_id,
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
content_type=content_type,
api_version=self._config.api_version,
json=_json,
content=_content,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
if response.status_code == 200:
deserialized = self._deserialize("JobDetails", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("JobDetails", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
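    # Usage sketch (illustrative, not part of the generated code): submitting
    # a job with the model overload. The JobDetails fields shown mirror the
    # service API, but the ids/URIs are placeholders.
    #
    #     details = _models.JobDetails(
    #         id=job_id, name='bell-state', provider_id='ionq',
    #         target='ionq.simulator', container_uri='<container-sas-uri>',
    #         input_data_format='ionq.circuit.v1')
    #     created = client.jobs.create(job_id, details)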
@distributed_trace
def cancel(self, job_id: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements
"""Cancel a job.
:param job_id: Id of the job. Required.
:type job_id: str
:return: None
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[None] = kwargs.pop("cls", None)
_request = build_jobs_cancel_request(
job_id=job_id,
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {}) # type: ignore
@overload
def patch(
self,
job_id: str,
patch_job: List[_models.JsonPatchDocument],
*,
content_type: str = "application/json",
**kwargs: Any
) -> Optional[_models.JobDetails]:
"""Patch a job.
:param job_id: Id of the job. Required.
:type job_id: str
:param patch_job: The json patch document containing the patch operations. Required.
:type patch_job: list[~azure.quantum._client.models.JsonPatchDocument]
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:return: JobDetails or None
:rtype: ~azure.quantum._client.models.JobDetails or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def patch(
self, job_id: str, patch_job: IO[bytes], *, content_type: str = "application/json", **kwargs: Any
) -> Optional[_models.JobDetails]:
"""Patch a job.
:param job_id: Id of the job. Required.
:type job_id: str
:param patch_job: The json patch document containing the patch operations. Required.
:type patch_job: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:return: JobDetails or None
:rtype: ~azure.quantum._client.models.JobDetails or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def patch(
self, job_id: str, patch_job: Union[List[_models.JsonPatchDocument], IO[bytes]], **kwargs: Any
) -> Optional[_models.JobDetails]:
"""Patch a job.
:param job_id: Id of the job. Required.
:type job_id: str
        :param patch_job: The json patch document containing the patch operations. Is either a
         [JsonPatchDocument] type or an IO[bytes] type. Required.
:type patch_job: list[~azure.quantum._client.models.JsonPatchDocument] or IO[bytes]
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:return: JobDetails or None
:rtype: ~azure.quantum._client.models.JobDetails or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[Optional[_models.JobDetails]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(patch_job, (IOBase, bytes)):
_content = patch_job
else:
_json = self._serialize.body(patch_job, "[JsonPatchDocument]")
_request = build_jobs_patch_request(
job_id=job_id,
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
content_type=content_type,
api_version=self._config.api_version,
json=_json,
content=_content,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("JobDetails", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
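    # Usage sketch (illustrative, not part of the generated code): a JSON
    # Patch (RFC 6902) update of job metadata; the '/name' path is an
    # assumption.
    #
    #     patch = [_models.JsonPatchDocument(op='replace', path='/name', value='renamed-job')]
    #     updated = client.jobs.patch(job_id, patch)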
class ProvidersOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.quantum._client.QuantumClient`'s
:attr:`providers` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def get_status(self, **kwargs: Any) -> Iterable["_models.ProviderStatus"]:
"""Get provider status.
:return: An iterator like instance of ProviderStatus
:rtype: ~azure.core.paging.ItemPaged[~azure.quantum._client.models.ProviderStatus]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[_models._models.ProviderStatusList] = kwargs.pop("cls", None) # pylint: disable=protected-access
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
_request = build_providers_get_status_request(
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
return _request
def extract_data(pipeline_response):
deserialized = self._deserialize(
_models._models.ProviderStatusList, pipeline_response # pylint: disable=protected-access
)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
_request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return ItemPaged(get_next, extract_data)
class StorageOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.quantum._client.QuantumClient`'s
:attr:`storage` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@overload
def sas_uri(
self, blob_details: _models.BlobDetails, *, content_type: str = "application/json", **kwargs: Any
) -> _models.SasUriResponse:
"""Gets a URL with SAS token for a container/blob in the storage account associated with the
workspace. The SAS URL can be used to upload job input and/or download job output.
:param blob_details: The details (name and container) of the blob to store or download data.
Required.
:type blob_details: ~azure.quantum._client.models.BlobDetails
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:return: SasUriResponse
:rtype: ~azure.quantum._client.models.SasUriResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def sas_uri(
self, blob_details: IO[bytes], *, content_type: str = "application/json", **kwargs: Any
) -> _models.SasUriResponse:
"""Gets a URL with SAS token for a container/blob in the storage account associated with the
workspace. The SAS URL can be used to upload job input and/or download job output.
:param blob_details: The details (name and container) of the blob to store or download data.
Required.
:type blob_details: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:return: SasUriResponse
:rtype: ~azure.quantum._client.models.SasUriResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def sas_uri(self, blob_details: Union[_models.BlobDetails, IO[bytes]], **kwargs: Any) -> _models.SasUriResponse:
"""Gets a URL with SAS token for a container/blob in the storage account associated with the
workspace. The SAS URL can be used to upload job input and/or download job output.
:param blob_details: The details (name and container) of the blob to store or download data. Is
either a BlobDetails type or a IO[bytes] type. Required.
:type blob_details: ~azure.quantum._client.models.BlobDetails or IO[bytes]
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:return: SasUriResponse
:rtype: ~azure.quantum._client.models.SasUriResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.SasUriResponse] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(blob_details, (IOBase, bytes)):
_content = blob_details
else:
_json = self._serialize.body(blob_details, "BlobDetails")
_request = build_storage_sas_uri_request(
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
content_type=content_type,
api_version=self._config.api_version,
json=_json,
content=_content,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize("SasUriResponse", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
class QuotasOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.quantum._client.QuantumClient`'s
:attr:`quotas` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list(self, **kwargs: Any) -> Iterable["_models.Quota"]:
"""List quotas for the given workspace.
:return: An iterator like instance of Quota
:rtype: ~azure.core.paging.ItemPaged[~azure.quantum._client.models.Quota]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[_models._models.QuotaList] = kwargs.pop("cls", None) # pylint: disable=protected-access
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
_request = build_quotas_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
return _request
def extract_data(pipeline_response):
deserialized = self._deserialize(
_models._models.QuotaList, pipeline_response # pylint: disable=protected-access
)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
_request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return ItemPaged(get_next, extract_data)
class SessionsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.quantum._client.QuantumClient`'s
:attr:`sessions` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list(self, **kwargs: Any) -> Iterable["_models.SessionDetails"]:
"""List sessions.
:return: An iterator like instance of SessionDetails
:rtype: ~azure.core.paging.ItemPaged[~azure.quantum._client.models.SessionDetails]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[_models._models.SessionDetailsList] = kwargs.pop("cls", None) # pylint: disable=protected-access
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
_request = build_sessions_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
return _request
def extract_data(pipeline_response):
deserialized = self._deserialize(
_models._models.SessionDetailsList, pipeline_response # pylint: disable=protected-access
)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
_request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return ItemPaged(get_next, extract_data)
@distributed_trace
def get(self, session_id: str, **kwargs: Any) -> _models.SessionDetails:
"""Get session by id.
:param session_id: Id of the session. Required.
:type session_id: str
:return: SessionDetails
:rtype: ~azure.quantum._client.models.SessionDetails
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[_models.SessionDetails] = kwargs.pop("cls", None)
_request = build_sessions_get_request(
session_id=session_id,
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize("SessionDetails", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
@overload
def open(
self, session_id: str, session: _models.SessionDetails, *, content_type: str = "application/json", **kwargs: Any
) -> _models.SessionDetails:
"""Open a session.
:param session_id: Id of the session. Required.
:type session_id: str
:param session: The complete metadata of the session to be opened. Required.
:type session: ~azure.quantum._client.models.SessionDetails
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:return: SessionDetails
:rtype: ~azure.quantum._client.models.SessionDetails
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def open(
self, session_id: str, session: IO[bytes], *, content_type: str = "application/json", **kwargs: Any
) -> _models.SessionDetails:
"""Open a session.
:param session_id: Id of the session. Required.
:type session_id: str
:param session: The complete metadata of the session to be opened. Required.
:type session: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:return: SessionDetails
:rtype: ~azure.quantum._client.models.SessionDetails
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def open(
self, session_id: str, session: Union[_models.SessionDetails, IO[bytes]], **kwargs: Any
) -> _models.SessionDetails:
"""Open a session.
:param session_id: Id of the session. Required.
:type session_id: str
:param session: The complete metadata of the session to be opened. Is either a SessionDetails
type or a IO[bytes] type. Required.
:type session: ~azure.quantum._client.models.SessionDetails or IO[bytes]
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:return: SessionDetails
:rtype: ~azure.quantum._client.models.SessionDetails
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.SessionDetails] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(session, (IOBase, bytes)):
_content = session
else:
_json = self._serialize.body(session, "SessionDetails")
_request = build_sessions_open_request(
session_id=session_id,
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
content_type=content_type,
api_version=self._config.api_version,
json=_json,
content=_content,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
if response.status_code == 200:
deserialized = self._deserialize("SessionDetails", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("SessionDetails", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
@distributed_trace
def close(self, session_id: str, **kwargs: Any) -> _models.SessionDetails:
"""Close a session.
:param session_id: Id of the session. Required.
:type session_id: str
:return: SessionDetails
:rtype: ~azure.quantum._client.models.SessionDetails
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[_models.SessionDetails] = kwargs.pop("cls", None)
_request = build_sessions_close_request(
session_id=session_id,
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize("SessionDetails", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
@distributed_trace
def jobs_list(self, session_id: str, **kwargs: Any) -> Iterable["_models.JobDetails"]:
"""List jobs in a session.
:param session_id: Id of the session. Required.
:type session_id: str
:return: An iterator like instance of JobDetails
:rtype: ~azure.core.paging.ItemPaged[~azure.quantum._client.models.JobDetails]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[_models._models.JobDetailsList] = kwargs.pop("cls", None) # pylint: disable=protected-access
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
_request = build_sessions_jobs_list_request(
session_id=session_id,
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
return _request
def extract_data(pipeline_response):
deserialized = self._deserialize(
_models._models.JobDetailsList, pipeline_response # pylint: disable=protected-access
)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
_request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return ItemPaged(get_next, extract_data)
class TopLevelItemsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.quantum._client.QuantumClient`'s
:attr:`top_level_items` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list(self, **kwargs: Any) -> Iterable["_models.ItemDetails"]:
"""List top-level items.
:return: An iterator like instance of ItemDetails
:rtype: ~azure.core.paging.ItemPaged[~azure.quantum._client.models.ItemDetails]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[_models._models.ItemDetailsList] = kwargs.pop("cls", None) # pylint: disable=protected-access
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
_request = build_top_level_items_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=self._config.resource_group_name,
workspace_name=self._config.workspace_name,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
path_format_arguments = {
"azureRegion": self._serialize.url(
"self._config.azure_region", self._config.azure_region, "str", skip_quote=True
),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)
return _request
def extract_data(pipeline_response):
deserialized = self._deserialize(
_models._models.ItemDetailsList, pipeline_response # pylint: disable=protected-access
)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
_request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
if _stream:
response.read() # Load the body in memory and close the socket
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return ItemPaged(get_next, extract_data)
|
azure-quantum-python/azure-quantum/azure/quantum/_client/operations/_operations.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/azure/quantum/_client/operations/_operations.py",
"repo_id": "azure-quantum-python",
"token_count": 35108
}
| 417 |
##
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
##
import abc
import logging
import uuid
from enum import Enum
from datetime import datetime, timezone, timedelta
from urllib.parse import urlparse, parse_qs
from typing import Any, Dict, Optional, TYPE_CHECKING
from azure.storage.blob import BlobClient
from azure.quantum.storage import upload_blob, download_blob, download_blob_properties, ContainerClient
from azure.quantum._client.models import JobDetails
from azure.quantum.job.workspace_item import WorkspaceItem
if TYPE_CHECKING:
from azure.quantum.workspace import Workspace
logger = logging.getLogger(__name__)
DEFAULT_TIMEOUT = 300 # Default timeout for waiting for job to complete
class ContentType(str, Enum):
json = "application/json"
text_plain = "text/plain"
class BaseJob(WorkspaceItem):
# Optionally override these to create a Provider-specific Job subclass
"""
Base job class with methods to create a job from raw blob data,
upload blob data and download results.
:param workspace: Workspace instance of the job
:type workspace: Workspace
:param details: Item details model,
contains item ID, name and other details
:type details: ItemDetails
"""
@staticmethod
def create_job_id() -> str:
"""Create a unique id for a new job."""
return str(uuid.uuid1())
@property
def details(self) -> JobDetails:
"""Job details"""
return self._details
@details.setter
def details(self, value: JobDetails):
self._details = value
@property
def container_name(self):
"""Job input/output data container name"""
return f"job-{self.id}"
@classmethod
def from_input_data(
cls,
workspace: "Workspace",
name: str,
target: str,
input_data: bytes,
content_type: ContentType = ContentType.json,
blob_name: str = "inputData",
encoding: str = "",
job_id: str = None,
container_name: str = None,
provider_id: str = None,
input_data_format: str = None,
output_data_format: str = None,
input_params: Dict[str, Any] = None,
session_id: Optional[str] = None,
**kwargs
) -> "BaseJob":
"""Create a new Azure Quantum job based on a raw input_data payload.
:param workspace: Azure Quantum workspace to submit the input_data to
:type workspace: Workspace
:param name: Name of the job
:type name: str
:param target: Azure Quantum target
:type target: str
:param input_data: Raw input data to submit
:type input_data: bytes
:param blob_name: Input data blob name, defaults to "inputData"
:type blob_name: str
:param content_type: Content type, e.g. "application/json"
:type content_type: ContentType
:param encoding: input_data encoding, e.g. "gzip", defaults to empty string
:type encoding: str
:param job_id: Job ID, defaults to None
:type job_id: str
:param container_name: Container name, defaults to None
:type container_name: str
:param provider_id: Provider ID, defaults to None
:type provider_id: str
:param input_data_format: Input data format, defaults to None
:type input_data_format: str
:param output_data_format: Output data format, defaults to None
:type output_data_format: str
        :param input_params: Input parameters, defaults to None
        :type input_params: Dict[str, Any]
        :param session_id: Id of the session to associate the job with, defaults to None
        :type session_id: Optional[str]
:return: Azure Quantum Job
:rtype: Job
"""
# Generate job ID if not specified
if job_id is None:
job_id = cls.create_job_id()
# Create container if it does not yet exist
container_uri = workspace.get_container_uri(
job_id=job_id,
container_name=container_name
)
logger.debug(f"Container URI: {container_uri}")
# Upload data to container
input_data_uri = cls.upload_input_data(
container_uri=container_uri,
input_data=input_data,
content_type=content_type,
blob_name=blob_name,
encoding=encoding,
)
# Create and submit job
return cls.from_storage_uri(
workspace=workspace,
job_id=job_id,
target=target,
input_data_uri=input_data_uri,
container_uri=container_uri,
name=name,
input_data_format=input_data_format,
output_data_format=output_data_format,
provider_id=provider_id,
input_params=input_params,
session_id=session_id,
**kwargs
)
@classmethod
def from_storage_uri(
cls,
workspace: "Workspace",
name: str,
target: str,
input_data_uri: str,
provider_id: str,
input_data_format: str,
output_data_format: str,
container_uri: str = None,
job_id: str = None,
input_params: Dict[str, Any] = None,
submit_job: bool = True,
session_id: Optional[str] = None,
**kwargs
) -> "BaseJob":
"""Create new Job from URI if input data is already uploaded
to blob storage
:param workspace: Azure Quantum workspace to submit the blob to
:type workspace: Workspace
:param name: Job name
:type name: str
:param target: Azure Quantum target
:type target: str
:param input_data_uri: Input data URI
:type input_data_uri: str
:param provider_id: Provider ID
:type provider_id: str
:param input_data_format: Input data format
:type input_data_format: str
:param output_data_format: Output data format
:type output_data_format: str
:param container_uri: Container URI, defaults to None
:type container_uri: str
:param job_id: Pre-generated job ID, defaults to None
:type job_id: str
:param input_params: Input parameters, defaults to None
:type input_params: Dict[str, Any]
:param submit_job: If job should be submitted to the service, defaults to True
:type submit_job: bool
:return: Job instance
:rtype: Job
"""
        # Generate job_id and input_params if not specified
if job_id is None:
job_id = cls.create_job_id()
if input_params is None:
input_params = {}
# Create container for output data if not specified
if container_uri is None:
container_uri = workspace.get_container_uri(job_id=job_id)
# Create job details and return Job
details = JobDetails(
id=job_id,
name=name,
container_uri=container_uri,
input_data_format=input_data_format,
output_data_format=output_data_format,
input_data_uri=input_data_uri,
provider_id=provider_id,
target=target,
input_params=input_params,
session_id=session_id,
**kwargs
)
job = cls(workspace, details, **kwargs)
logger.info(
f"Submitting job '{name}'. \
Using payload from: '{job.details.input_data_uri}'"
)
if submit_job:
logger.debug(f"==> submitting: {job.details}")
job.submit()
return job
@staticmethod
def upload_input_data(
container_uri: str,
input_data: bytes,
content_type: Optional[ContentType] = ContentType.json,
blob_name: str = "inputData",
encoding: str = "",
return_sas_token: bool = False
) -> str:
"""Upload input data file
:param container_uri: Container URI
:type container_uri: str
:param input_data: Input data in binary format
:type input_data: bytes
:param content_type: Content type, e.g. "application/json"
        :type content_type: Optional[ContentType]
:param blob_name: Blob name, defaults to "inputData"
:type blob_name: str
:param encoding: Encoding, e.g. "gzip", defaults to ""
:type encoding: str
:param return_sas_token: Flag to return SAS token as part of URI, defaults to False
:type return_sas_token: bool
:return: Uploaded data URI
:rtype: str
"""
container_client = ContainerClient.from_container_url(
container_uri
)
uploaded_blob_uri = upload_blob(
container_client,
blob_name,
content_type,
encoding,
input_data,
return_sas_token=return_sas_token
)
return uploaded_blob_uri
def download_data(self, blob_uri: str) -> dict:
"""Download file from blob uri
:param blob_uri: Blob URI
:type blob_uri: str
:return: Payload from blob
:rtype: dict
"""
blob_uri_with_sas_token = self._get_blob_uri_with_sas_token(blob_uri)
payload = download_blob(blob_uri_with_sas_token)
return payload
def download_blob_properties(self, blob_uri: str):
"""Download Blob properties
:param blob_uri: Blob URI
:type blob_uri: str
:return: Blob properties
:rtype: dict
"""
blob_uri_with_sas_token = self._get_blob_uri_with_sas_token(blob_uri)
return download_blob_properties(blob_uri_with_sas_token)
def upload_attachment(
self,
name: str,
data: bytes,
container_uri: str = None,
**kwargs
) -> str:
"""Uploads an attachment to the job's container file. Attachment's are identified by name.
Uploading to an existing attachment overrides its previous content.
:param name: Attachment name
:type name: str
:param data: Attachment data in binary format
        :type data: bytes
:param container_uri: Container URI, defaults to the job's linked container.
:type container_uri: str
:return: Uploaded data URI
:rtype: str
"""
# Use Job's default container if not specified
if container_uri is None:
container_uri = self.workspace.get_container_uri(job_id=self.id)
uploaded_blob_uri = self.upload_input_data(
container_uri = container_uri,
blob_name = name,
input_data = data,
**kwargs
)
return uploaded_blob_uri
def download_attachment(
self,
name: str,
container_uri: str = None
):
""" Downloads an attachment from job's container in Azure Storage. Attachments are blobs of data
created as part of the Job's execution, or they can be created by uploading directly from Python
using the upload_attachment method.
:param name: Attachment name
:type name: str
:param container_uri: Container URI, defaults to the job's linked container.
:type container_uri: str
:return: Attachment data
:rtype: bytes
"""
# Use Job's default container if not specified
if container_uri is None:
container_uri = self.workspace.get_container_uri(job_id=self.id)
container_client = ContainerClient.from_container_url(container_uri)
blob_client = container_client.get_blob_client(name)
response = blob_client.download_blob().readall()
return response
def _get_blob_uri_with_sas_token(self, blob_uri: str) -> str:
"""Get Blob URI with SAS-token if one was not specified in blob_uri parameter
:param blob_uri: Blob URI
:type blob_uri: str
:return: Blob URI with SAS-token
:rtype: str
"""
url = urlparse(blob_uri)
query_params = parse_qs(url.query)
token_expire_query_param = query_params.get("se")
token_expire_time = None
if token_expire_query_param is not None:
token_expire_time_str = token_expire_query_param[0]
# Since python < 3.11 can not easily parse Z suffixed UTC timestamp and
# assuming that the timestamp is always UTC, we replace that suffix with UTC offset.
token_expire_time = datetime.fromisoformat(
token_expire_time_str.replace('Z', '+00:00')
)
            # Treat the token as expiring a few minutes early, so it is never
            # used when only seconds away from its actual expiration.
token_expire_time = token_expire_time - timedelta(minutes=5)
current_utc_time = datetime.now(tz=timezone.utc)
if token_expire_time is None or current_utc_time >= token_expire_time:
            # blob_uri does not contain a SAS token, or the token has expired;
            # get a SAS URL from the service
blob_client = BlobClient.from_blob_url(
blob_uri
)
blob_uri = self.workspace._get_linked_storage_sas_uri(
blob_client.container_name, blob_client.blob_name
)
return blob_uri
|
azure-quantum-python/azure-quantum/azure/quantum/job/base_job.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/azure/quantum/job/base_job.py",
"repo_id": "azure-quantum-python",
"token_count": 5854
}
| 418 |
##
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
##
import warnings
import inspect
from itertools import groupby
from typing import Dict, List, Optional, Tuple, Type
from azure.quantum import Workspace
try:
from qiskit.providers import ProviderV1 as Provider
from qiskit.providers.exceptions import QiskitBackendNotFoundError
from qiskit.providers import BackendV1 as Backend
from qiskit.exceptions import QiskitError
except ImportError:
raise ImportError(
"Missing optional 'qiskit' dependencies. \
To install run: pip install azure-quantum[qiskit]"
)
from azure.quantum.qiskit.backends.backend import AzureBackendBase
from azure.quantum.qiskit.job import AzureQuantumJob
from azure.quantum.qiskit.backends import *
QISKIT_USER_AGENT = "azure-quantum-qiskit"
class AzureQuantumProvider(Provider):
def __init__(self, workspace: Optional[Workspace]=None, **kwargs):
"""Class for interfacing with the Azure Quantum service
using Qiskit quantum circuits.
        :param workspace: Azure Quantum workspace. If missing, a new Workspace is created, passing `kwargs` to its constructor. Defaults to None.
:type workspace: Workspace
"""
if kwargs is not None and len(kwargs) > 0:
from warnings import warn
warn(f"""Consider passing \"workspace\" argument explicitly.
The ability to initialize AzureQuantumProvider with arguments {', '.join(f'"{argName}"' for argName in kwargs)} is going to be deprecated in future versions.""",
DeprecationWarning,
stacklevel=2)
if workspace is None:
workspace = Workspace(**kwargs)
workspace.append_user_agent(QISKIT_USER_AGENT)
self._workspace = workspace
self._backends = None
def get_workspace(self) -> Workspace:
"""Return Azure Quantum Workspace"""
return self._workspace
def get_backend(self, name=None, **kwargs) -> AzureBackendBase:
"""Return a single backend matching the specified filtering.
Args:
name (str): name of the backend.
**kwargs: dict used for filtering.
Returns:
azure.quantum.qiskit.backends.AzureBackendBase: a backend matching the filtering.
Raises:
QiskitBackendNotFoundError: if no backend could be found or
more than one backend matches the filtering criteria.
"""
backends = self.backends(name=name, **kwargs)
if len(backends) > 1:
raise QiskitBackendNotFoundError(
"More than one backend matches the criteria"
)
if not backends:
raise QiskitBackendNotFoundError(
f"Could not find target '{name}'. \
Please make sure the target name is valid and that the associated provider is added to your Workspace. \
To add a provider to your quantum workspace on the Azure Portal, \
see https://aka.ms/AQ/Docs/AddProvider"
)
return backends[0]
def backends(self, name=None, **kwargs):
"""Return a list of backends matching the specified filtering.
Args:
name (str): name of the backend.
**kwargs: dict used for filtering.
Returns:
typing.List[azure.quantum.qiskit.backends.AzureBackendBase]: a list of Backends that match the filtering
criteria.
"""
# Lazy load backends
if self._backends is None:
self._backends = self._init_backends()
if name:
if name not in self._backends:
raise QiskitBackendNotFoundError(
f"The '{name}' backend is not installed in your system."
)
provider_id = kwargs.get("provider_id", None)
allowed_targets = self._get_allowed_targets_from_workspace(name, provider_id)
workspace_allowed = lambda backend: self._is_available_in_ws(
allowed_targets, backend
)
# flatten the available backends
backend_list = [x for v in self._backends.values() for x in v]
# filter by properties specified in the kwargs and filter function
filtered_backends: List[Backend] = self._filter_backends(
backend_list, filters=workspace_allowed, **kwargs
)
# Also filter out non-default backends.
default_backends = list(
filter(
lambda backend: self._match_all(
backend.configuration().to_dict(), {"is_default": True}
),
filtered_backends,
)
)
# If default backends were found - return them, otherwise return the filtered_backends collection.
# The latter case could happen where there's no default backend defined for the specified target.
if len(default_backends) > 0:
return default_backends
return filtered_backends
def get_job(self, job_id) -> AzureQuantumJob:
"""Returns the Job instance associated with the given id.
Args:
job_id (str): Id of the Job to return.
Returns:
AzureQuantumJob: Job instance.
"""
azure_job = self._workspace.get_job(job_id)
backend = self.get_backend(azure_job.details.target)
return AzureQuantumJob(backend, azure_job)
def _is_available_in_ws(
self, allowed_targets: List[Tuple[str, str]], backend: Backend
):
for name, provider in allowed_targets:
if backend.name() == name:
config = backend.configuration().to_dict()
if "azure" in config and "provider_id" in config["azure"]:
if config["azure"]["provider_id"] == provider:
return True
return False
def _get_allowed_targets_from_workspace(
self, name: str, provider_id: str
) -> List[Tuple[str, str]]:
target_statuses = self._workspace._get_target_status(name, provider_id)
candidates: List[Tuple[str, str]] = []
for provider_id, status in target_statuses:
candidates.append((status.id, provider_id))
return candidates
def _get_candidate_subclasses(self, subtype: Type[Backend]):
if not inspect.isabstract(subtype):
yield subtype
subclasses = subtype.__subclasses__()
if subclasses:
for subclass in subclasses:
for leaf in self._get_candidate_subclasses(subclass):
yield leaf
def _init_backends(self) -> Dict[str, List[Backend]]:
instances: Dict[str, List[Backend]] = {}
subclasses = list(self._get_candidate_subclasses(subtype=AzureBackendBase))
for backend_cls in subclasses:
backend_names = self._backend_names(backend_cls)
if backend_names is None:
continue
for name in backend_names:
backend_instance: Backend = self._get_backend_instance(
backend_cls, name
)
backend_name: str = backend_instance.name()
instances.setdefault(backend_name, []).append(backend_instance)
return instances
def _backend_names(self, backend_cls: Type[Backend]) -> List[str]:
if hasattr(backend_cls, "backend_names"):
return list(backend_cls.backend_names)
return None
def _get_backend_instance(self, backend_cls: Type[Backend], name: str) -> Backend:
try:
return backend_cls(name=name, provider=self)
except Exception as err:
raise QiskitError(
f"Backend {backend_cls} could not be instantiated: {err}"
) from err
def _match_all(self, obj, criteria):
"""Return True if all items in criteria matches items in obj."""
return all(
self._match_config(obj, key_, value_) for key_, value_ in criteria.items()
)
def _match_config(self, obj, key, value):
"""Return True if the criteria matches the base config or azure config."""
return obj.get(key, None) == value or self._match_azure_config(
obj, key, value
)
def _match_azure_config(self, obj, key, value):
"""Return True if the criteria matches the azure config."""
azure_config = obj.get("azure", {})
return azure_config.get(key, None) == value
def _has_config_value(self, obj, key):
"""Return True if the key is found in the root config or azure config."""
return key in obj or key in obj.get("azure", {})
def _filter_backends(
self, backends: List[Backend], filters=None, **kwargs
) -> List[Backend]:
"""Return the backends matching the specified filtering.
Filter the `backends` list by their `configuration` attributes,
or from a boolean callable. The criteria for filtering can
be specified via `**kwargs` or as a callable via `filters`, and the
backends must fulfill all specified conditions.
Args:
backends (list[Backend]): list of backends.
filters (callable): filtering conditions as a callable.
**kwargs: dict of criteria.
Returns:
list[Backend]: a list of backend instances matching the
conditions.
"""
configuration_filters = {}
unknown_filters = {}
for key, value in kwargs.items():
# If `any` of the backends has the key in its configuration, filter by it.
# qiskit API for this requires `all` backends to have the key in
# their configuration to be considered for filtering
print(f"Looking for {key} with {value}")
if any(
self._has_config_value(backend.configuration().to_dict(), key) for backend in backends
):
configuration_filters[key] = value
else:
unknown_filters[key] = value
if configuration_filters:
backends = list(
filter(
lambda backend: self._match_all(
backend.configuration().to_dict(), configuration_filters
),
backends,
)
)
if unknown_filters:
warnings.warn(
f"Specified filters {unknown_filters} are not supported by the available backends."
)
backends = list(filter(filters, backends))
return backends
|
azure-quantum-python/azure-quantum/azure/quantum/qiskit/provider.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/azure/quantum/qiskit/provider.py",
"repo_id": "azure-quantum-python",
"token_count": 4587
}
| 419 |
"""Defines targets and helper functions for the Pasqal provider"""
##
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
##
__all__ = [
"Result",
]
import json
from typing import Union, Dict, List, TypeVar, cast
from ...job import Job
class Result:
"""Downloads the data of a completed Job and provides a dictionary of registers.
.. highlight:: python
.. code-block::
from azure.quantum.job import Job
from azure.quantum.target.pasqal import Result
job = Job(...) # This job should come from a Pasqal target
job.wait_until_completed()
result = Result(job)
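        num_samples = result["<register-name>"]  # hypothetical register name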
"""
def __init__(self, job: Job) -> None:
"""
Decode the results of a Job with output type of "pasqal.pulser-results.v1"
Args:
job (Job): Azure Quantum job
Raises:
RuntimeError: if the job has not completed successfully
"""
if job.details.status != "Succeeded":
raise RuntimeError(
"Cannot retrieve results as job execution failed "
f"(status: {job.details.status}."
f"error: {job.details.error_data})"
)
self.data = cast(Dict[str, int], json.loads(job.download_data(job.details.output_data_uri)))
def __getitem__(self, register_name: str) -> int:
return self.data[register_name]
|
azure-quantum-python/azure-quantum/azure/quantum/target/pasqal/result.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/azure/quantum/target/pasqal/result.py",
"repo_id": "azure-quantum-python",
"token_count": 565
}
| 420 |
##
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
##
# A resource estimation CLI that can execute resource estimation jobs from
# various input formats and generate JSON output.
import argparse
import json
import os
import sys
from azure.quantum import Workspace
from azure.quantum.target.microsoft import MicrosoftEstimator
# Configure program arguments
parser = argparse.ArgumentParser(
prog="estimate",
description="Estimate physical resources using Azure Quantum")
parser.add_argument(
"filename",
help="Quantum program (.ll, .qir, .bc, .qs, .qasm)")
parser.add_argument(
"-r",
"--resource-id",
default=os.environ.get("AZURE_QUANTUM_RESOURCE_ID"),
help="Resource ID of Azure Quantum workspace (must be set, unless set via "
"environment variable AZURE_QUANTUM_RESOURCE_ID)")
parser.add_argument(
"-l",
"--location",
default=os.environ.get("AZURE_QUANTUM_LOCATION"),
help="Location of Azure Quantum workspace (must be set, unless set via "
"environment AZURE_QUANTUM_LOCATION)")
parser.add_argument(
"-p",
"--job-params",
help="JSON file with job parameters")
parser.add_argument(
"-o",
"--output",
help="Output file (default: stdout)"
)
# Parse and validate arguments
args = parser.parse_args()
if not args.resource_id:
parser.error("the following arguments are required: -r/--resource-id")
if not args.location:
parser.error("the following arguments are required: -l/--location")
# Set up Azure Quantum workspace
workspace = Workspace(resource_id=args.resource_id, location=args.location)
estimator = MicrosoftEstimator(workspace)
# Prepare program input based on file extension
extension = os.path.splitext(args.filename)[1]
if extension == ".ll":
# LLVM IR
try:
import pyqir
ir_code = open(args.filename, "r").read()
context = pyqir.Context()
module = pyqir.Module.from_ir(context, ir_code)
input_data = module.bitcode
except ImportError:
raise ImportError("PyQIR is not installed. Please install the pyqir "
"package to use this feature.")
elif extension == ".qir" or extension == ".bc":
# QIR or LLVM bitcode
input_data = open(args.filename, "rb").read()
elif extension == ".qs":
# Q#
try:
import qsharp
qsharp.packages.add("Microsoft.Quantum.Numerics")
qsharp_code = open(args.filename, "r").read()
input_data = qsharp.compile(qsharp_code)
except ImportError:
raise ImportError("Q# is not installed. Please install the qsharp "
"package to use this feature.")
elif extension == ".qasm":
# OpenQASM
try:
from qiskit import QuantumCircuit
qasm_code = open(args.filename, "r").read()
input_data = QuantumCircuit.from_qasm_str(qasm_code)
except ImportError:
raise ImportError("Qiskit is not installed. Please install the qiskit "
"package to use this feature.")
else:
raise ValueError(f"Unknown file extension {extension}")
# Parse job arguments
input_params = {}
if args.job_params:
with open(args.job_params, 'r') as f:
input_params = json.load(f)
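# For illustration only, a job parameters file might contain (hypothetical values):
#   {"errorBudget": 0.01, "qubitParams": {"name": "qubit_gate_ns_e3"}}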
# Submit job
job = estimator.submit(input_data, input_params=input_params)
# Get results
try:
results = job.get_results()
except RuntimeError as e:
print()
print(e, file=sys.stderr)
    sys.exit(1)
# Write results to output file
if args.output:
with open(args.output, 'w') as f:
f.write(results.json)
else:
print(results.json)
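# Example invocation (hypothetical workspace values and input file):
#   python cli.py program.qasm \
#       -r "/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.Quantum/Workspaces/<ws>" \
#       -l "eastus" \
#       -o estimates.json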
|
azure-quantum-python/azure-quantum/examples/resource_estimation/cli.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/examples/resource_estimation/cli.py",
"repo_id": "azure-quantum-python",
"token_count": 1351
}
| 421 |
# Unit tests
## Environment Pre-reqs
Refer to [the parent README](../README.md) for how to prepare the development environment before running the unit tests.
### Environment variables for Recording and Live-Tests
The 'recordings' directory is used to replay network connections.
To manually **create new recordings**, remove the 'recordings' subdirectory and run the tests.
To **force the tests to run live**, even with existing recordings, set the environment variable:
```plaintext
AZURE_TEST_RUN_LIVE="True"
```
This will force the recording files to be deleted before running the tests.
To be able to run the tests in recording or live mode, make sure:
- You have a client app registered in Microsoft Entra ID (formerly Azure Active Directory)
- The client app is configured with [certificate-based authentication](https://learn.microsoft.com/en-us/entra/identity/authentication/how-to-certificate-based-authentication)
- The client app has "Contributor" permissions to your Azure Quantum Workspace
- The following environment variables are set (see the example after this list):
- `AZURE_CLIENT_ID` - application (client) ID from Microsoft Entra ID
- `AZURE_TENANT_ID` - directory (tenant) ID from Microsoft Entra ID
- `AZURE_CLIENT_CERTIFICATE_PATH` - path to PEM or PKCS12 certificate file (including the private key) that is configured for the client app
- `AZURE_QUANTUM_SUBSCRIPTION_ID` - ID of the Subscription where Azure Quantum Workspace is deployed
- `AZURE_QUANTUM_WORKSPACE_RG` - name of the Resource Group where Azure Quantum Workspace is deployed
- `AZURE_QUANTUM_WORKSPACE_NAME` - name of the Azure Quantum Workspace
- `AZURE_QUANTUM_WORKSPACE_LOCATION` - Azure region where the Azure Quantum Workspace is deployed
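For example, assuming placeholder values, the variables can be set from Python before running the tests (a shell `export`/`set` works just as well):
```python
# Minimal sketch: all values below are placeholders, not real credentials.
import os
os.environ["AZURE_CLIENT_ID"] = "<app-client-id>"
os.environ["AZURE_TENANT_ID"] = "<tenant-id>"
os.environ["AZURE_CLIENT_CERTIFICATE_PATH"] = "/path/to/client-cert.pem"
os.environ["AZURE_QUANTUM_SUBSCRIPTION_ID"] = "<subscription-id>"
os.environ["AZURE_QUANTUM_WORKSPACE_RG"] = "<resource-group-name>"
os.environ["AZURE_QUANTUM_WORKSPACE_NAME"] = "<workspace-name>"
os.environ["AZURE_QUANTUM_WORKSPACE_LOCATION"] = "<azure-region>"
```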
## Recordings
Our testing infrastructure uses Python VCR to record HTTP calls against a live service and then uses
the recordings (aka "cassettes") to play back the responses, essentially creating a mock of the live service.
### Cannot Overwrite Existing Cassette Exception
When the intention is to simply play back the recordings without recording them again, the Python VCR framework may sometimes give the error "Cannot Overwrite Existing Cassette".
#### Cause
The VCR works like an HTTP proxy. It attempts to find the request by matching the full URI and HTTP headers in the recorded file. If found, it plays back the corresponding response. Otherwise, it attempts a live call to the web API and tries to record the results at the end. When it tries to record and a recording file already exists, it gives the error `CannotOverwriteExistingCassetteException`.
This error can also be caused if the recorded files are manually updated and no longer match the requests that the SDK actually makes.
#### Potential solutions
1. One way to remove the error is to delete the existing recording file and let the tests do all the live calls, creating a new recording file that contains all the requests/responses the tests need. After that, you should be able to simply play back the recordings with no errors.
2. If the error still persists after trying (1), then there is probably something unique in the URL or HTTP headers of the request that changes every time you run the tests. In this case, we need to either make that value constant in the tests or, if it is genuinely unique, replace it in the request recording pipeline such that, at least in the recording file, it is deterministic.
For example, see the [process_request](https://github.com/microsoft/qdk-python/blob/main/azure-quantum/tests/unit/common.py) method in the [tests/unit/common.py](https://github.com/microsoft/qdk-python/blob/main/azure-quantum/tests/unit/common.py) file.
There, we replace several GUIDs and resource identifiers in the URL and HTTP headers to make sure that the request that is searched for (during playback) or recorded (during recording) has no unique values that could cause VCR to miss the recorded request, attempt a new live call, and rewrite the recording. We also replace the identifiers to remove potentially sensitive information from the recordings (like authentication tokens, access keys, etc.).
See [Sequence ids](#Sequence-ids) and [Non-deterministic Job ids and Session ids](#Non-deterministic-Job-ids-and-Session-ids) for more ideas.
### Recording sanitization
To prevent potentially sensitive information (like authentication tokens, access keys, etc.) from being checked in to the repository inside the recordings, we apply several text replacements to the HTTP requests and responses in the VCR pipeline before they end up persisted in the recorded files.
The [QuantumTestBase.\_\_init\_\_ method](https://github.com/microsoft/azure-quantum-python/blob/main/azure-quantum/tests/unit/common.py#L73) contains several rules (mostly regular expressions) that are applied in the HTTP requests and reponses via several recording and playback processors that are injected in the VCR HTTP pipeline.
We use some common processors provided by the Azure SDK framework (including AccessTokenReplacer, InteractiveAccessTokenReplacer, RequestUrlNormalizer) but we also apply custom text replacement logic in URLs and HTTP Headers via the `process_request` and `process_response` methods and some other processors/filters found at the end of the file.
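As a minimal, standalone illustration of the same idea (written against plain `vcrpy` directly, not the exact processors from `common.py`), a request scrubber might look like this:
```python
import re

import vcr

# Hypothetical sanitization rules -- the real ones in common.py are richer.
GUID_RE = re.compile(
    r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}",
    re.IGNORECASE)

def scrub_request(request):
    # Replace any GUID in the URI with a constant placeholder and drop the
    # Authorization header so tokens never end up in the cassette.
    request.uri = GUID_RE.sub("00000000-0000-0000-0000-000000000000",
                              request.uri)
    request.headers.pop("Authorization", None)
    return request

my_vcr = vcr.VCR(before_record_request=scrub_request)

# with my_vcr.use_cassette("recordings/my_test.yaml"):
#     ...  # HTTP calls made here are recorded with sanitized requests
```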
### Ability to Pause Recordings
If there are certain requests that you don't want recorded,
you can pause the recording before making those requests
and resume it afterwards.
Note that if those requests are needed during playback, the test will fail
because the recordings won't be found.
Example:
```python
if self.in_recording:
self.pause_recording()
# do stuff that could generate HTTP requests
# but that you don't want to end-up in the recordings
self.resume_recording()
```
### Sequence ids
By default, VCR does not allow the same request (identified by URI and HTTP Headers) to have multiple responses associated with it.
For example, when a job is submitted and we want to fetch the job status, the HTTP request to get the job status is the same, but the response can be different, initially returning Status="In-Progress" and later returning Status="Completed".
This limitation is solved by the test base class automatically injecting a `test-sequence-id` in the query string via the [CustomRecordingProcessor.\_append_sequence_id method](https://github.com/microsoft/azure-quantum-python/blob/main/azure-quantum/tests/unit/common.py#L458).
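A hedged sketch of the idea (illustrative only, not the actual implementation) is to keep a counter per request URI and append it as a query parameter, so that each repetition of an otherwise identical request is recorded, and later matched, as a distinct request:
```python
from collections import defaultdict
from urllib.parse import urlencode

# Hypothetical helper illustrating _append_sequence_id's approach.
_sequence_ids = defaultdict(int)

def append_sequence_id(uri: str) -> str:
    _sequence_ids[uri] += 1
    separator = "&" if "?" in uri else "?"
    return uri + separator + urlencode({"test-sequence-id": _sequence_ids[uri]})

# append_sequence_id(".../jobs/1?api-version=2022-09-12-preview")
# -> ".../jobs/1?api-version=2022-09-12-preview&test-sequence-id=1"
# The next call with the same URI yields ...&test-sequence-id=2, and so on.
```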
### Non-deterministic Job ids and Session ids
By default, the Azure Quantum Python SDK automatically creates a random guid when creating a new job or session if an id is not specified.
This non-deterministic behavior can cause problems when the test recordings are played back, since the ids in the recordings won't match the random ids created by the SDK when the tests run again.
To mitigate this problem, the `CustomRecordingProcessor` automatically looks for GUIDs (registered with the `register_guid_regex` method) in the HTTP requests and replaces them with deterministic, sequential GUIDs via the [CustomRecordingProcessor.\_search_for_guids method](https://github.com/microsoft/azure-quantum-python/blob/main/azure-quantum/tests/unit/common.py#L438), using the same regex-replacement mechanism (over the URI, headers, and body) that sanitizes the recordings.
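A hedged sketch of that mechanism (hypothetical names, not the actual code) maps each newly seen GUID to the next deterministic one and reuses the mapping everywhere afterwards:
```python
import re

# Hypothetical sketch of the idea behind _search_for_guids.
GUID_RE = re.compile(
    r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}",
    re.IGNORECASE)

_guid_map = {}

def make_deterministic(text: str) -> str:
    def _replace(match):
        guid = match.group(0).lower()
        if guid not in _guid_map:
            # First GUID seen becomes ...000001, the second ...000002, etc.
            _guid_map[guid] = "00000000-0000-0000-0000-%012d" % (len(_guid_map) + 1)
        return _guid_map[guid]
    return GUID_RE.sub(_replace, text)

# Applying make_deterministic to the URI, headers, and body of every request
# keeps recordings stable across runs while preserving GUID identity.
```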
## Tests
To run the tests, simply run `pytest` from the root of the `azure-quantum` directory:
```bash
pytest
```
To run a specific test class, run `pytest [test_file.py]`.
Example:
```bash
pytest ./tests/unit/test_job.py
```
To run a specific test case, run `pytest -k [test_method_name]`.
Example:
```bash
pytest -k test_job_refresh
```
## E2E Live Test Pipeline
We have a private E2E test pipeline that runs all the tests against
a live environment on a regular basis.
By default that pipeline will use the latest tests from the `main` branch
of this repository, but will install the latest released `azure-quantum` package from PyPI.
That can create an issue if you add tests for a new feature that has not been
released/published yet, since the tests will expect that feature but the currently released
package does not have it.
To mitigate this issue you can add a `pytest.mark.skipif` mark to those new tests when the version
is less than or equal to the latest published version.
For example:
```python
import pytest
from azure.quantum.version import __version__

skip_older_version = pytest.mark.skipif(
    __version__ != "0.0.1" and __version__ <= "0.28.263081",
    reason="Test requires the version to be > 0.28.263081.",
)

@skip_older_version
def test_my_test(self):
    pass
```
|
azure-quantum-python/azure-quantum/tests/README.md/0
|
{
"file_path": "azure-quantum-python/azure-quantum/tests/README.md",
"repo_id": "azure-quantum-python",
"token_count": 2245
}
| 422 |
interactions:
- request:
body: client_id=PLACEHOLDER&grant_type=client_credentials&client_assertion=PLACEHOLDER&client_info=1&client_assertion_type=PLACEHOLDER&scope=https%3A%2F%2Fquantum.microsoft.com%2F.default
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '181'
Content-Type:
- application/x-www-form-urlencoded
User-Agent:
- azsdk-python-identity/1.16.0 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-client-current-telemetry:
- 4|730,2|
x-client-os:
- win32
x-client-sku:
- MSAL.Python
x-client-ver:
- 1.28.0
method: POST
uri: https://login.microsoftonline.com/00000000-0000-0000-0000-000000000000/oauth2/v2.0/token
response:
body:
string: '{"token_type": "Bearer", "expires_in": 1746122613, "ext_expires_in":
1746122613, "refresh_in": 31536000, "access_token": "PLACEHOLDER"}'
headers:
content-length:
- '135'
content-type:
- application/json; charset=utf-8
status:
code: 200
message: OK
- request:
body: 'b''{"containerName": "job-00000000-0000-0000-0000-000000000001"}'''
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '64'
Content-Type:
- application/json
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: POST
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/storage/sasUri?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"sasUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl"}'
headers:
connection:
- keep-alive
content-length:
- '174'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 18:03:34 GMT
x-ms-version:
- '2023-11-03'
method: GET
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?restype=container&sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: "\uFEFF<?xml version=\"1.0\" encoding=\"utf-8\"?><Error><Code>ContainerNotFound</Code><Message>The
specified container does not exist.\nRequestId:afdf8b36-801e-004f-13f1-9b5d94000000\nTime:2024-05-01T18:03:37.2006267Z</Message></Error>"
headers:
content-length:
- '223'
content-type:
- application/xml
x-ms-version:
- '2023-11-03'
status:
code: 404
message: The specified container does not exist.
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '0'
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 18:03:36 GMT
x-ms-version:
- '2023-11-03'
method: PUT
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?restype=container&sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: ''
headers:
content-length:
- '0'
x-ms-version:
- '2023-11-03'
status:
code: 201
message: Created
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 18:03:36 GMT
x-ms-version:
- '2023-11-03'
method: GET
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?restype=container&sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: ''
headers:
content-length:
- '0'
x-ms-lease-state:
- available
x-ms-lease-status:
- unlocked
x-ms-version:
- '2023-11-03'
status:
code: 200
message: OK
- request:
body: b'\nDECLARE ro BIT[1]\nDECLARE theta REAL[1]\n\nRX(theta) 0\n\nMEASURE 0
ro[0]\n'
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '81'
Content-Type:
- application/octet-stream
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-blob-type:
- BlockBlob
x-ms-date:
- Wed, 01 May 2024 18:03:38 GMT
x-ms-version:
- '2023-11-03'
method: PUT
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: ''
headers:
content-length:
- '0'
x-ms-version:
- '2023-11-03'
status:
code: 201
message: Created
- request:
body: 'b''{"id": "00000000-0000-0000-0000-000000000001", "name": "qdk-python-test",
"providerId": "rigetti", "target": "rigetti.sim.qvm", "itemType": "Job", "containerUri":
"https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData",
"inputDataFormat": "rigetti.quil.v1", "inputParams": {"skipQuilc": false, "substitutions":
{"theta": [[0.0], [3.141592653589793], [6.283185307179586]]}, "count": 5}, "outputDataFormat":
"rigetti.quil-results.v1"}'''
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '652'
Content-Type:
- application/json
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: PUT
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcw",
"inputDataFormat": "rigetti.quil.v1", "inputParams": {"skipQuilc": false,
"substitutions": {"theta": [[0.0], [3.141592653589793], [6.283185307179586]]},
"count": 5}, "metadata": null, "sessionId": null, "status": "Waiting", "jobType":
"QuantumComputing", "outputDataFormat": "rigetti.quil-results.v1", "outputDataUri":
"https://mystorage.blob.core.windows.net:443/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
null, "errorData": null, "isCancelling": false, "tags": [], "name": "qdk-python-test",
"id": "00000000-0000-0000-0000-000000000001", "providerId": "rigetti", "target":
"rigetti.sim.qvm", "creationTime": "2024-05-01T18:03:39.6502005+00:00", "endExecutionTime":
null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1244'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "rigetti.quil.v1", "inputParams": {"skipQuilc": false,
"substitutions": {"theta": [[0.0], [3.141592653589793], [6.283185307179586]]},
"count": 5}, "metadata": null, "sessionId": null, "status": "Waiting", "jobType":
"QuantumComputing", "outputDataFormat": "rigetti.quil-results.v1", "outputDataUri":
"https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"qdk-python-test", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"rigetti", "target": "rigetti.sim.qvm", "creationTime": "2024-05-01T18:03:39.6502005+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1420'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=2
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "rigetti.quil.v1", "inputParams": {"skipQuilc": false,
"substitutions": {"theta": [[0.0], [3.141592653589793], [6.283185307179586]]},
"count": 5}, "metadata": null, "sessionId": null, "status": "Waiting", "jobType":
"QuantumComputing", "outputDataFormat": "rigetti.quil-results.v1", "outputDataUri":
"https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"qdk-python-test", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"rigetti", "target": "rigetti.sim.qvm", "creationTime": "2024-05-01T18:03:39.6502005+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1420'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=3
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "rigetti.quil.v1", "inputParams": {"skipQuilc": false,
"substitutions": {"theta": [[0.0], [3.141592653589793], [6.283185307179586]]},
"count": 5}, "metadata": null, "sessionId": null, "status": "Waiting", "jobType":
"QuantumComputing", "outputDataFormat": "rigetti.quil-results.v1", "outputDataUri":
"https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"qdk-python-test", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"rigetti", "target": "rigetti.sim.qvm", "creationTime": "2024-05-01T18:03:39.6502005+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1420'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=4
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "rigetti.quil.v1", "inputParams": {"skipQuilc": false,
"substitutions": {"theta": [[0.0], [3.141592653589793], [6.283185307179586]]},
"count": 5}, "metadata": null, "sessionId": null, "status": "Waiting", "jobType":
"QuantumComputing", "outputDataFormat": "rigetti.quil-results.v1", "outputDataUri":
"https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"qdk-python-test", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"rigetti", "target": "rigetti.sim.qvm", "creationTime": "2024-05-01T18:03:39.6502005+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1420'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=5
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "rigetti.quil.v1", "inputParams": {"skipQuilc": false,
"substitutions": {"theta": [[0.0], [3.141592653589793], [6.283185307179586]]},
"count": 5}, "metadata": null, "sessionId": null, "status": "Waiting", "jobType":
"QuantumComputing", "outputDataFormat": "rigetti.quil-results.v1", "outputDataUri":
"https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"qdk-python-test", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"rigetti", "target": "rigetti.sim.qvm", "creationTime": "2024-05-01T18:03:39.6502005+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1420'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=6
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "rigetti.quil.v1", "inputParams": {"skipQuilc": false,
"substitutions": {"theta": [[0.0], [3.141592653589793], [6.283185307179586]]},
"count": 5}, "metadata": null, "sessionId": null, "status": "Executing", "jobType":
"QuantumComputing", "outputDataFormat": "rigetti.quil-results.v1", "outputDataUri":
"https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T18:03:43.2366257Z", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "qdk-python-test", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "rigetti", "target": "rigetti.sim.qvm", "creationTime": "2024-05-01T18:03:39.6502005+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1448'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=7
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "rigetti.quil.v1", "inputParams": {"skipQuilc": false,
"substitutions": {"theta": [[0.0], [3.141592653589793], [6.283185307179586]]},
"count": 5}, "metadata": null, "sessionId": null, "status": "Succeeded", "jobType":
"QuantumComputing", "outputDataFormat": "rigetti.quil-results.v1", "outputDataUri":
"https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/rawOutputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T18:03:43.2366257Z", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "qdk-python-test", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "rigetti", "target": "rigetti.sim.qvm", "creationTime": "2024-05-01T18:03:39.6502005+00:00",
"endExecutionTime": "2024-05-01T18:03:44.3490043Z", "costEstimate": {"currencyCode":
"USD", "events": [{"dimensionId": "qpu_time_centiseconds", "dimensionName":
"QPU Execution Time", "measureUnit": "10ms (rounded up)", "amountBilled":
0.0, "amountConsumed": 0.0, "unitPrice": 0.0}], "estimatedTotal": 0.0}, "itemType":
"Job"}'
headers:
connection:
- keep-alive
content-length:
- '1710'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=8
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "rigetti.quil.v1", "inputParams": {"skipQuilc": false,
"substitutions": {"theta": [[0.0], [3.141592653589793], [6.283185307179586]]},
"count": 5}, "metadata": null, "sessionId": null, "status": "Succeeded", "jobType":
"QuantumComputing", "outputDataFormat": "rigetti.quil-results.v1", "outputDataUri":
"https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/rawOutputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T18:03:43.2366257Z", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "qdk-python-test", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "rigetti", "target": "rigetti.sim.qvm", "creationTime": "2024-05-01T18:03:39.6502005+00:00",
"endExecutionTime": "2024-05-01T18:03:44.3490043Z", "costEstimate": {"currencyCode":
"USD", "events": [{"dimensionId": "qpu_time_centiseconds", "dimensionName":
"QPU Execution Time", "measureUnit": "10ms (rounded up)", "amountBilled":
0.0, "amountConsumed": 0.0, "unitPrice": 0.0}], "estimatedTotal": 0.0}, "itemType":
"Job"}'
headers:
connection:
- keep-alive
content-length:
- '1710'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=9
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "rigetti.quil.v1", "inputParams": {"skipQuilc": false,
"substitutions": {"theta": [[0.0], [3.141592653589793], [6.283185307179586]]},
"count": 5}, "metadata": null, "sessionId": null, "status": "Succeeded", "jobType":
"QuantumComputing", "outputDataFormat": "rigetti.quil-results.v1", "outputDataUri":
"https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/rawOutputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T18:03:43.2366257Z", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "qdk-python-test", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "rigetti", "target": "rigetti.sim.qvm", "creationTime": "2024-05-01T18:03:39.6502005+00:00",
"endExecutionTime": "2024-05-01T18:03:44.3490043Z", "costEstimate": {"currencyCode":
"USD", "events": [{"dimensionId": "qpu_time_centiseconds", "dimensionName":
"QPU Execution Time", "measureUnit": "10ms (rounded up)", "amountBilled":
0.0, "amountConsumed": 0.0, "unitPrice": 0.0}], "estimatedTotal": 0.0}, "itemType":
"Job"}'
headers:
connection:
- keep-alive
content-length:
- '1710'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 18:03:45 GMT
x-ms-range:
- bytes=0-33554431
x-ms-version:
- '2023-11-03'
method: GET
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/rawOutputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json
response:
body:
string: '{"ro": [[0], [0], [0], [0], [0], [1], [1], [1], [1], [1], [0], [0],
[0], [0], [0]]}'
headers:
accept-ranges:
- bytes
content-length:
- '83'
content-range:
- bytes 0-67/68
content-type:
- application/json
x-ms-blob-content-md5:
- 39Xn83PqSOB/56gz7iXSwQ==
x-ms-blob-type:
- BlockBlob
x-ms-creation-time:
- Wed, 01 May 2024 18:03:39 GMT
x-ms-lease-state:
- available
x-ms-lease-status:
- unlocked
x-ms-server-encrypted:
- 'true'
x-ms-version:
- '2023-11-03'
status:
code: 206
message: Partial Content
version: 1
|
azure-quantum-python/azure-quantum/tests/unit/recordings/test_parametrized_quil.yaml/0
|
{
"file_path": "azure-quantum-python/azure-quantum/tests/unit/recordings/test_parametrized_quil.yaml",
"repo_id": "azure-quantum-python",
"token_count": 14633
}
| 423 |
interactions:
- request:
body: client_id=PLACEHOLDER&grant_type=client_credentials&client_assertion=PLACEHOLDER&client_info=1&client_assertion_type=PLACEHOLDER&scope=https%3A%2F%2Fquantum.microsoft.com%2F.default
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '181'
Content-Type:
- application/x-www-form-urlencoded
User-Agent:
- azsdk-python-identity/1.16.0 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-client-current-telemetry:
- 4|730,2|
x-client-os:
- win32
x-client-sku:
- MSAL.Python
x-client-ver:
- 1.28.0
method: POST
uri: https://login.microsoftonline.com/00000000-0000-0000-0000-000000000000/oauth2/v2.0/token
response:
body:
string: '{"token_type": "Bearer", "expires_in": 1746121490, "ext_expires_in":
1746121490, "refresh_in": 31536000, "access_token": "PLACEHOLDER"}'
headers:
content-length:
- '135'
content-type:
- application/json; charset=utf-8
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/providerStatus?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"value": [{"id": "microsoft-elements", "currentAvailability": "Available",
"targets": [{"id": "microsoft.dft", "currentAvailability": "Available", "averageQueueTime":
0, "statusPage": null}]}, {"id": "ionq", "currentAvailability": "Degraded",
"targets": [{"id": "ionq.qpu", "currentAvailability": "Available", "averageQueueTime":
493967, "statusPage": "https://status.ionq.co"}, {"id": "ionq.qpu.aria-1",
"currentAvailability": "Unavailable", "averageQueueTime": 735673, "statusPage":
"https://status.ionq.co"}, {"id": "ionq.qpu.aria-2", "currentAvailability":
"Unavailable", "averageQueueTime": 0, "statusPage": "https://status.ionq.co"},
{"id": "ionq.simulator", "currentAvailability": "Available", "averageQueueTime":
3, "statusPage": "https://status.ionq.co"}]}, {"id": "microsoft-qc", "currentAvailability":
"Available", "targets": [{"id": "microsoft.estimator", "currentAvailability":
"Available", "averageQueueTime": 0, "statusPage": null}]}, {"id": "pasqal",
"currentAvailability": "Degraded", "targets": [{"id": "pasqal.sim.emu-tn",
"currentAvailability": "Available", "averageQueueTime": 256, "statusPage":
"https://pasqal.com"}, {"id": "pasqal.qpu.fresnel", "currentAvailability":
"Degraded", "averageQueueTime": 0, "statusPage": "https://pasqal.com"}]},
{"id": "rigetti", "currentAvailability": "Degraded", "targets": [{"id": "rigetti.sim.qvm",
"currentAvailability": "Available", "averageQueueTime": 5, "statusPage": "https://rigetti.statuspage.io/"},
{"id": "rigetti.qpu.ankaa-2", "currentAvailability": "Degraded", "averageQueueTime":
5, "statusPage": "https://rigetti.statuspage.io/"}]}, {"id": "qci", "currentAvailability":
"Available", "targets": [{"id": "qci.simulator", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": "https://quantumcircuits.com"}, {"id":
"qci.machine1", "currentAvailability": "Available", "averageQueueTime": 1,
"statusPage": "https://quantumcircuits.com"}, {"id": "qci.simulator.noisy",
"currentAvailability": "Available", "averageQueueTime": 0, "statusPage": "https://quantumcircuits.com"}]},
{"id": "quantinuum", "currentAvailability": "Available", "targets": [{"id":
"quantinuum.qpu.h1-1", "currentAvailability": "Available", "averageQueueTime":
25016, "statusPage": "https://www.quantinuum.com/hardware/h1"}, {"id": "quantinuum.sim.h1-1sc",
"currentAvailability": "Available", "averageQueueTime": 4, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-1e", "currentAvailability": "Available", "averageQueueTime":
3, "statusPage": "https://www.quantinuum.com/hardware/h1"}, {"id": "quantinuum.qpu.h2-1",
"currentAvailability": "Available", "averageQueueTime": 49414, "statusPage":
"https://www.quantinuum.com/hardware/h2"}, {"id": "quantinuum.sim.h2-1sc",
"currentAvailability": "Available", "averageQueueTime": 0, "statusPage": "https://www.quantinuum.com/hardware/h2"},
{"id": "quantinuum.sim.h2-1e", "currentAvailability": "Available", "averageQueueTime":
648, "statusPage": "https://www.quantinuum.com/hardware/h2"}, {"id": "quantinuum.sim.h1-1sc-preview",
"currentAvailability": "Available", "averageQueueTime": 4, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-1e-preview", "currentAvailability": "Available",
"averageQueueTime": 3, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-2e-preview", "currentAvailability": "Available",
"averageQueueTime": 27901, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.qpu.h1-1-preview", "currentAvailability": "Available",
"averageQueueTime": 25016, "statusPage": "https://www.quantinuum.com/hardware/h1"}]},
{"id": "Microsoft.Test", "currentAvailability": "Available", "targets": [{"id":
"echo-rigetti", "currentAvailability": "Available", "averageQueueTime": 1,
"statusPage": ""}, {"id": "echo-quantinuum", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": ""}, {"id": "echo-qci", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}, {"id": "echo-ionq",
"currentAvailability": "Available", "averageQueueTime": 1, "statusPage": ""},
{"id": "echo-aquarius", "currentAvailability": "Available", "averageQueueTime":
1, "statusPage": ""}, {"id": "sparse-sim-rigetti", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}, {"id": "sparse-sim-quantinuum",
"currentAvailability": "Available", "averageQueueTime": 1, "statusPage": ""},
{"id": "sparse-sim-qci", "currentAvailability": "Available", "averageQueueTime":
1, "statusPage": ""}, {"id": "sparse-sim-ionq", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": ""}, {"id": "echo-output", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}]}], "nextLink": null}'
headers:
connection:
- keep-alive
content-length:
- '4771'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: 'b''{"containerName": "job-00000000-0000-0000-0000-000000000001"}'''
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '64'
Content-Type:
- application/json
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: POST
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/storage/sasUri?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"sasUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl"}'
headers:
connection:
- keep-alive
content-length:
- '174'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 17:44:52 GMT
x-ms-version:
- '2023-11-03'
method: GET
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?restype=container&sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: "\uFEFF<?xml version=\"1.0\" encoding=\"utf-8\"?><Error><Code>ContainerNotFound</Code><Message>The
specified container does not exist.\nRequestId:d57ef6e5-701e-0039-2bef-9bd7dc000000\nTime:2024-05-01T17:44:54.6889529Z</Message></Error>"
headers:
content-length:
- '223'
content-type:
- application/xml
x-ms-version:
- '2023-11-03'
status:
code: 404
message: The specified container does not exist.
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '0'
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 17:44:53 GMT
x-ms-version:
- '2023-11-03'
method: PUT
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?restype=container&sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: ''
headers:
content-length:
- '0'
x-ms-version:
- '2023-11-03'
status:
code: 201
message: Created
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 17:44:54 GMT
x-ms-version:
- '2023-11-03'
method: GET
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?restype=container&sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: ''
headers:
content-length:
- '0'
x-ms-lease-state:
- available
x-ms-lease-status:
- unlocked
x-ms-version:
- '2023-11-03'
status:
code: 200
message: OK
- request:
body: 'b''{"gateset": "qis", "qubits": 4, "circuit": [{"gate": "h", "targets":
[0]}, {"gate": "x", "targets": [1], "controls": [0]}, {"gate": "x", "targets":
[2], "controls": [1]}, {"gate": "h", "targets": [3]}]}'''
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '205'
Content-Type:
- application/octet-stream
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-blob-type:
- BlockBlob
x-ms-date:
- Wed, 01 May 2024 17:44:55 GMT
x-ms-version:
- '2023-11-03'
method: PUT
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: ''
headers:
content-length:
- '0'
x-ms-version:
- '2023-11-03'
status:
code: 201
message: Created
- request:
body: 'b''{"id": "00000000-0000-0000-0000-000000000001", "name": "Qiskit Sample
- 3-qubit GHZ circuit", "providerId": "ionq", "target": "ionq.simulator", "itemType":
"Job", "containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData",
"inputDataFormat": "ionq.circuit.v1", "inputParams": {"shots": 500, "count":
500}, "metadata": {"qiskit": "True", "name": "Qiskit Sample - 3-qubit GHZ circuit",
"num_qubits": "4", "metadata": "{}", "meas_map": "[0, 1, 2]"}, "outputDataFormat":
"ionq.quantum-results.v1"}'''
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '726'
Content-Type:
- application/json
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: PUT
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcw",
"inputDataFormat": "ionq.circuit.v1", "inputParams": {"shots": 500, "count":
500}, "metadata": {"qiskit": "True", "name": "Qiskit Sample - 3-qubit GHZ
circuit", "num_qubits": "4", "metadata": "{}", "meas_map": "[0, 1, 2]"}, "sessionId":
null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"ionq.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net:443/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
null, "errorData": null, "isCancelling": false, "tags": [], "name": "Qiskit
Sample - 3-qubit GHZ circuit", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "ionq", "target": "ionq.simulator", "creationTime": "2024-05-01T17:44:56.8394593+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1300'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "ionq.circuit.v1", "inputParams": {"shots": 500, "count":
500}, "metadata": {"qiskit": "True", "name": "Qiskit Sample - 3-qubit GHZ
circuit", "num_qubits": "4", "metadata": "{}", "meas_map": "[0, 1, 2]"}, "sessionId":
null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"ionq.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"Qiskit Sample - 3-qubit GHZ circuit", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "ionq", "target": "ionq.simulator", "creationTime": "2024-05-01T17:44:56.8394593+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1536'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=2
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "ionq.circuit.v1", "inputParams": {"shots": 500, "count":
500}, "metadata": {"qiskit": "True", "name": "Qiskit Sample - 3-qubit GHZ
circuit", "num_qubits": "4", "metadata": "{}", "meas_map": "[0, 1, 2]"}, "sessionId":
null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"ionq.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"Qiskit Sample - 3-qubit GHZ circuit", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "ionq", "target": "ionq.simulator", "creationTime": "2024-05-01T17:44:56.8394593+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1536'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=3
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "ionq.circuit.v1", "inputParams": {"shots": 500, "count":
500}, "metadata": {"qiskit": "True", "name": "Qiskit Sample - 3-qubit GHZ
circuit", "num_qubits": "4", "metadata": "{}", "meas_map": "[0, 1, 2]"}, "sessionId":
null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"ionq.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"Qiskit Sample - 3-qubit GHZ circuit", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "ionq", "target": "ionq.simulator", "creationTime": "2024-05-01T17:44:56.8394593+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1536'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=4
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "ionq.circuit.v1", "inputParams": {"shots": 500, "count":
500}, "metadata": {"qiskit": "True", "name": "Qiskit Sample - 3-qubit GHZ
circuit", "num_qubits": "4", "metadata": "{}", "meas_map": "[0, 1, 2]"}, "sessionId":
null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"ionq.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"Qiskit Sample - 3-qubit GHZ circuit", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "ionq", "target": "ionq.simulator", "creationTime": "2024-05-01T17:44:56.8394593+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1536'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=5
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "ionq.circuit.v1", "inputParams": {"shots": 500, "count":
500}, "metadata": {"qiskit": "True", "name": "Qiskit Sample - 3-qubit GHZ
circuit", "num_qubits": "4", "metadata": "{}", "meas_map": "[0, 1, 2]"}, "sessionId":
null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"ionq.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"Qiskit Sample - 3-qubit GHZ circuit", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "ionq", "target": "ionq.simulator", "creationTime": "2024-05-01T17:44:56.8394593+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1536'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=6
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "ionq.circuit.v1", "inputParams": {"shots": 500, "count":
500}, "metadata": {"qiskit": "True", "name": "Qiskit Sample - 3-qubit GHZ
circuit", "num_qubits": "4", "metadata": "{}", "meas_map": "[0, 1, 2]"}, "sessionId":
null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"ionq.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"Qiskit Sample - 3-qubit GHZ circuit", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "ionq", "target": "ionq.simulator", "creationTime": "2024-05-01T17:44:56.8394593+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1536'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=7
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "ionq.circuit.v1", "inputParams": {"shots": 500, "count":
500}, "metadata": {"qiskit": "True", "name": "Qiskit Sample - 3-qubit GHZ
circuit", "num_qubits": "4", "metadata": "{}", "meas_map": "[0, 1, 2]"}, "sessionId":
null, "status": "Succeeded", "jobType": "QuantumComputing", "outputDataFormat":
"ionq.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/rawOutputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:45:00.296Z", "cancellationTime": null,
"quantumComputingData": {"count": 1}, "errorData": null, "isCancelling": false,
"tags": [], "name": "Qiskit Sample - 3-qubit GHZ circuit", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "ionq", "target": "ionq.simulator", "creationTime": "2024-05-01T17:44:56.8394593+00:00",
"endExecutionTime": "2024-05-01T17:45:00.32Z", "costEstimate": {"currencyCode":
"USD", "events": [{"dimensionId": "gs1q", "dimensionName": "1Q Gate Shot",
"measureUnit": "1q gate shot", "amountBilled": 0.0, "amountConsumed": 0.0,
"unitPrice": 0.0}, {"dimensionId": "gs2q", "dimensionName": "2Q Gate Shot",
"measureUnit": "2q gate shot", "amountBilled": 0.0, "amountConsumed": 0.0,
"unitPrice": 0.0}], "estimatedTotal": 0.0}, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1940'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=8
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "ionq.circuit.v1", "inputParams": {"shots": 500, "count":
500}, "metadata": {"qiskit": "True", "name": "Qiskit Sample - 3-qubit GHZ
circuit", "num_qubits": "4", "metadata": "{}", "meas_map": "[0, 1, 2]"}, "sessionId":
null, "status": "Succeeded", "jobType": "QuantumComputing", "outputDataFormat":
"ionq.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/rawOutputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:45:00.296Z", "cancellationTime": null,
"quantumComputingData": {"count": 1}, "errorData": null, "isCancelling": false,
"tags": [], "name": "Qiskit Sample - 3-qubit GHZ circuit", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "ionq", "target": "ionq.simulator", "creationTime": "2024-05-01T17:44:56.8394593+00:00",
"endExecutionTime": "2024-05-01T17:45:00.32Z", "costEstimate": {"currencyCode":
"USD", "events": [{"dimensionId": "gs1q", "dimensionName": "1Q Gate Shot",
"measureUnit": "1q gate shot", "amountBilled": 0.0, "amountConsumed": 0.0,
"unitPrice": 0.0}, {"dimensionId": "gs2q", "dimensionName": "2Q Gate Shot",
"measureUnit": "2q gate shot", "amountBilled": 0.0, "amountConsumed": 0.0,
"unitPrice": 0.0}], "estimatedTotal": 0.0}, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1940'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=9
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "ionq.circuit.v1", "inputParams": {"shots": 500, "count":
500}, "metadata": {"qiskit": "True", "name": "Qiskit Sample - 3-qubit GHZ
circuit", "num_qubits": "4", "metadata": "{}", "meas_map": "[0, 1, 2]"}, "sessionId":
null, "status": "Succeeded", "jobType": "QuantumComputing", "outputDataFormat":
"ionq.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/rawOutputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:45:00.296Z", "cancellationTime": null,
"quantumComputingData": {"count": 1}, "errorData": null, "isCancelling": false,
"tags": [], "name": "Qiskit Sample - 3-qubit GHZ circuit", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "ionq", "target": "ionq.simulator", "creationTime": "2024-05-01T17:44:56.8394593+00:00",
"endExecutionTime": "2024-05-01T17:45:00.32Z", "costEstimate": {"currencyCode":
"USD", "events": [{"dimensionId": "gs1q", "dimensionName": "1Q Gate Shot",
"measureUnit": "1q gate shot", "amountBilled": 0.0, "amountConsumed": 0.0,
"unitPrice": 0.0}, {"dimensionId": "gs2q", "dimensionName": "2Q Gate Shot",
"measureUnit": "2q gate shot", "amountBilled": 0.0, "amountConsumed": 0.0,
"unitPrice": 0.0}], "estimatedTotal": 0.0}, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1940'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/providerStatus?api-version=2022-09-12-preview&test-sequence-id=2
response:
body:
string: '{"value": [{"id": "microsoft-elements", "currentAvailability": "Available",
"targets": [{"id": "microsoft.dft", "currentAvailability": "Available", "averageQueueTime":
0, "statusPage": null}]}, {"id": "ionq", "currentAvailability": "Degraded",
"targets": [{"id": "ionq.qpu", "currentAvailability": "Available", "averageQueueTime":
493967, "statusPage": "https://status.ionq.co"}, {"id": "ionq.qpu.aria-1",
"currentAvailability": "Unavailable", "averageQueueTime": 735673, "statusPage":
"https://status.ionq.co"}, {"id": "ionq.qpu.aria-2", "currentAvailability":
"Unavailable", "averageQueueTime": 0, "statusPage": "https://status.ionq.co"},
{"id": "ionq.simulator", "currentAvailability": "Available", "averageQueueTime":
3, "statusPage": "https://status.ionq.co"}]}, {"id": "microsoft-qc", "currentAvailability":
"Available", "targets": [{"id": "microsoft.estimator", "currentAvailability":
"Available", "averageQueueTime": 0, "statusPage": null}]}, {"id": "pasqal",
"currentAvailability": "Degraded", "targets": [{"id": "pasqal.sim.emu-tn",
"currentAvailability": "Available", "averageQueueTime": 256, "statusPage":
"https://pasqal.com"}, {"id": "pasqal.qpu.fresnel", "currentAvailability":
"Degraded", "averageQueueTime": 0, "statusPage": "https://pasqal.com"}]},
{"id": "rigetti", "currentAvailability": "Degraded", "targets": [{"id": "rigetti.sim.qvm",
"currentAvailability": "Available", "averageQueueTime": 5, "statusPage": "https://rigetti.statuspage.io/"},
{"id": "rigetti.qpu.ankaa-2", "currentAvailability": "Degraded", "averageQueueTime":
5, "statusPage": "https://rigetti.statuspage.io/"}]}, {"id": "qci", "currentAvailability":
"Available", "targets": [{"id": "qci.simulator", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": "https://quantumcircuits.com"}, {"id":
"qci.machine1", "currentAvailability": "Available", "averageQueueTime": 1,
"statusPage": "https://quantumcircuits.com"}, {"id": "qci.simulator.noisy",
"currentAvailability": "Available", "averageQueueTime": 0, "statusPage": "https://quantumcircuits.com"}]},
{"id": "quantinuum", "currentAvailability": "Available", "targets": [{"id":
"quantinuum.qpu.h1-1", "currentAvailability": "Available", "averageQueueTime":
25016, "statusPage": "https://www.quantinuum.com/hardware/h1"}, {"id": "quantinuum.sim.h1-1sc",
"currentAvailability": "Available", "averageQueueTime": 4, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-1e", "currentAvailability": "Available", "averageQueueTime":
3, "statusPage": "https://www.quantinuum.com/hardware/h1"}, {"id": "quantinuum.qpu.h2-1",
"currentAvailability": "Available", "averageQueueTime": 49414, "statusPage":
"https://www.quantinuum.com/hardware/h2"}, {"id": "quantinuum.sim.h2-1sc",
"currentAvailability": "Available", "averageQueueTime": 0, "statusPage": "https://www.quantinuum.com/hardware/h2"},
{"id": "quantinuum.sim.h2-1e", "currentAvailability": "Available", "averageQueueTime":
648, "statusPage": "https://www.quantinuum.com/hardware/h2"}, {"id": "quantinuum.sim.h1-1sc-preview",
"currentAvailability": "Available", "averageQueueTime": 4, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-1e-preview", "currentAvailability": "Available",
"averageQueueTime": 3, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-2e-preview", "currentAvailability": "Available",
"averageQueueTime": 27901, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.qpu.h1-1-preview", "currentAvailability": "Available",
"averageQueueTime": 25016, "statusPage": "https://www.quantinuum.com/hardware/h1"}]},
{"id": "Microsoft.Test", "currentAvailability": "Available", "targets": [{"id":
"echo-rigetti", "currentAvailability": "Available", "averageQueueTime": 1,
"statusPage": ""}, {"id": "echo-quantinuum", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": ""}, {"id": "echo-qci", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}, {"id": "echo-ionq",
"currentAvailability": "Available", "averageQueueTime": 1, "statusPage": ""},
{"id": "echo-aquarius", "currentAvailability": "Available", "averageQueueTime":
1, "statusPage": ""}, {"id": "sparse-sim-rigetti", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}, {"id": "sparse-sim-quantinuum",
"currentAvailability": "Available", "averageQueueTime": 1, "statusPage": ""},
{"id": "sparse-sim-qci", "currentAvailability": "Available", "averageQueueTime":
1, "statusPage": ""}, {"id": "sparse-sim-ionq", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": ""}, {"id": "echo-output", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}]}], "nextLink": null}'
headers:
connection:
- keep-alive
content-length:
- '4771'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=10
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "ionq.circuit.v1", "inputParams": {"shots": 500, "count":
500}, "metadata": {"qiskit": "True", "name": "Qiskit Sample - 3-qubit GHZ
circuit", "num_qubits": "4", "metadata": "{}", "meas_map": "[0, 1, 2]"}, "sessionId":
null, "status": "Succeeded", "jobType": "QuantumComputing", "outputDataFormat":
"ionq.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/rawOutputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3DQiskit%2BSample%2B-%2B3-qubit%2BGHZ%2Bcircuit-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:45:00.296Z", "cancellationTime": null,
"quantumComputingData": {"count": 1}, "errorData": null, "isCancelling": false,
"tags": [], "name": "Qiskit Sample - 3-qubit GHZ circuit", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "ionq", "target": "ionq.simulator", "creationTime": "2024-05-01T17:44:56.8394593+00:00",
"endExecutionTime": "2024-05-01T17:45:00.32Z", "costEstimate": {"currencyCode":
"USD", "events": [{"dimensionId": "gs1q", "dimensionName": "1Q Gate Shot",
"measureUnit": "1q gate shot", "amountBilled": 0.0, "amountConsumed": 0.0,
"unitPrice": 0.0}, {"dimensionId": "gs2q", "dimensionName": "2Q Gate Shot",
"measureUnit": "2q gate shot", "amountBilled": 0.0, "amountConsumed": 0.0,
"unitPrice": 0.0}], "estimatedTotal": 0.0}, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1940'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
version: 1
|
azure-quantum-python/azure-quantum/tests/unit/recordings/test_plugins_submit_qiskit_to_ionq_with_default_shots.yaml/0
|
{
"file_path": "azure-quantum-python/azure-quantum/tests/unit/recordings/test_plugins_submit_qiskit_to_ionq_with_default_shots.yaml",
"repo_id": "azure-quantum-python",
"token_count": 20909
}
| 424 |
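The cassette above captures the full client flow for the test it backs: an OAuth client-credentials token request, providerStatus lookups, and repeated job GETs that poll until the status moves from Waiting to Succeeded, at which point the response carries begin/end execution times and a zero-cost estimate for the simulator. A minimal sketch of the kind of client code that produces this traffic follows; the workspace values are placeholders, the circuit is illustrative (the recorded metadata reports num_qubits 4, so the test's exact circuit may differ), and the API surface shown is the azure-quantum Qiskit plugin as assumed, not confirmed by this file.

# Sketch only: assumed azure-quantum Qiskit plugin usage; all values are placeholders.
from qiskit import QuantumCircuit
from azure.quantum import Workspace
from azure.quantum.qiskit import AzureQuantumProvider

workspace = Workspace(
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    resource_group="myresourcegroup",
    name="myworkspace",
    location="eastus",
)
provider = AzureQuantumProvider(workspace)        # connecting triggers the providerStatus GETs
backend = provider.get_backend("ionq.simulator")

# 3-qubit GHZ circuit, matching the recorded job name.
circuit = QuantumCircuit(3, 3)
circuit.name = "Qiskit Sample - 3-qubit GHZ circuit"
circuit.h(0)
circuit.cx(0, 1)
circuit.cx(1, 2)
circuit.measure(range(3), range(3))

job = backend.run(circuit)  # no shots argument: the plugin applies its default (500 in this recording)
result = job.result()       # polls the job URI until status == "Succeeded"
print(result.get_counts())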
interactions:
- request:
body: client_id=PLACEHOLDER&grant_type=client_credentials&client_assertion=PLACEHOLDER&client_info=1&client_assertion_type=PLACEHOLDER&scope=https%3A%2F%2Fquantum.microsoft.com%2F.default
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '181'
Content-Type:
- application/x-www-form-urlencoded
User-Agent:
- azsdk-python-identity/1.16.0 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-client-current-telemetry:
- 4|730,2|
x-client-os:
- win32
x-client-sku:
- MSAL.Python
x-client-ver:
- 1.28.0
method: POST
uri: https://login.microsoftonline.com/00000000-0000-0000-0000-000000000000/oauth2/v2.0/token
response:
body:
string: '{"token_type": "Bearer", "expires_in": 1746121986, "ext_expires_in":
1746121986, "refresh_in": 31536000, "access_token": "PLACEHOLDER"}'
headers:
content-length:
- '135'
content-type:
- application/json; charset=utf-8
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/providerStatus?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"value": [{"id": "microsoft-elements", "currentAvailability": "Available",
"targets": [{"id": "microsoft.dft", "currentAvailability": "Available", "averageQueueTime":
0, "statusPage": null}]}, {"id": "ionq", "currentAvailability": "Degraded",
"targets": [{"id": "ionq.qpu", "currentAvailability": "Available", "averageQueueTime":
500482, "statusPage": "https://status.ionq.co"}, {"id": "ionq.qpu.aria-1",
"currentAvailability": "Unavailable", "averageQueueTime": 737161, "statusPage":
"https://status.ionq.co"}, {"id": "ionq.qpu.aria-2", "currentAvailability":
"Unavailable", "averageQueueTime": 0, "statusPage": "https://status.ionq.co"},
{"id": "ionq.simulator", "currentAvailability": "Available", "averageQueueTime":
3, "statusPage": "https://status.ionq.co"}]}, {"id": "microsoft-qc", "currentAvailability":
"Available", "targets": [{"id": "microsoft.estimator", "currentAvailability":
"Available", "averageQueueTime": 0, "statusPage": null}]}, {"id": "pasqal",
"currentAvailability": "Degraded", "targets": [{"id": "pasqal.sim.emu-tn",
"currentAvailability": "Available", "averageQueueTime": 256, "statusPage":
"https://pasqal.com"}, {"id": "pasqal.qpu.fresnel", "currentAvailability":
"Degraded", "averageQueueTime": 0, "statusPage": "https://pasqal.com"}]},
{"id": "rigetti", "currentAvailability": "Degraded", "targets": [{"id": "rigetti.sim.qvm",
"currentAvailability": "Available", "averageQueueTime": 5, "statusPage": "https://rigetti.statuspage.io/"},
{"id": "rigetti.qpu.ankaa-2", "currentAvailability": "Degraded", "averageQueueTime":
5, "statusPage": "https://rigetti.statuspage.io/"}]}, {"id": "qci", "currentAvailability":
"Available", "targets": [{"id": "qci.simulator", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": "https://quantumcircuits.com"}, {"id":
"qci.machine1", "currentAvailability": "Available", "averageQueueTime": 1,
"statusPage": "https://quantumcircuits.com"}, {"id": "qci.simulator.noisy",
"currentAvailability": "Available", "averageQueueTime": 0, "statusPage": "https://quantumcircuits.com"}]},
{"id": "quantinuum", "currentAvailability": "Available", "targets": [{"id":
"quantinuum.qpu.h1-1", "currentAvailability": "Available", "averageQueueTime":
25016, "statusPage": "https://www.quantinuum.com/hardware/h1"}, {"id": "quantinuum.sim.h1-1sc",
"currentAvailability": "Available", "averageQueueTime": 4, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-1e", "currentAvailability": "Available", "averageQueueTime":
15, "statusPage": "https://www.quantinuum.com/hardware/h1"}, {"id": "quantinuum.qpu.h2-1",
"currentAvailability": "Available", "averageQueueTime": 349980, "statusPage":
"https://www.quantinuum.com/hardware/h2"}, {"id": "quantinuum.sim.h2-1sc",
"currentAvailability": "Available", "averageQueueTime": 0, "statusPage": "https://www.quantinuum.com/hardware/h2"},
{"id": "quantinuum.sim.h2-1e", "currentAvailability": "Available", "averageQueueTime":
845, "statusPage": "https://www.quantinuum.com/hardware/h2"}, {"id": "quantinuum.sim.h1-1sc-preview",
"currentAvailability": "Available", "averageQueueTime": 4, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-1e-preview", "currentAvailability": "Available",
"averageQueueTime": 15, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-2e-preview", "currentAvailability": "Available",
"averageQueueTime": 27901, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.qpu.h1-1-preview", "currentAvailability": "Available",
"averageQueueTime": 25016, "statusPage": "https://www.quantinuum.com/hardware/h1"}]},
{"id": "Microsoft.Test", "currentAvailability": "Available", "targets": [{"id":
"echo-rigetti", "currentAvailability": "Available", "averageQueueTime": 1,
"statusPage": ""}, {"id": "echo-quantinuum", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": ""}, {"id": "echo-qci", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}, {"id": "echo-ionq",
"currentAvailability": "Available", "averageQueueTime": 1, "statusPage": ""},
{"id": "echo-aquarius", "currentAvailability": "Available", "averageQueueTime":
1, "statusPage": ""}, {"id": "sparse-sim-rigetti", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}, {"id": "sparse-sim-quantinuum",
"currentAvailability": "Available", "averageQueueTime": 1, "statusPage": ""},
{"id": "sparse-sim-qci", "currentAvailability": "Available", "averageQueueTime":
1, "statusPage": ""}, {"id": "sparse-sim-ionq", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": ""}, {"id": "echo-output", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}]}], "nextLink": null}'
headers:
connection:
- keep-alive
content-length:
- '4774'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qiskit azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/providerStatus?api-version=2022-09-12-preview&test-sequence-id=2
response:
body:
string: '{"value": [{"id": "microsoft-elements", "currentAvailability": "Available",
"targets": [{"id": "microsoft.dft", "currentAvailability": "Available", "averageQueueTime":
0, "statusPage": null}]}, {"id": "ionq", "currentAvailability": "Degraded",
"targets": [{"id": "ionq.qpu", "currentAvailability": "Available", "averageQueueTime":
500482, "statusPage": "https://status.ionq.co"}, {"id": "ionq.qpu.aria-1",
"currentAvailability": "Unavailable", "averageQueueTime": 737161, "statusPage":
"https://status.ionq.co"}, {"id": "ionq.qpu.aria-2", "currentAvailability":
"Unavailable", "averageQueueTime": 0, "statusPage": "https://status.ionq.co"},
{"id": "ionq.simulator", "currentAvailability": "Available", "averageQueueTime":
3, "statusPage": "https://status.ionq.co"}]}, {"id": "microsoft-qc", "currentAvailability":
"Available", "targets": [{"id": "microsoft.estimator", "currentAvailability":
"Available", "averageQueueTime": 0, "statusPage": null}]}, {"id": "pasqal",
"currentAvailability": "Degraded", "targets": [{"id": "pasqal.sim.emu-tn",
"currentAvailability": "Available", "averageQueueTime": 256, "statusPage":
"https://pasqal.com"}, {"id": "pasqal.qpu.fresnel", "currentAvailability":
"Degraded", "averageQueueTime": 0, "statusPage": "https://pasqal.com"}]},
{"id": "rigetti", "currentAvailability": "Degraded", "targets": [{"id": "rigetti.sim.qvm",
"currentAvailability": "Available", "averageQueueTime": 5, "statusPage": "https://rigetti.statuspage.io/"},
{"id": "rigetti.qpu.ankaa-2", "currentAvailability": "Degraded", "averageQueueTime":
5, "statusPage": "https://rigetti.statuspage.io/"}]}, {"id": "qci", "currentAvailability":
"Available", "targets": [{"id": "qci.simulator", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": "https://quantumcircuits.com"}, {"id":
"qci.machine1", "currentAvailability": "Available", "averageQueueTime": 1,
"statusPage": "https://quantumcircuits.com"}, {"id": "qci.simulator.noisy",
"currentAvailability": "Available", "averageQueueTime": 0, "statusPage": "https://quantumcircuits.com"}]},
{"id": "quantinuum", "currentAvailability": "Available", "targets": [{"id":
"quantinuum.qpu.h1-1", "currentAvailability": "Available", "averageQueueTime":
25016, "statusPage": "https://www.quantinuum.com/hardware/h1"}, {"id": "quantinuum.sim.h1-1sc",
"currentAvailability": "Available", "averageQueueTime": 4, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-1e", "currentAvailability": "Available", "averageQueueTime":
15, "statusPage": "https://www.quantinuum.com/hardware/h1"}, {"id": "quantinuum.qpu.h2-1",
"currentAvailability": "Available", "averageQueueTime": 349980, "statusPage":
"https://www.quantinuum.com/hardware/h2"}, {"id": "quantinuum.sim.h2-1sc",
"currentAvailability": "Available", "averageQueueTime": 0, "statusPage": "https://www.quantinuum.com/hardware/h2"},
{"id": "quantinuum.sim.h2-1e", "currentAvailability": "Available", "averageQueueTime":
845, "statusPage": "https://www.quantinuum.com/hardware/h2"}, {"id": "quantinuum.sim.h1-1sc-preview",
"currentAvailability": "Available", "averageQueueTime": 4, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-1e-preview", "currentAvailability": "Available",
"averageQueueTime": 15, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-2e-preview", "currentAvailability": "Available",
"averageQueueTime": 27901, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.qpu.h1-1-preview", "currentAvailability": "Available",
"averageQueueTime": 25016, "statusPage": "https://www.quantinuum.com/hardware/h1"}]},
{"id": "Microsoft.Test", "currentAvailability": "Available", "targets": [{"id":
"echo-rigetti", "currentAvailability": "Available", "averageQueueTime": 1,
"statusPage": ""}, {"id": "echo-quantinuum", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": ""}, {"id": "echo-qci", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}, {"id": "echo-ionq",
"currentAvailability": "Available", "averageQueueTime": 1, "statusPage": ""},
{"id": "echo-aquarius", "currentAvailability": "Available", "averageQueueTime":
1, "statusPage": ""}, {"id": "sparse-sim-rigetti", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}, {"id": "sparse-sim-quantinuum",
"currentAvailability": "Available", "averageQueueTime": 1, "statusPage": ""},
{"id": "sparse-sim-qci", "currentAvailability": "Available", "averageQueueTime":
1, "statusPage": ""}, {"id": "sparse-sim-ionq", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": ""}, {"id": "echo-output", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}]}], "nextLink": null}'
headers:
connection:
- keep-alive
content-length:
- '4774'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
version: 1
|
azure-quantum-python/azure-quantum/tests/unit/recordings/test_qiskit_get_ionq_native_gateset.yaml/0
|
{
"file_path": "azure-quantum-python/azure-quantum/tests/unit/recordings/test_qiskit_get_ionq_native_gateset.yaml",
"repo_id": "azure-quantum-python",
"token_count": 5294
}
| 425 |
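This recording is much shorter because the test it backs never submits a job: it acquires a token and then reads providerStatus twice, the endpoint that reports each provider's targets, availability, and average queue times. A sketch of the client side follows; workspace values are placeholders, and the gateset keyword is an assumption inferred from the test's name rather than anything visible in the recording.

# Sketch only: assumed azure-quantum API; all values are placeholders.
from azure.quantum import Workspace
from azure.quantum.qiskit import AzureQuantumProvider

workspace = Workspace(
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    resource_group="myresourcegroup",
    name="myworkspace",
    location="eastus",
)

# Enumerating targets is what issues the providerStatus GET recorded above.
for target in workspace.get_targets():
    print(target.name, target.current_availability, target.average_queue_time)

# The test name suggests requesting the IonQ backend in its native gateset;
# the gateset keyword is an assumed plugin parameter, not confirmed by this file.
provider = AzureQuantumProvider(workspace)
backend = provider.get_backend("ionq.simulator", gateset="native")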
interactions:
- request:
body: client_id=PLACEHOLDER&grant_type=client_credentials&client_assertion=PLACEHOLDER&client_info=1&client_assertion_type=PLACEHOLDER&scope=https%3A%2F%2Fquantum.microsoft.com%2F.default
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '181'
Content-Type:
- application/x-www-form-urlencoded
User-Agent:
- azsdk-python-identity/1.16.0 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-client-current-telemetry:
- 4|730,2|
x-client-os:
- win32
x-client-sku:
- MSAL.Python
x-client-ver:
- 1.28.0
method: POST
uri: https://login.microsoftonline.com/00000000-0000-0000-0000-000000000000/oauth2/v2.0/token
response:
body:
string: '{"token_type": "Bearer", "expires_in": 1746122232, "ext_expires_in":
1746122232, "refresh_in": 31536000, "access_token": "PLACEHOLDER"}'
headers:
content-length:
- '135'
content-type:
- application/json; charset=utf-8
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/providerStatus?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"value": [{"id": "microsoft-elements", "currentAvailability": "Available",
"targets": [{"id": "microsoft.dft", "currentAvailability": "Available", "averageQueueTime":
0, "statusPage": null}]}, {"id": "ionq", "currentAvailability": "Degraded",
"targets": [{"id": "ionq.qpu", "currentAvailability": "Available", "averageQueueTime":
500482, "statusPage": "https://status.ionq.co"}, {"id": "ionq.qpu.aria-1",
"currentAvailability": "Unavailable", "averageQueueTime": 737161, "statusPage":
"https://status.ionq.co"}, {"id": "ionq.qpu.aria-2", "currentAvailability":
"Unavailable", "averageQueueTime": 0, "statusPage": "https://status.ionq.co"},
{"id": "ionq.simulator", "currentAvailability": "Available", "averageQueueTime":
3, "statusPage": "https://status.ionq.co"}]}, {"id": "microsoft-qc", "currentAvailability":
"Available", "targets": [{"id": "microsoft.estimator", "currentAvailability":
"Available", "averageQueueTime": 0, "statusPage": null}]}, {"id": "pasqal",
"currentAvailability": "Degraded", "targets": [{"id": "pasqal.sim.emu-tn",
"currentAvailability": "Available", "averageQueueTime": 256, "statusPage":
"https://pasqal.com"}, {"id": "pasqal.qpu.fresnel", "currentAvailability":
"Degraded", "averageQueueTime": 0, "statusPage": "https://pasqal.com"}]},
{"id": "rigetti", "currentAvailability": "Degraded", "targets": [{"id": "rigetti.sim.qvm",
"currentAvailability": "Available", "averageQueueTime": 5, "statusPage": "https://rigetti.statuspage.io/"},
{"id": "rigetti.qpu.ankaa-2", "currentAvailability": "Degraded", "averageQueueTime":
5, "statusPage": "https://rigetti.statuspage.io/"}]}, {"id": "qci", "currentAvailability":
"Available", "targets": [{"id": "qci.simulator", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": "https://quantumcircuits.com"}, {"id":
"qci.machine1", "currentAvailability": "Available", "averageQueueTime": 1,
"statusPage": "https://quantumcircuits.com"}, {"id": "qci.simulator.noisy",
"currentAvailability": "Available", "averageQueueTime": 0, "statusPage": "https://quantumcircuits.com"}]},
{"id": "quantinuum", "currentAvailability": "Available", "targets": [{"id":
"quantinuum.qpu.h1-1", "currentAvailability": "Available", "averageQueueTime":
25016, "statusPage": "https://www.quantinuum.com/hardware/h1"}, {"id": "quantinuum.sim.h1-1sc",
"currentAvailability": "Available", "averageQueueTime": 4, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-1e", "currentAvailability": "Available", "averageQueueTime":
4, "statusPage": "https://www.quantinuum.com/hardware/h1"}, {"id": "quantinuum.qpu.h2-1",
"currentAvailability": "Available", "averageQueueTime": 349980, "statusPage":
"https://www.quantinuum.com/hardware/h2"}, {"id": "quantinuum.sim.h2-1sc",
"currentAvailability": "Available", "averageQueueTime": 0, "statusPage": "https://www.quantinuum.com/hardware/h2"},
{"id": "quantinuum.sim.h2-1e", "currentAvailability": "Available", "averageQueueTime":
994, "statusPage": "https://www.quantinuum.com/hardware/h2"}, {"id": "quantinuum.sim.h1-1sc-preview",
"currentAvailability": "Available", "averageQueueTime": 4, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-1e-preview", "currentAvailability": "Available",
"averageQueueTime": 4, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.sim.h1-2e-preview", "currentAvailability": "Available",
"averageQueueTime": 27901, "statusPage": "https://www.quantinuum.com/hardware/h1"},
{"id": "quantinuum.qpu.h1-1-preview", "currentAvailability": "Available",
"averageQueueTime": 25016, "statusPage": "https://www.quantinuum.com/hardware/h1"}]},
{"id": "Microsoft.Test", "currentAvailability": "Available", "targets": [{"id":
"echo-rigetti", "currentAvailability": "Available", "averageQueueTime": 1,
"statusPage": ""}, {"id": "echo-quantinuum", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": ""}, {"id": "echo-qci", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}, {"id": "echo-ionq",
"currentAvailability": "Available", "averageQueueTime": 1, "statusPage": ""},
{"id": "echo-aquarius", "currentAvailability": "Available", "averageQueueTime":
1, "statusPage": ""}, {"id": "sparse-sim-rigetti", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}, {"id": "sparse-sim-quantinuum",
"currentAvailability": "Available", "averageQueueTime": 1, "statusPage": ""},
{"id": "sparse-sim-qci", "currentAvailability": "Available", "averageQueueTime":
1, "statusPage": ""}, {"id": "sparse-sim-ionq", "currentAvailability": "Available",
"averageQueueTime": 1, "statusPage": ""}, {"id": "echo-output", "currentAvailability":
"Available", "averageQueueTime": 1, "statusPage": ""}]}], "nextLink": null}'
headers:
connection:
- keep-alive
content-length:
- '4772'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: client_id=PLACEHOLDER&grant_type=client_credentials&client_assertion=PLACEHOLDER&client_info=1&client_assertion_type=PLACEHOLDER&scope=https%3A%2F%2Fquantum.microsoft.com%2F.default
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '181'
Content-Type:
- application/x-www-form-urlencoded
User-Agent:
- azsdk-python-identity/1.16.0 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-client-current-telemetry:
- 4|730,2|
x-client-os:
- win32
x-client-sku:
- MSAL.Python
x-client-ver:
- 1.28.0
method: POST
uri: https://login.microsoftonline.com/00000000-0000-0000-0000-000000000000/oauth2/v2.0/token
response:
body:
string: '{"token_type": "Bearer", "expires_in": 1746122234, "ext_expires_in":
1746122234, "refresh_in": 31536000, "access_token": "PLACEHOLDER"}'
headers:
content-length:
- '135'
content-type:
- application/json; charset=utf-8
status:
code: 200
message: OK
- request:
body: 'b''{"containerName": "job-00000000-0000-0000-0000-000000000001"}'''
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '64'
Content-Type:
- application/json
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: POST
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/storage/sasUri?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"sasUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl"}'
headers:
connection:
- keep-alive
content-length:
- '174'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 17:57:16 GMT
x-ms-version:
- '2023-11-03'
method: GET
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?restype=container&sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: "\uFEFF<?xml version=\"1.0\" encoding=\"utf-8\"?><Error><Code>ContainerNotFound</Code><Message>The
specified container does not exist.\nRequestId:e5292a6d-201e-000b-6ef1-9bd7ab000000\nTime:2024-05-01T17:57:18.3091261Z</Message></Error>"
headers:
content-length:
- '223'
content-type:
- application/xml
x-ms-version:
- '2023-11-03'
status:
code: 404
message: The specified container does not exist.
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '0'
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 17:57:17 GMT
x-ms-version:
- '2023-11-03'
method: PUT
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?restype=container&sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: ''
headers:
content-length:
- '0'
x-ms-version:
- '2023-11-03'
status:
code: 201
message: Created
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 17:57:17 GMT
x-ms-version:
- '2023-11-03'
method: GET
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?restype=container&sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: ''
headers:
content-length:
- '0'
x-ms-lease-state:
- available
x-ms-lease-status:
- unlocked
x-ms-version:
- '2023-11-03'
status:
code: 200
message: OK
- request:
body: 'b''%Result = type opaque\n%Qubit = type opaque\n\ndefine void @ENTRYPOINT__main()
#0 {\n call void @__quantum__qis__h__body(%Qubit* inttoptr (i64 0 to %Qubit*))\n call
void @__quantum__qis__cx__body(%Qubit* inttoptr (i64 0 to %Qubit*), %Qubit*
inttoptr (i64 1 to %Qubit*))\n call void @__quantum__qis__h__body(%Qubit* inttoptr
(i64 2 to %Qubit*))\n call void @__quantum__qis__cz__body(%Qubit* inttoptr
(i64 2 to %Qubit*), %Qubit* inttoptr (i64 0 to %Qubit*))\n call void @__quantum__qis__h__body(%Qubit*
inttoptr (i64 2 to %Qubit*))\n call void @__quantum__qis__h__body(%Qubit* inttoptr
(i64 3 to %Qubit*))\n call void @__quantum__qis__cz__body(%Qubit* inttoptr
(i64 3 to %Qubit*), %Qubit* inttoptr (i64 1 to %Qubit*))\n call void @__quantum__qis__h__body(%Qubit*
inttoptr (i64 3 to %Qubit*))\n call void @__quantum__qis__mz__body(%Qubit*
inttoptr (i64 2 to %Qubit*), %Result* inttoptr (i64 0 to %Result*)) #1\n call
void @__quantum__qis__mz__body(%Qubit* inttoptr (i64 3 to %Qubit*), %Result*
inttoptr (i64 1 to %Result*)) #1\n call void @__quantum__rt__tuple_record_output(i64
2, i8* null)\n call void @__quantum__rt__result_record_output(%Result* inttoptr
(i64 0 to %Result*), i8* null)\n call void @__quantum__rt__result_record_output(%Result*
inttoptr (i64 1 to %Result*), i8* null)\n ret void\n}\n\ndeclare void @__quantum__qis__ccx__body(%Qubit*,
%Qubit*, %Qubit*)\ndeclare void @__quantum__qis__cx__body(%Qubit*, %Qubit*)\ndeclare
void @__quantum__qis__cy__body(%Qubit*, %Qubit*)\ndeclare void @__quantum__qis__cz__body(%Qubit*,
%Qubit*)\ndeclare void @__quantum__qis__rx__body(double, %Qubit*)\ndeclare void
@__quantum__qis__rxx__body(double, %Qubit*, %Qubit*)\ndeclare void @__quantum__qis__ry__body(double,
%Qubit*)\ndeclare void @__quantum__qis__ryy__body(double, %Qubit*, %Qubit*)\ndeclare
void @__quantum__qis__rz__body(double, %Qubit*)\ndeclare void @__quantum__qis__rzz__body(double,
%Qubit*, %Qubit*)\ndeclare void @__quantum__qis__h__body(%Qubit*)\ndeclare void
@__quantum__qis__s__body(%Qubit*)\ndeclare void @__quantum__qis__s__adj(%Qubit*)\ndeclare
void @__quantum__qis__t__body(%Qubit*)\ndeclare void @__quantum__qis__t__adj(%Qubit*)\ndeclare
void @__quantum__qis__x__body(%Qubit*)\ndeclare void @__quantum__qis__y__body(%Qubit*)\ndeclare
void @__quantum__qis__z__body(%Qubit*)\ndeclare void @__quantum__qis__swap__body(%Qubit*,
%Qubit*)\ndeclare void @__quantum__qis__mz__body(%Qubit*, %Result* writeonly)
#1\ndeclare void @__quantum__rt__result_record_output(%Result*, i8*)\ndeclare
void @__quantum__rt__array_record_output(i64, i8*)\ndeclare void @__quantum__rt__tuple_record_output(i64,
i8*)\n\nattributes #0 = { "entry_point" "output_labeling_schema" "qir_profiles"="base_profile"
"required_num_qubits"="4" "required_num_results"="2" }\nattributes #1 = { "irreversible"
}\n\n; module flags\n\n!llvm.module.flags = !{!0, !1, !2, !3}\n\n!0 = !{i32
1, !"qir_major_version", i32 1}\n!1 = !{i32 7, !"qir_minor_version", i32 0}\n!2
= !{i32 1, !"dynamic_qubit_management", i1 false}\n!3 = !{i32 1, !"dynamic_result_management",
i1 false}\n'''
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '3073'
Content-Type:
- application/octet-stream
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-blob-type:
- BlockBlob
x-ms-date:
- Wed, 01 May 2024 17:57:18 GMT
x-ms-version:
- '2023-11-03'
method: PUT
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: ''
headers:
content-length:
- '0'
x-ms-version:
- '2023-11-03'
status:
code: 201
message: Created
- request:
body: 'b''{"id": "00000000-0000-0000-0000-000000000001", "name": "quantinuum-job",
"providerId": "quantinuum", "target": "quantinuum.sim.h2-1e", "itemType": "Job",
"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "outputDataFormat":
"microsoft.quantum-results.v1"}'''
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '636'
Content-Type:
- application/json
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: PUT
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcw",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"microsoft.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net:443/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
null, "errorData": null, "isCancelling": false, "tags": [], "name": "quantinuum-job",
"id": "00000000-0000-0000-0000-000000000001", "providerId": "quantinuum",
"target": "quantinuum.sim.h2-1e", "creationTime": "2024-05-01T17:57:20.3515595+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1228'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"microsoft.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
null, "errorData": null, "isCancelling": false, "tags": [], "name": "quantinuum-job",
"id": "00000000-0000-0000-0000-000000000001", "providerId": "quantinuum",
"target": "quantinuum.sim.h2-1e", "creationTime": "2024-05-01T17:57:20.3515595+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1394'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=2
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"microsoft.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
null, "errorData": null, "isCancelling": false, "tags": [], "name": "quantinuum-job",
"id": "00000000-0000-0000-0000-000000000001", "providerId": "quantinuum",
"target": "quantinuum.sim.h2-1e", "creationTime": "2024-05-01T17:57:20.3515595+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1394'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=3
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"microsoft.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
null, "errorData": null, "isCancelling": false, "tags": [], "name": "quantinuum-job",
"id": "00000000-0000-0000-0000-000000000001", "providerId": "quantinuum",
"target": "quantinuum.sim.h2-1e", "creationTime": "2024-05-01T17:57:20.3515595+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1394'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=4
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"microsoft.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
null, "errorData": null, "isCancelling": false, "tags": [], "name": "quantinuum-job",
"id": "00000000-0000-0000-0000-000000000001", "providerId": "quantinuum",
"target": "quantinuum.sim.h2-1e", "creationTime": "2024-05-01T17:57:20.3515595+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1394'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=5
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"microsoft.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"quantinuum-job", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"quantinuum", "target": "quantinuum.sim.h2-1e", "creationTime": "2024-05-01T17:57:20.3515595+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1402'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=6
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"microsoft.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"quantinuum-job", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"quantinuum", "target": "quantinuum.sim.h2-1e", "creationTime": "2024-05-01T17:57:20.3515595+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1402'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=7
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"microsoft.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"quantinuum-job", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"quantinuum", "target": "quantinuum.sim.h2-1e", "creationTime": "2024-05-01T17:57:20.3515595+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1402'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=8
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"microsoft.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"quantinuum-job", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"quantinuum", "target": "quantinuum.sim.h2-1e", "creationTime": "2024-05-01T17:57:20.3515595+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1402'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=9
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"microsoft.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"quantinuum-job", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"quantinuum", "target": "quantinuum.sim.h2-1e", "creationTime": "2024-05-01T17:57:20.3515595+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1402'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=10
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "metadata": null,
"sessionId": null, "status": "Executing", "jobType": "QuantumComputing", "outputDataFormat":
"microsoft.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:57:35.619452+00:00", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "quantinuum-job", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "quantinuum", "target": "quantinuum.sim.h2-1e", "creationTime":
"2024-05-01T17:57:20.3515595+00:00", "endExecutionTime": null, "costEstimate":
null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1434'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=11
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "metadata": null,
"sessionId": null, "status": "Succeeded", "jobType": "QuantumComputing", "outputDataFormat":
"microsoft.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:57:35.619452+00:00", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "quantinuum-job", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "quantinuum", "target": "quantinuum.sim.h2-1e", "creationTime":
"2024-05-01T17:57:20.3515595+00:00", "endExecutionTime": "2024-05-01T17:57:38.499159+00:00",
"costEstimate": {"currencyCode": "USD", "events": [{"dimensionId": "ehqc",
"dimensionName": "EHQC", "measureUnit": "hqc", "amountBilled": 5.01, "amountConsumed":
5.01, "unitPrice": 0.0}], "estimatedTotal": 0.0}, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1654'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp-azure-quantum-qsharp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=12
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "qir.v1", "inputParams": {"entryPoint": "ENTRYPOINT__main",
"arguments": [], "targetCapability": "AdaptiveExecution"}, "metadata": null,
"sessionId": null, "status": "Succeeded", "jobType": "QuantumComputing", "outputDataFormat":
"microsoft.quantum-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:57:35.619452+00:00", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "quantinuum-job", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "quantinuum", "target": "quantinuum.sim.h2-1e", "creationTime":
"2024-05-01T17:57:20.3515595+00:00", "endExecutionTime": "2024-05-01T17:57:38.499159+00:00",
"costEstimate": {"currencyCode": "USD", "events": [{"dimensionId": "ehqc",
"dimensionName": "EHQC", "measureUnit": "hqc", "amountBilled": 5.01, "amountConsumed":
5.01, "unitPrice": 0.0}], "estimatedTotal": 0.0}, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1654'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 17:57:45 GMT
x-ms-range:
- bytes=0-33554431
x-ms-version:
- '2023-11-03'
method: GET
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dquantinuum-job-00000000-0000-0000-0000-000000000001.output.json
response:
body:
string: '{"Histogram": ["(0, 0)", 1.0]}'
headers:
accept-ranges:
- bytes
content-length:
- '30'
content-range:
- bytes 0-27/28
content-type:
- application/octet-stream
x-ms-blob-content-md5:
- hvzyhl0MttfupoQSbqv98Q==
x-ms-blob-type:
- BlockBlob
x-ms-creation-time:
- Wed, 01 May 2024 17:57:39 GMT
x-ms-lease-state:
- available
x-ms-lease-status:
- unlocked
x-ms-server-encrypted:
- 'true'
x-ms-version:
- '2023-11-03'
status:
code: 206
message: Partial Content
version: 1
|
azure-quantum-python/azure-quantum/tests/unit/recordings/test_qsharp_qir_inline_quantinuum_h2.yaml/0
|
{
"file_path": "azure-quantum-python/azure-quantum/tests/unit/recordings/test_qsharp_qir_inline_quantinuum_h2.yaml",
"repo_id": "azure-quantum-python",
"token_count": 21747
}
| 426 |
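The cassette above replays the client side of Azure Quantum's job-polling protocol: repeated GETs on the job resource while `status` moves from `Waiting` through `Executing` to `Succeeded`, then a ranged GET against the output blob. The sketch below reproduces that flow with the `azure-quantum` Python client; it is an illustration only, and the resource ID, location, and job ID are placeholders rather than values from the recording.

from azure.quantum import Workspace

# Placeholders: supply your own workspace coordinates and job ID.
workspace = Workspace(
    resource_id="<resource-id>",
    location="eastus",
)

# Each GET in the recording corresponds to one refresh of this job record.
job = workspace.get_job("00000000-0000-0000-0000-000000000001")

# Poll until the service reports a terminal status (e.g. "Succeeded").
job.wait_until_completed()

# Fetch and parse the output blob -- the final 206 response above returned
# '{"Histogram": ["(0, 0)", 1.0]}'.
print(job.get_results())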
interactions:
- request:
body: client_id=PLACEHOLDER&grant_type=client_credentials&client_info=1&client_secret=PLACEHOLDER&scope=https%3A%2F%2Fquantum.microsoft.com%2F.default
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '144'
Content-Type:
- application/x-www-form-urlencoded
User-Agent:
- azsdk-python-identity/1.16.0 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-client-current-telemetry:
- 4|730,2|
x-client-os:
- win32
x-client-sku:
- MSAL.Python
x-client-ver:
- 1.28.0
method: POST
uri: https://login.microsoftonline.com/00000000-0000-0000-0000-000000000000/oauth2/v2.0/token
response:
body:
string: '{"token_type": "Bearer", "expires_in": 1745074157, "ext_expires_in":
1745074157, "refresh_in": 31536000, "access_token": "PLACEHOLDER"}'
headers:
content-length:
- '135'
content-type:
- application/json; charset=utf-8
status:
code: 200
message: OK
- request:
body: 'b''{"id": "00000000-0000-0000-0000-000000000001", "name": "session-00000000-0000-0000-0000-000000000001",
"providerId": "microsoft.test", "target": "echo-quantinuum", "itemType": "Session"}'''
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '189'
Content-Type:
- application/json
User-Agent:
- testapp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: PUT
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/sessions/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"status": "Waiting", "jobFailurePolicy": "Abort", "name": "session-00000000-0000-0000-0000-000000000001",
"id": "00000000-0000-0000-0000-000000000001", "providerId": "microsoft.test",
"target": "echo-quantinuum", "creationTime": "2024-04-19T14:49:18.3209781Z",
"endExecutionTime": null, "costEstimate": null, "itemType": "Session"}'
headers:
connection:
- keep-alive
content-length:
- '332'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '0'
User-Agent:
- testapp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: POST
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/sessions/00000000-0000-0000-0000-000000000001:close?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"status": "Succeeded", "jobFailurePolicy": "Abort", "name": "session-00000000-0000-0000-0000-000000000001",
"id": "00000000-0000-0000-0000-000000000001", "providerId": "microsoft.test",
"target": "echo-quantinuum", "creationTime": "2024-04-19T14:49:18.3209781Z",
"endExecutionTime": "2024-04-19T14:49:18.5398629Z", "costEstimate": null,
"itemType": "Session"}'
headers:
connection:
- keep-alive
content-length:
- '360'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
version: 1
|
azure-quantum-python/azure-quantum/tests/unit/recordings/test_session_with_target_open_session.yaml/0
|
{
"file_path": "azure-quantum-python/azure-quantum/tests/unit/recordings/test_session_with_target_open_session.yaml",
"repo_id": "azure-quantum-python",
"token_count": 1716
}
| 427 |
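The recording above captures a session lifecycle: a PUT that creates the session in the `Waiting` state and a POST to the `:close` action that ends it as `Succeeded`. A minimal sketch of driving that lifecycle from Python follows; it assumes the `azure-quantum` client's session support, and the workspace values and target name are placeholders (the recording uses the internal `echo-quantinuum` test target).

from azure.quantum import Workspace

workspace = Workspace(
    resource_id="<resource-id>",
    location="eastus",
)

# A real workspace would use a provider target such as "quantinuum.sim.h1-1e".
target = workspace.get_targets("<target-name>")

# Entering the context issues the session PUT; leaving it issues the
# ":close" POST seen in the recording.
with target.open_session(name="my-session") as session:
    pass  # jobs submitted here are grouped under the session ID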
##
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
##
import json
import re
import unittest
from unittest.mock import Mock
import pytest
from common import QuantumTestBase, RegexScrubbingPatterns
from azure.quantum import Job, JobDetails
from azure.quantum.target import Target
class TestJobResults(QuantumTestBase):
"""TestJob
Tests the azure.quantum.job module.
"""
def test_job_success(self):
job_results = self._get_job_results("test_output_data_format","{\"Histogram\": [\"[0]\", 0.50, \"[1]\", 0.50]}")
self.assertTrue(len(job_results["Histogram"]) == 4)
@pytest.mark.live_test
def test_job_get_results_with_expired_sas_token(self):
"""
Get existing result blob url and replace its sas token with expired one,
so we can test its ability to refresh it.
"""
target = self.create_echo_target()
input_data = "{ input: 'data' }"
job = target.submit(input_data=input_data)
job.wait_until_completed()
# mocking SAS-token expiration date to an expired date
job.details.output_data_uri = re.sub(
pattern=RegexScrubbingPatterns.URL_QUERY_SAS_KEY_EXPIRATION,
repl="se=2024-01-01T00%3A00%3A00Z&",
string=job.details.output_data_uri)
job_results = job.get_results()
self.assertEqual(job_results, input_data)
def test_job_for_microsoft_quantum_results_v1_success(self):
job_results = self._get_job_results("microsoft.quantum-results.v1","{\"Histogram\": [\"[0]\", 0.50, \"[1]\", 0.50]}")
        self.assertEqual(len(job_results.keys()), 2)
        self.assertEqual(job_results["[0]"], 0.50)
        self.assertEqual(job_results["[1]"], 0.50)
def test_job_for_microsoft_quantum_results_v1_no_histogram_returns_raw_result(self):
job_result_raw = "{\"NotHistogramProperty\": [\"[0]\", 0.50, \"[1]\", 0.50]}"
job_result = self._get_job_results("microsoft.quantum-results.v1", job_result_raw)
        self.assertEqual(job_result, json.loads(job_result_raw))
def test_job_for_microsoft_quantum_results_v1_invalid_histogram_returns_raw_result(self):
job_result_raw = "{\"NotHistogramProperty\": [\"[0]\", 0.50, \"[1]\"]}"
job_result = self._get_job_results("microsoft.quantum-results.v1", job_result_raw)
        self.assertEqual(job_result, json.loads(job_result_raw))
def _get_job_results(self, output_data_format, results_as_json_str):
job_details = JobDetails(
id= "",
name= "",
provider_id="",
target="",
container_uri="",
input_data_format="",
output_data_format = output_data_format)
job_details.status = "Succeeded"
job = Job(
workspace=None,
job_details=job_details)
job.has_completed = Mock(return_value=True)
job.wait_until_completed = Mock()
        class DownloadDataMock(object):
            def decode(self) -> str:
                pass
        download_data = DownloadDataMock()
download_data.decode = Mock(return_value=results_as_json_str)
job.download_data = Mock(return_value=download_data)
return job.get_results()
if __name__ == "__main__":
unittest.main()
|
azure-quantum-python/azure-quantum/tests/unit/test_job_results.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/tests/unit/test_job_results.py",
"repo_id": "azure-quantum-python",
"token_count": 1448
}
| 428 |
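The `microsoft.quantum-results.v1` tests above hinge on one transformation: a flat `Histogram` list of alternating keys and values is folded into a mapping, and anything that does not look like a valid histogram falls through as the raw parsed payload. The standalone function below re-implements that folding for illustration; it is not the library's actual code.

import json

def parse_v1_histogram(payload: str):
    """Fold a flat [key, value, key, value, ...] histogram into a dict."""
    data = json.loads(payload)
    histogram = data.get("Histogram")
    if not isinstance(histogram, list) or len(histogram) % 2 != 0:
        return data  # fall back to the raw parsed payload
    return {histogram[i]: histogram[i + 1] for i in range(0, len(histogram), 2)}

# Mirrors test_job_for_microsoft_quantum_results_v1_success above:
assert parse_v1_histogram('{"Histogram": ["[0]", 0.5, "[1]", 0.5]}') == {
    "[0]": 0.5,
    "[1]": 0.5,
}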
<jupyter_start><jupyter_text>👋🌍 Hello, world: Submit a Cirq job to QuantinuumIn this notebook, we'll review the basics of Azure Quantum by submitting a simple *job*, or quantum program, to [Quantinuum](https://www.quantinuum.com/). We will use [Cirq](https://quantumai.google/cirq) to express the quantum job. Submit a simple job to Quantinuum using Azure QuantumAzure Quantum provides several ways to express quantum programs. In this example, we are using Cirq, but note that Q# and Qiskit are also supported. All code in this example will be written in Python.Let's begin. When you see a code block, hover over it and click the triangle play-button to execute it. To avoid any compilation issues, this should be done in order from top to bottom. 1. Connect to the Azure Quantum workspaceTo connect to the Azure Quantum service, construct an instance of the `AzureQuantumService`. Note that it's imported from `azure.quantum.cirq`.<jupyter_code>from azure.quantum import Workspace
from azure.quantum.cirq import AzureQuantumService
workspace = Workspace(
resource_id = "",
location = "",
)
service = AzureQuantumService(workspace)<jupyter_output><empty_output><jupyter_text>Let's see what providers and targets are enabled in this workspace with the following command:<jupyter_code>print("This workspace's targets:")
for target in service.targets():
print("-", target.name)<jupyter_output><empty_output><jupyter_text>β Do you see `quantinuum.sim.h1-1sc` in your list of targets? If so, you're ready to keep going.Don't see it? You may need to add Quantinuum to your workspace to run this sample. Navigate to the **Providers** page in the portal and click **+Add** to add the Quantinuum provider. Don't worry, there's a free credits plan available. Quantinuum: The quantum providerAzure Quantum partners with third-party companies to deliver solutions to quantum jobs. These company offerings are called *providers*. Each provider can offer multiple *targets* with different capabilities. See the table below for Quantinuum's H1-1 device targets.| Target name | Target ID | Number of qubits | Description|| --- | ---| ---|---|H1-1 Syntax Checker | `quantinuum.sim.h1-1sc` | 20 | Quantinuum's H1-1 Syntax Checker. This will return all zeros in place of actual or simulated results. Use this to validate quantum programs against the H1-1 compiler before submitting to hardware or emulators on Quantinuum's platform. Free of cost. |H2-1 Syntax Checker | `quantinuum.sim.h2-1sc` | 32 | Quantinuum's H2-1 Syntax Checker. This will return all zeros in place of actual or simulated results. Use this to validate quantum programs against the H2-1 compiler before submitting to hardware or emulators on Quantinuum's platform. Free of cost. |H1-1 Emulator | `quantinuum.sim.h1-1e` | 20 | Quantinuum's H1-1 Emulator. Uses a realistic physical model and noise model of H1-1. |H2-1 Emulator | `quantinuum.sim.h2-1e` | 32 | Quantinuum's H2-1 Emulator. Uses a realistic physical model and noise model of H2-1. |H1-1 | `quantinuum.qpu.h1-1` | 20 | Quantinuum's H1-1 trapped ion device. |H2-1 | `quantinuum.qpu.h2-1` | 32 | Quantinuum's H2-1 trapped ion device. |For this example, we will use `quantinuum.sim.h1-1sc` to avoid any costs or credit usage. If you wish to emulate or run the actual circuit, you may replace all instances of `quantinuum.sim.h1-1sc` in subsequent code cells with one of the other values in the table above, but please note any costs incurred. To learn more about Quantinuum's targets, check out our [documentation](https://aka.ms/AQ/Quantinuum/Documentation). 2. Build the quantum programLet's create a simple Cirq circuit to run.<jupyter_code>import cirq
q0 = cirq.LineQubit(0)
circuit = cirq.Circuit(
cirq.H(q0), # Apply an H-gate to q0
cirq.measure(q0, key="0") # Measure q0
)
circuit<jupyter_output><empty_output><jupyter_text>The circuit you built is a simple quantum random bit generator. With Quantinuum's Syntax Checker, we will be able to confirm that the circuit is able to be run on their H1 emulator and hardware. 3. Submit the quantum program to Quantinuum<jupyter_code># Using the Quantinuum Syntax Checker target, call "run" to submit the job. We'll
# use 100 repetitions (simulated runs).
job = service.targets("quantinuum.sim.h1-1sc").submit(circuit, name="hello world-cirq-quantinuum", repetitions=100)
# Print the job ID.
print("Job id:", job.job_id())<jupyter_output><empty_output><jupyter_text>The job ID can be used to retrieve the results later using the [get_job method](https://learn.microsoft.com/python/azure-quantum/azure.quantum.workspace?azure-quantum-workspace-get-job) or by viewing it under the **Job management** section of the portal. 4. Obtain the job resultsTo get the job's results, you can await completion and plot the output, as in the next code cell. This may take a few seconds.<jupyter_code># Await job results.
result = job.results()
from matplotlib import pyplot
# Quantinuum's emulators and quantum computers do not return probabilities, they return a dictionary of measurements.
# (Note that since we targeted Syntax Checker, we expect to see that all measurements result in a zero.)
# In order to visualize the result, we can use a histogram. Since the circuit contains only a single
# measurement, we can expect that all values will be tied to the key "m_0".
pyplot.hist(result['m_0'], bins=[0,1,2], width=0.5)
pyplot.title("Result")
pyplot.xlabel("Measurement")
pyplot.ylabel("Repetitions")
pyplot.show()<jupyter_output><empty_output><jupyter_text>**See the histogram above? Congratulations, you've submitted a job with Azure Quantum! 🎉** 5. Estimate costsTo estimate the costs of running this program on a simulator or hardware, you can use the `service.estimate_cost` method.<jupyter_code>cost = service.estimate_cost(circuit, repetitions=100, target="quantinuum.qpu.h1-1")
print(f"Estimated cost: {cost.estimated_total} {cost.currency_code}")<jupyter_output><empty_output>
|
azure-quantum-python/samples/hello-world/HW-quantinuum-cirq.ipynb/0
|
{
"file_path": "azure-quantum-python/samples/hello-world/HW-quantinuum-cirq.ipynb",
"repo_id": "azure-quantum-python",
"token_count": 1794
}
| 429 |
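The notebook above points out that the printed job ID can be used to retrieve results later via the workspace. A minimal sketch of that retrieval path follows; the resource ID, location, and job ID are placeholders.

from azure.quantum import Workspace

workspace = Workspace(
    resource_id="<resource-id>",
    location="<location>",
)

# Look up a previously submitted job by its ID.
job = workspace.get_job("<job-id>")
if job.has_completed():
    print(job.get_results())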
---
page_type: sample
author: guenp
description: Variational Quantum Eigensolver
ms.author: [email protected]
ms.date: 05/02/2022
languages:
- python
products:
- azure-quantum
---
# Estimating the ground state energy of hydrogen using variational quantum eigensolvers (VQE) on Azure Quantum
This sample shows how to estimate the ground state energy of hydrogen using the Azure Quantum service. In particular, this sample uses the implementation of the variational quantum eigensolver algorithm provided with Qiskit to estimate minimum energies. The sample demonstrates running this VQE implementation on various Azure Quantum backends.
## Manifest
- [VQE-qiskit-hydrogen-session.ipynb](https://github.com/microsoft/azure-quantum-python/blob/main/samples/vqe/VQE-qiskit-hydrogen-session.ipynb): Python + Qiskit notebook demonstrating using VQE on multiple backends using a session.
|
azure-quantum-python/samples/vqe/README.md/0
|
{
"file_path": "azure-quantum-python/samples/vqe/README.md",
"repo_id": "azure-quantum-python",
"token_count": 247
}
| 430 |
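The VQE sample above targets multiple Azure Quantum backends through Qiskit. As a hedged sketch of the connection step (not code from the notebook itself; resource ID and location are placeholders), the `AzureQuantumProvider` from the `azure-quantum` package exposes the workspace's targets as Qiskit backends:

from azure.quantum.qiskit import AzureQuantumProvider

# Placeholders: use your workspace's resource ID and location.
provider = AzureQuantumProvider(
    resource_id="<resource-id>",
    location="<location>",
)

# Enumerate the backends the VQE notebook can run against.
for backend in provider.backends():
    print(backend)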
/*------------------------------------
Copyright (c) Microsoft Corporation.
Licensed under the MIT License.
All rights reserved.
------------------------------------ */
import React from "react";
import { IColumn } from "@fluentui/react";
import { Icon } from "@fluentui/react/lib/Icon";
import { mergeStyleSets } from "@fluentui/react/lib/Styling";
import { TooltipHost, TooltipOverflowMode } from "@fluentui/react/lib/Tooltip";
import { IItem } from "./Table";
const classNames = mergeStyleSets({
cellText: {
overflow: "hidden",
textOverflow: "ellipsis",
color: "#343434",
},
tooltipHost: {
marginLeft: "8px",
cursor: "default",
},
infoIcon: {
width: "12px",
height: "12px",
display: "inline-block",
verticalAlign: "-0.1rem",
color: "#343434",
},
});
export function GetColumns(): IColumn[] {
const columns: IColumn[] = [
{
key: "name",
name: "Name",
onRender: (item: IItem) => {
return (
<div className={classNames.cellText} data-is-focusable={true}>
{item.name}
{item.description ? (
<TooltipHost
hostClassName={classNames.tooltipHost}
content={item.description}
>
<Icon iconName="Info" className={classNames.infoIcon} />
</TooltipHost>
) : (
<></>
)}
</div>
);
},
minWidth: 190,
flexGrow: 3,
},
{
key: "value",
name: "Value",
onRender: (item: IItem) => {
return (
<div className={classNames.cellText} data-is-focusable={true}>
<TooltipHost
hostClassName={classNames.tooltipHost}
content={item.value}
overflowMode={TooltipOverflowMode.Parent}
>
{item.value}
</TooltipHost>
</div>
);
},
minWidth: 80,
flexGrow: 1,
},
];
return columns;
}
|
azure-quantum-python/visualization/react-lib/src/components/table/Column.tsx/0
|
{
"file_path": "azure-quantum-python/visualization/react-lib/src/components/table/Column.tsx",
"repo_id": "azure-quantum-python",
"token_count": 943
}
| 431 |
Alignment
=========
.. js:autoclass:: Alignment
:members:
|
bistring/docs/JavaScript/Alignment.rst/0
|
{
"file_path": "bistring/docs/JavaScript/Alignment.rst",
"repo_id": "bistring",
"token_count": 24
}
| 432 |