text (string, lengths 5 to 22M) | id (string, lengths 12 to 177) | metadata (dict) | __index_level_0__ (int64, 0 to 1.37k)
---|---|---|---
__include__: 'darts.yaml' # just use darts defaults
common:
experiment_name: 'MyResnet'
nas:
eval:
loader:
train_batch: 128
test_batch: 4096
cutout: 0
val_ratio: 0.1
# dataset:
# max_batches: 2
trainer:
epochs: 200
logger_freq: 10
drop_path_prob: 0.0
grad_clip: 0.0
aux_weight: 0.0
|
archai/confs/algos/resnet.yaml/0
|
{
"file_path": "archai/confs/algos/resnet.yaml",
"repo_id": "archai",
"token_count": 186
}
| 334 |
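The `__include__` key in the config above pulls in the DARTS defaults so this file only overrides what differs. As a rough illustration of include-then-override merging (a hypothetical sketch using PyYAML; Archai's actual config loader may differ, and relative-path resolution is glossed over):

```python
import yaml


def load_with_includes(path: str) -> dict:
    """Load a YAML config, recursively overlaying it on the file named
    by its '__include__' key, if present (hypothetical helper)."""
    with open(path) as f:
        config = yaml.safe_load(f)
    include = config.pop("__include__", None)
    if include:
        config = deep_merge(load_with_includes(include), config)
    return config


def deep_merge(base: dict, override: dict) -> dict:
    """Recursively overlay `override` onto `base`; override wins on conflicts."""
    merged = dict(base)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_merge(merged[key], value)
        else:
            merged[key] = value
    return merged
```

Under this scheme, loading `resnet.yaml` would yield the full DARTS defaults with only `experiment_name` and the listed loader/trainer settings replaced.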
!!python/object:archai.nas.model_desc.ModelDesc
_cell_descs:
- !!python/object:archai.nas.model_desc.CellDesc
_nodes:
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: skip_connect
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: skip_connect
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 2
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: skip_connect
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
alphas_from: 0
cell_ch_out: 64
cell_type: &id001 !!python/object/apply:archai.nas.model_desc.CellType
- regular
conv_params: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
id: 0
max_final_edges: 2
node_ch_out: 16
post_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: concate_channels
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
out_states: 4
trainables: null
s0_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 48
ch_out: 16
trainables: null
s1_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 48
ch_out: 16
trainables: null
- !!python/object:archai.nas.model_desc.CellDesc
_nodes:
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 2
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
stride: 1
trainables: null
alphas_from: 0
cell_ch_out: 64
cell_type: *id001
conv_params: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 16
ch_out: 16
id: 1
max_final_edges: 2
node_ch_out: 16
post_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: concate_channels
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
out_states: 4
trainables: null
s0_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 48
ch_out: 16
trainables: null
s1_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 16
trainables: null
- !!python/object:archai.nas.model_desc.CellDesc
_nodes:
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 2
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 2
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 2
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: skip_connect
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 2
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 2
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 2
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: skip_connect
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 2
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: skip_connect
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: avg_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 2
trainables: null
alphas_from: 2
cell_ch_out: 128
cell_type: &id002 !!python/object/apply:archai.nas.model_desc.CellType
- reduction
conv_params: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
id: 2
max_final_edges: 2
node_ch_out: 32
post_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: concate_channels
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 128
ch_out: 128
out_states: 4
trainables: null
s0_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 32
trainables: null
s1_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 32
trainables: null
- !!python/object:archai.nas.model_desc.CellDesc
_nodes:
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 2
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
alphas_from: 0
cell_ch_out: 128
cell_type: *id001
conv_params: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
id: 3
max_final_edges: 2
node_ch_out: 32
post_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: concate_channels
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 128
ch_out: 128
out_states: 4
trainables: null
s0_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_reduce
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 32
trainables: null
s1_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 128
ch_out: 32
trainables: null
- !!python/object:archai.nas.model_desc.CellDesc
_nodes:
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 2
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
stride: 1
trainables: null
alphas_from: 0
cell_ch_out: 128
cell_type: *id001
conv_params: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 32
ch_out: 32
id: 4
max_final_edges: 2
node_ch_out: 32
post_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: concate_channels
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 128
ch_out: 128
out_states: 4
trainables: null
s0_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 128
ch_out: 32
trainables: null
s1_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 128
ch_out: 32
trainables: null
- !!python/object:archai.nas.model_desc.CellDesc
_nodes:
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 2
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 2
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 2
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: dil_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 2
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 3
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 2
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: skip_connect
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 3
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: dil_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 4
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
alphas_from: 2
cell_ch_out: 256
cell_type: *id002
conv_params: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
id: 5
max_final_edges: 2
node_ch_out: 64
post_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: concate_channels
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 256
ch_out: 256
out_states: 4
trainables: null
s0_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 128
ch_out: 64
trainables: null
s1_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 128
ch_out: 64
trainables: null
- !!python/object:archai.nas.model_desc.CellDesc
_nodes:
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 2
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
alphas_from: 0
cell_ch_out: 256
cell_type: *id001
conv_params: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
id: 6
max_final_edges: 2
node_ch_out: 64
post_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: concate_channels
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 256
ch_out: 256
out_states: 4
trainables: null
s0_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_reduce
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 128
ch_out: 64
trainables: null
s1_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 256
ch_out: 64
trainables: null
- !!python/object:archai.nas.model_desc.CellDesc
_nodes:
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 2
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_5x5
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.NodeDesc
edges:
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 0
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: max_pool_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
- !!python/object:archai.nas.model_desc.EdgeDesc
input_ids:
- 1
op_desc: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: sep_conv_3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
stride: 1
trainables: null
alphas_from: 0
cell_ch_out: 256
cell_type: *id001
conv_params: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 64
ch_out: 64
id: 7
max_final_edges: 2
node_ch_out: 64
post_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: concate_channels
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 256
ch_out: 256
out_states: 4
trainables: null
s0_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 256
ch_out: 64
trainables: null
s1_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: prepr_normal
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 256
ch_out: 64
trainables: null
aux_tower_descs:
- null
- null
- null
- null
- null
- null
- null
- null
ds_ch: 3
logits_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: linear
params:
n_ch: 256
n_classes: 10
trainables: null
n_classes: 10
params:
gs_num_sample: 4
pool_op: !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: pool_adaptive_avg2d
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 256
ch_out: 256
trainables: null
model_stems:
- !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: stem_conv3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 3
ch_out: 48
trainables: null
- !!python/object:archai.nas.model_desc.OpDesc
children: null
children_ins: null
in_len: 1
name: stem_conv3x3
params:
conv: !!python/object:archai.nas.model_desc.ConvMacroParams
ch_in: 3
ch_out: 48
trainables: null
|
archai/confs/darts_models/darts_genotype.yaml/0
|
{
"file_path": "archai/confs/darts_models/darts_genotype.yaml",
"repo_id": "archai",
"token_count": 19474
}
| 335 |
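The genotype above is a PyYAML object serialization: the `!!python/object:` tags instantiate `archai.nas.model_desc` classes directly, and the `&id001`/`*id001` anchors share a single `CellType` instance across cells. A minimal sketch of reading such a file (assuming `archai` is importable; `yaml.unsafe_load` is required because arbitrary-object tags are disabled in the safe loader, so only use it on trusted files):

```python
import yaml

# Unsafe load is needed to honor the '!!python/object:' tags.
with open("confs/darts_models/darts_genotype.yaml") as f:
    model_desc = yaml.unsafe_load(f)

print(type(model_desc).__name__)    # ModelDesc
print(len(model_desc._cell_descs))  # 8 cells in this genotype
```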
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
import json
from typing import List, Optional, Union
from overrides import overrides
from archai.api.dataset_provider import DatasetProvider
from archai.discrete_search.api.archai_model import ArchaiModel
from archai.discrete_search.api.model_evaluator import AsyncModelEvaluator
from azure.ai.ml import MLClient, command, Input, Output, dsl
from archai.common.store import ArchaiStore
from shutil import copyfile
from archai.common.monitor import JobCompletionMonitor
from training_pipeline import start_training_pipeline
from utils import copy_code_folder
class AmlTrainingValAccuracy(AsyncModelEvaluator):
def __init__(self,
config,
compute_cluster_name,
environment_name,
datastore_path,
models_path,
local_output,
experiment_name,
ml_client: MLClient,
save_models: bool = True,
partial_training: bool = True,
training_epochs: float = 1.0,
timeout_seconds=3600):
self.training_epochs = training_epochs
self.partial_training = partial_training
self.compute_cluster_name = compute_cluster_name
self.environment_name = environment_name
self.datastore_path = datastore_path
self.models_path = models_path
self.local_output = local_output
self.config = config
self.experiment_name = experiment_name
self.models = []
self.save_models = save_models
self.ml_client = ml_client
self.timeout = timeout_seconds
self.store = None
storage_account_key = config['storage_account_key']
storage_account_name = config['storage_account_name']
self.store = ArchaiStore(storage_account_name, storage_account_key)
@overrides
def send(self, arch: ArchaiModel, budget: Optional[float] = None) -> None:
self.models += [arch.arch.get_archid()]
@overrides
def fetch_all(self) -> List[Union[float, None]]:
snapshot = self.models
self.models = [] # reset for next run.
training_type = 'partial' if self.partial_training else 'full'
print(f"AmlTrainingValAccuracy: Starting {training_type} training on {len(snapshot)} models")
        # Train all the models listed in the snapshot on a GPU cluster so that training
        # happens in parallel, which greatly reduces the overall Archai search time.
description = "AmlTrainingValAccuracy partial training"
pipeline_job, model_names = start_training_pipeline(
description, self.ml_client, self.store, snapshot,
self.compute_cluster_name, self.datastore_path, self.models_path, self.local_output,
self.experiment_name, self.environment_name, self.training_epochs, save_models=False)
job_id = pipeline_job.name
print(f'AmlTrainingValAccuracy: Started training pipeline: {job_id}')
# wait for all the parallel training jobs to finish
metric_key = 'vac_acc'
keys = [metric_key]
monitor = JobCompletionMonitor(self.store, self.ml_client, keys, job_id, self.timeout)
results = monitor.wait(model_names)
# save the results to the output folder (which is mapped by the AML pipeline to our
# blob store under the container 'models' in the folder named the same as the
# experiment_name)
results_path = f'{self.local_output}/models.json'
with open(results_path, 'w') as f:
f.write(json.dumps(results, indent=2))
        # Also save the archai log, which can be handy for debugging later.
log = 'archai.log'
if os.path.isfile(log):
copyfile(log, f'{self.local_output}/{log}')
        # Extract the array of accuracies for our return value. This is the metric that
        # the Archai search uses to decide which models to continue evolving and which
        # to drop.
accuracies = []
for i, m in enumerate(results['models']):
val_acc = m[metric_key]
accuracies += [val_acc]
print(f'AmlTrainingValAccuracy: fetch_all returning : {accuracies}')
return accuracies
|
archai/docs/advanced_guide/cloud/azure/notebooks/multi_node_search/scripts/aml_training_evaluator.py/0
|
{
"file_path": "archai/docs/advanced_guide/cloud/azure/notebooks/multi_node_search/scripts/aml_training_evaluator.py",
"repo_id": "archai",
"token_count": 1735
}
| 336 |
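`AmlTrainingValAccuracy` implements Archai's asynchronous evaluator contract: the search loop calls `send()` once per candidate and later calls `fetch_all()`, which blocks until all queued results are ready and resets the queue. A minimal sketch of the same contract with an in-process queue instead of an AML pipeline (illustrative only; the parameter-count metric is arbitrary):

```python
from typing import List, Optional, Union

from overrides import overrides

from archai.discrete_search.api.archai_model import ArchaiModel
from archai.discrete_search.api.model_evaluator import AsyncModelEvaluator


class QueuedParamCount(AsyncModelEvaluator):
    """Toy async evaluator: queue models on send(), evaluate on fetch_all()."""

    def __init__(self) -> None:
        self.queue: List[ArchaiModel] = []

    @overrides
    def send(self, arch: ArchaiModel, budget: Optional[float] = None) -> None:
        self.queue.append(arch)

    @overrides
    def fetch_all(self) -> List[Union[float, None]]:
        results = [float(sum(p.numel() for p in m.arch.parameters())) for m in self.queue]
        self.queue = []  # reset for the next batch, as AmlTrainingValAccuracy does
        return results
```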
import argparse
import json
from transformers import (
AutoTokenizer,
DataCollatorForLanguageModeling,
GPT2Config,
GPT2LMHeadModel,
TrainingArguments,
)
from archai.common.file_utils import check_available_checkpoint
from archai.datasets.nlp.hf_dataset_provider import HfHubDatasetProvider
from archai.datasets.nlp.hf_dataset_provider_utils import tokenize_contiguous_dataset
from archai.trainers.nlp.hf_trainer import HfTrainer
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Trains a GPT2 architecture.")
parser.add_argument("arch_config_path", type=str, help="Path to the architecture configuration file.")
parser.add_argument(
"-o",
"--output_dir",
type=str,
default="",
help="Defines an output folder for the saved outputs.",
)
args = parser.parse_args()
return args
if __name__ == "__main__":
args = parse_args()
tokenizer = AutoTokenizer.from_pretrained("gpt2", model_max_length=1024)
tokenizer.pad_token = tokenizer.eos_token
collator = DataCollatorForLanguageModeling(tokenizer=tokenizer, mlm=False)
dataset_provider = HfHubDatasetProvider(dataset_name="wikitext", dataset_config_name="wikitext-2-raw-v1")
train_dataset = dataset_provider.get_train_dataset()
encoded_train_dataset = train_dataset.map(
tokenize_contiguous_dataset,
batched=True,
fn_kwargs={"tokenizer": tokenizer, "model_max_length": 1024},
remove_columns=train_dataset.column_names,
)
pareto_config = {}
with open(args.arch_config_path, "r") as f:
pareto_config = json.load(f)
config = GPT2Config(n_positions=1024, bos_token_id=0, eos_token_id=0, **pareto_config)
model = GPT2LMHeadModel(config=config)
print(f"Total parameters: {sum(p.numel() for p in model.parameters())}")
training_args = TrainingArguments(
args.output_dir,
optim="adamw_torch",
evaluation_strategy="no",
logging_steps=10,
per_device_train_batch_size=4,
gradient_accumulation_steps=8,
learning_rate=6e-4,
weight_decay=0.1,
adam_beta1=0.9,
adam_beta2=0.95,
lr_scheduler_type="cosine",
warmup_steps=5,
max_steps=1000,
)
trainer = HfTrainer(
model=model,
args=training_args,
data_collator=collator,
train_dataset=encoded_train_dataset,
)
resume_from_checkpoint = check_available_checkpoint(args.output_dir)
trainer_output = trainer.train(resume_from_checkpoint=resume_from_checkpoint)
trainer.save_metrics("train", trainer_output.metrics)
|
archai/docs/advanced_guide/cloud/azure/notebooks/text_generation/src/train.py/0
|
{
"file_path": "archai/docs/advanced_guide/cloud/azure/notebooks/text_generation/src/train.py",
"repo_id": "archai",
"token_count": 1126
}
| 337 |
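The script above maps `tokenize_contiguous_dataset` over the raw corpus to turn variable-length documents into fixed 1024-token training blocks. Presumably it follows the standard contiguous-packing pattern; a hypothetical re-implementation for illustration (not Archai's actual function):

```python
from typing import Any, Dict, List


def tokenize_contiguous(examples: Dict[str, List[str]], tokenizer: Any,
                        model_max_length: int) -> Dict[str, List[List[int]]]:
    """Concatenate all tokenized texts, then slice into fixed-length blocks
    so no padding is needed; the trailing remainder is dropped."""
    ids: List[int] = []
    for text in examples["text"]:
        ids.extend(tokenizer(text)["input_ids"])
    n_blocks = len(ids) // model_max_length
    blocks = [ids[i * model_max_length:(i + 1) * model_max_length]
              for i in range(n_blocks)]
    # For causal LM, labels mirror input_ids; the trainer shifts them internally.
    return {"input_ids": blocks, "labels": [list(b) for b in blocks]}
```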
Unit Tests
==========
The Archai project welcomes contributions through the implementation of unit tests using Pytest. If you are interested in contributing to the project in this way, please follow these steps:
#. Ensure that Pytest is installed. You can install it using ``pip install archai[tests]``.
#. Check out the Archai codebase and create a new branch for your changes. This will allow for easy submission of your code as a pull request upon completion.
#. Create a ``.py`` test file in the :github:`tests` directory. For example, if writing a unit test for the :github:`archai/trainers/nlp/hf_training_args.py` file, the corresponding path would be :github:`tests/trainers/nlp/test_hf_training_args.py`.
#. Write test functions inside the created file. These functions should be named using the pattern: ``test_<name of function being tested>``.
#. Test functions should utilize the assert statement to verify that the output of the tested function is correct. For example:
.. code-block:: python
from archai.trainers.nlp.hf_training_args import DistillerTrainingArguments
def test_distiller_training_arguments():
args = DistillerTrainingArguments("tmp")
assert args.alpha == 0.5
assert args.temperature == 1.0
args = DistillerTrainingArguments("tmp", alpha=0.75, temperature=1.25)
assert args.alpha == 0.75
assert args.temperature == 1.25
#. Run your tests using the ``pytest`` command. This will automatically discover and execute all test functions in the tests directory.
#. To run a specific test file, use the ``pytest`` command followed by the path to the desired file. For example: ``pytest tests/trainers/nlp/test_hf_training_args.py``.
After writing and running your tests, you may submit your code as a pull request to the main project repository. Please include a description of the changes made and any relevant issue references.
|
archai/docs/contributing/unit_tests.rst/0
|
{
"file_path": "archai/docs/contributing/unit_tests.rst",
"repo_id": "archai",
"token_count": 564
}
| 338 |
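When the same assertion pattern applies across several input combinations, ``pytest.mark.parametrize`` keeps tests compact. The example from the guide could be extended like this (a sketch under the same assumptions as the guide's example):

.. code-block:: python

    import pytest

    from archai.trainers.nlp.hf_training_args import DistillerTrainingArguments

    @pytest.mark.parametrize("alpha,temperature", [(0.5, 1.0), (0.75, 1.25)])
    def test_distiller_training_arguments_parametrized(alpha, temperature):
        args = DistillerTrainingArguments("tmp", alpha=alpha, temperature=temperature)
        assert args.alpha == alpha
        assert args.temperature == temperature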
<jupyter_start><jupyter_text>Evaluators<jupyter_code>from overrides import overrides
from typing import List, Optional
from archai.discrete_search.api import ArchaiModel<jupyter_output><empty_output><jupyter_text>We will use SegmentationDag search space for this example<jupyter_code>from archai.discrete_search.search_spaces.cv import SegmentationDagSearchSpace
ss = SegmentationDagSearchSpace(nb_classes=1, img_size=(64, 64), max_layers=5, seed=11)
# NBVAL_SKIP
m = ss.random_sample()
m.arch.view()<jupyter_output><empty_output><jupyter_text>`SegmentationDagSearchSpace` is a subclass of `EvolutionarySearchSpace`, so `mutate` and `crossover` methods are already implemented<jupyter_code># NBVAL_SKIP
ss.mutate(m).arch.view()<jupyter_output><empty_output><jupyter_text>Evaluating models `Evaluators` are the main tool used to evaluate architectures on given criteria (task performance, speed, size, etc.). Archai supports two types of `Evaluators`:* ModelEvaluator ([archai.discrete_search.api.evaluator.ModelEvaluator](https://microsoft.github.io/archai/reference/api/archai.discrete_search.api.html#module-archai.discrete_search.api.evaluator.ModelEvaluator)) * Evaluates a model: `ModelEvaluator.evaluate(model, budget)`* AsyncModelEvaluator ([archai.discrete_search.api.evaluator.AsyncModelEvaluator](https://microsoft.github.io/archai/reference/api/archai.discrete_search.api.html#module-archai.discrete_search.api.evaluator.AsyncModelEvaluator)): * Sends an evaluation job: `AsyncModelEvaluator.send(model, budget)` * Fetches all evaluation jobs from the queue: `AsyncModelEvaluator.fetch_all()` A synchronous evaluator (`ModelEvaluator`) is computed by the search algorithm in a sequential fashion, while an asynchronous evaluator (`AsyncModelEvaluator`) sends evaluation jobs to a queue and fetches the results later, and thus can be used to evaluate models remotely or in a distributed fashion. The `ArchaiModel` object passed to the evaluator objects can be used to access the architecture, if necessary. Some objectives will actively use a dataset (e.g. task accuracy), while others (e.g. FLOPs, latency, memory) may not. The `budget` argument, if provided, is a multiplier value used by search algorithms like `SuccessiveHalving` to specify how much compute should be spent on the evaluation. Read more about Evaluators [here](https://microsoft.github.io/archai/reference/api/archai.discrete_search.api.html#module-archai.discrete_search.api.evaluator). You can find a list of built-in evaluators in `archai.discrete_search.evaluators`. Example: Using a built-in evaluator (`AvgOnnxLatency`) Let's use a built-in evaluator to measure the ONNX latency of PyTorch models<jupyter_code>from archai.discrete_search.evaluators import AvgOnnxLatency
onnx_latency_obj = AvgOnnxLatency(input_shape=(1, 3, 64, 64))
onnx_latency_obj.evaluate(model=ss.random_sample(), budget=None)<jupyter_output>/home/gderosa/miniconda3/envs/archai38/lib/python3.8/site-packages/torch/onnx/_internal/jit_utils.py:258: UserWarning: The shape inference of prim::Constant type is missing, so it may result in wrong shape inference for the exported graph. Please consider adding it in symbolic function. (Triggered internally at ../torch/csrc/jit/passes/onnx/shape_type_inference.cpp:1884.)
_C._jit_pass_onnx_node_shape_type_inference(node, params_dict, opset_version)
/home/gderosa/miniconda3/envs/archai38/lib/python3.8/site-packages/torch/onnx/utils.py:687: UserWarning: The shape inference of prim::Constant type is missing, so it may result in wrong shape inference for the exported graph. Please consider adding it in symbolic function. (Triggered internally at ../torch/csrc/jit/passes/onnx/shape_type_inference.cpp:1884.)
_C._jit_pass_onnx_graph_shape_type_inference(
/home/gderosa/miniconda3/envs/archai38/lib/python3.8/site-packages/torch/onnx/utils.py:1178: UserWarning: The shape inference of prim::C[...]<jupyter_text>Custom Evaluator Example Let's create a simple custom `ModelEvaluator` that counts the number of modules in a model<jupyter_code>from archai.api.dataset_provider import DatasetProvider
from archai.discrete_search.api import ModelEvaluator
class NumberOfModules(ModelEvaluator):
''' Class that measures the size of a model by the number of torch modules '''
@overrides
def evaluate(self, model: ArchaiModel,
budget: Optional[float] = None):
return len(list(model.arch.modules()))
m = ss.random_sample()
my_objective = NumberOfModules()
my_objective.evaluate(m)<jupyter_output><empty_output><jupyter_text>Useful Evaluators * [RayParallelEvaluator](https://microsoft.github.io/archai/reference/api/archai.discrete_search.objectives.html#module-archai.discrete_search.evaluators.ray) - Wraps an existing `ModelEvaluator` into a new `AsyncModelEvaluator` that runs evaluation jobs using multiple Ray workers.* [EvaluationFunction](https://microsoft.github.io/archai/reference/api/archai.discrete_search.objectives.html#module-archai.discrete_search.evaluators.functional) - Wraps a function that takes (model, budget) arguments and creates a `ModelEvaluator` Example: Parallelizing NumberOfModules Let's use `RayParallelEvaluator` to make our custom evaluator `NumberOfModules` run more efficiently.<jupyter_code>from archai.discrete_search.evaluators import RayParallelEvaluator
my_objective_parallel = RayParallelEvaluator(
NumberOfModules(),
timeout=10, # Timeout in seconds
num_cpus=1.0 # Each evaluation job will use a CPU core
)<jupyter_output><empty_output><jupyter_text>`my_objective_parallel` is now an `AsyncModelEvaluator` object. We can send evaluation jobs calling `AsyncModelEvaluator.send(model, budget)`:<jupyter_code>model_list = [ss.random_sample() for _ in range(10)]
for model in model_list:
print(f'Dispatching job for {model.archid}')
my_objective_parallel.send(model, budget=None)<jupyter_output>Dispatching job for 4aba6fbdb292e44d634daefa425ab1406684daed_64_64<jupyter_text>We can fetch and clear all jobs from the job queue by calling `AsyncModelEvaluator.fetch_all()`<jupyter_code>my_objective_parallel.fetch_all()<jupyter_output><empty_output><jupyter_text>After that, job queue should be empty<jupyter_code>assert my_objective_parallel.fetch_all() == []<jupyter_output><empty_output><jupyter_text>Example: Wrapping custom training code into an Evaluator Let's consider the problem of measuring the task performance on a specific dataset with custom training code.<jupyter_code>from archai.datasets.cv.mnist_dataset_provider import MnistDatasetProvider
from archai.discrete_search.evaluators import EvaluationFunction<jupyter_output><empty_output><jupyter_text>Datasets in Archai are defined using dataset providers. We will use the built-in `MnistProvider` dataset provider for the MNIST dataset.<jupyter_code>dataset_provider = MnistDatasetProvider()<jupyter_output><empty_output>
|
archai/docs/getting_started/notebooks/discrete_search/evaluators.ipynb/0
|
{
"file_path": "archai/docs/getting_started/notebooks/discrete_search/evaluators.ipynb",
"repo_id": "archai",
"token_count": 2252
}
| 339 |
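The notebook ends right after creating the MNIST dataset provider; the step it is building toward is wrapping custom training code with `EvaluationFunction`, which, per the description above, turns a `(model, budget)` callable into a `ModelEvaluator`. A hedged sketch of that wiring (assuming `EvaluationFunction` takes the callable as its sole positional argument; the training body is a stub):

```python
from archai.discrete_search.evaluators import EvaluationFunction

def train_and_score(model, budget=None) -> float:
    # Stub: train model.arch on data from dataset_provider for a number of
    # steps scaled by `budget`, then return the validation accuracy.
    return 0.0

accuracy_objective = EvaluationFunction(train_and_score)
accuracy_objective.evaluate(ss.random_sample(), budget=None)
```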
Quick Start
===========
In this quickstart example, we will apply Archai in Natural Language Processing to find Pareto-optimal Transformer configurations according to a set of objectives.
Creating the Search Space
-------------------------
We start by importing the `TransformerFlexSearchSpace` class which represents the search space for the Transformer architecture:
.. code-block:: python
from archai.discrete_search.search_spaces.nlp.transformer_flex.search_space import TransformerFlexSearchSpace
space = TransformerFlexSearchSpace("gpt2")
Defining Search Objectives
--------------------------
Next, we define the objectives we want to optimize. In this example, we use `NonEmbeddingParamsProxy`, `TransformerFlexOnnxLatency`, and `TransformerFlexOnnxMemory` to define the objectives:
.. code-block:: python
from archai.discrete_search.api.search_objectives import SearchObjectives
from archai.discrete_search.evaluators.nlp.parameters import NonEmbeddingParamsProxy
from archai.discrete_search.evaluators.nlp.transformer_flex_latency import TransformerFlexOnnxLatency
from archai.discrete_search.evaluators.nlp.transformer_flex_memory import TransformerFlexOnnxMemory
search_objectives = SearchObjectives()
search_objectives.add_objective(
"non_embedding_params",
NonEmbeddingParamsProxy(),
higher_is_better=True,
compute_intensive=False,
constraint=(1e6, 1e9),
)
search_objectives.add_objective(
"onnx_latency",
TransformerFlexOnnxLatency(space),
higher_is_better=False,
compute_intensive=False,
)
search_objectives.add_objective(
"onnx_memory",
TransformerFlexOnnxMemory(space),
higher_is_better=False,
compute_intensive=False,
)
Initializing the Algorithm
--------------------------
We use the `EvolutionParetoSearch` algorithm to conduct the search:
.. code-block:: python
from archai.discrete_search.algos.evolution_pareto import EvolutionParetoSearch
algo = EvolutionParetoSearch(
space,
search_objectives,
None,
"tmp",
num_iters=5,
init_num_models=10,
seed=1234,
)
Performing the Search
---------------------
Finally, we call the `search()` method to start the NAS process:
.. code-block:: python
algo.search()
The algorithm will iterate through different network architectures, evaluate their performance based on the defined objectives, and ultimately produce a frontier of Pareto-optimal results.
|
archai/docs/getting_started/quick_start.rst/0
|
{
"file_path": "archai/docs/getting_started/quick_start.rst",
"repo_id": "archai",
"token_count": 868
}
| 340 |
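Pareto-optimal here means no other evaluated architecture is at least as good on every objective and strictly better on at least one. A small, self-contained illustration of that dominance rule (a generic helper for intuition, not part of Archai's API):

.. code-block:: python

    from typing import List, Tuple

    def pareto_frontier(points: List[Tuple[float, ...]]) -> List[Tuple[float, ...]]:
        """Return the non-dominated points, assuming lower is better on every axis."""
        def dominates(a, b):
            return all(x <= y for x, y in zip(a, b)) and any(x < y for x, y in zip(a, b))
        return [p for p in points if not any(dominates(q, p) for q in points if q != p)]

    # e.g. (latency_ms, memory_mb): (6, 4) is dominated by (5, 3) and drops out
    print(pareto_frontier([(2, 9), (5, 3), (6, 4)]))  # [(2, 9), (5, 3)]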
Evaluators
==========
.. toctree::
:maxdepth: 2
archai.discrete_search.evaluators.benchmark
archai.discrete_search.evaluators.nlp
archai.discrete_search.evaluators.pt_profiler_utils
Functional
----------
.. automodule:: archai.discrete_search.evaluators.functional
:members:
:undoc-members:
ONNX Model
----------
.. automodule:: archai.discrete_search.evaluators.onnx_model
:members:
:undoc-members:
Progressive Training
--------------------
.. automodule:: archai.discrete_search.evaluators.progressive_training
:members:
:undoc-members:
PyTorch Profiler
----------------
.. automodule:: archai.discrete_search.evaluators.pt_profiler
:members:
:undoc-members:
Ray
---
.. automodule:: archai.discrete_search.evaluators.ray
:members:
:undoc-members:
Remote Azure Benchmark
----------------------
.. automodule:: archai.discrete_search.evaluators.remote_azure_benchmark
:members:
:undoc-members:
|
archai/docs/reference/api/archai.discrete_search.evaluators.rst/0
|
{
"file_path": "archai/docs/reference/api/archai.discrete_search.evaluators.rst",
"repo_id": "archai",
"token_count": 348
}
| 341 |
Quantization
============
.. toctree::
:maxdepth: 2
archai.quantization.nlp
Modules
-------
.. automodule:: archai.quantization.modules
:members:
:undoc-members:
Observers
---------
.. automodule:: archai.quantization.observers
:members:
:undoc-members:
Quantizers
----------
.. automodule:: archai.quantization.quantizers
:members:
:undoc-members:
Quantization (Utilities)
------------------------
.. automodule:: archai.quantization.quantization_utils
:members:
:undoc-members:
Post-Training Quantization (PTQ)
--------------------------------
.. automodule:: archai.quantization.ptq
:members:
:undoc-members:
Quantization-Aware Training (QAT)
---------------------------------
.. automodule:: archai.quantization.qat
:members:
:undoc-members:
Mixed-QAT
---------
.. automodule:: archai.quantization.mixed_qat
:members:
:undoc-members:
|
archai/docs/reference/api/archai.quantization.rst/0
|
{
"file_path": "archai/docs/reference/api/archai.quantization.rst",
"repo_id": "archai",
"token_count": 312
}
| 342 |
Supergraph
==========
.. toctree::
:maxdepth: 2
archai.supergraph.algos
archai.supergraph.datasets
archai.supergraph.models
archai.supergraph.nas
archai.supergraph.utils
|
archai/docs/reference/api/archai.supergraph.rst/0
|
{
"file_path": "archai/docs/reference/api/archai.supergraph.rst",
"repo_id": "archai",
"token_count": 74
}
| 343 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import random
from hashlib import sha1
from typing import List, Optional
import numpy as np
from lm_eval.base import CachingLM
from lm_eval.evaluator import evaluate
from lm_eval.tasks import get_task_dict
from lm_eval.utils import run_task_tests
from lm_eval_harness.lm_eval_hf_model import HFEvalModel
def evaluate_wrapper(
hf_model: HFEvalModel,
tasks: List[str],
num_fewshot: Optional[int] = 0,
no_cache: Optional[bool] = False,
limit: Optional[int] = None,
bootstrap_iters: Optional[int] = 100000,
description_dict: Optional[str] = None,
check_integrity: Optional[bool] = False,
decontamination_ngrams_path: Optional[str] = None,
):
random.seed(1234)
np.random.seed(1234)
if not no_cache:
hf_model_id = sha1(repr(hf_model.model).encode("ascii")).hexdigest()
hf_model = CachingLM(hf_model, f"cache/{hf_model_id}.db")
if check_integrity:
run_task_tests(task_list=tasks)
task_dict = get_task_dict(tasks)
results = evaluate(
lm=hf_model,
task_dict=task_dict,
num_fewshot=num_fewshot,
limit=limit,
bootstrap_iters=bootstrap_iters,
description_dict=description_dict,
decontamination_ngrams_path=decontamination_ngrams_path,
)
results["config"] = {
"num_fewshot": num_fewshot,
"no_cache": no_cache,
"limit": limit,
"bootstrap_iters": bootstrap_iters,
"description_dict": description_dict,
}
return results
|
archai/research/lm_eval_harness/lm_eval_harness/lm_eval_evaluator.py/0
|
{
"file_path": "archai/research/lm_eval_harness/lm_eval_harness/lm_eval_evaluator.py",
"repo_id": "archai",
"token_count": 670
}
| 344 |
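A hedged usage sketch of `evaluate_wrapper` (the `HFEvalModel` constructor arguments are assumed here; consult `lm_eval_harness.lm_eval_hf_model` for its real signature):

```python
from transformers import AutoTokenizer, GPT2LMHeadModel

from lm_eval_harness.lm_eval_evaluator import evaluate_wrapper
from lm_eval_harness.lm_eval_hf_model import HFEvalModel

model = GPT2LMHeadModel.from_pretrained("gpt2")
tokenizer = AutoTokenizer.from_pretrained("gpt2")

hf_model = HFEvalModel(model, tokenizer)  # assumed constructor
results = evaluate_wrapper(hf_model, tasks=["piqa"], num_fewshot=0, limit=10)
print(results["results"])
```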
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import argparse
import json
import os
from transformers import GPT2LMHeadModel
from archai.common.file_utils import calculate_onnx_model_size
from archai.onnx.export import export_to_onnx
from archai.onnx.optimization import optimize_onnx
from archai.quantization.ptq import dynamic_quantization_onnx
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Exports a GPT-2 model to ONNX.")
parser.add_argument("output_model_path", type=str, help="Path to the ONNX output model.")
parser.add_argument("-op", "--opset", type=int, default=11, help="ONNX opset version.")
parser.add_argument(
"-a",
"--atol",
type=float,
default=1e-4,
help="Absolute difference to be tolerated between input and output models.",
)
parser.add_argument("-ol", "--opt_level", type=int, default=1, help="Level of the ORT optimization.")
parser.add_argument("-opt", "--optimization", action="store_true", help="Optimizes the exported model.")
parser.add_argument(
"-qnt",
"--quantization",
action="store_true",
help="Dynamically quantizes the exported model.",
)
args = parser.parse_args()
return args
if __name__ == "__main__":
args = parse_args()
model = GPT2LMHeadModel.from_pretrained("gpt2")
onnx_config = export_to_onnx(
model,
args.output_model_path,
task="causal-lm",
use_past=True,
share_weights=True,
opset=args.opset,
atol=args.atol,
)
print(f"Model: {calculate_onnx_model_size(args.output_model_path)}MB")
if args.optimization:
ort_model_path = optimize_onnx(args.output_model_path, onnx_config, opt_level=args.opt_level)
args.output_model_path = ort_model_path
print(f"Model-OPT: {calculate_onnx_model_size(args.output_model_path)}MB")
if args.quantization:
qnt_model_path = dynamic_quantization_onnx(args.output_model_path)
print(f"Model-QNT: {calculate_onnx_model_size(qnt_model_path)}MB")
# Exports model's configuration to JSON
model_config_path = os.path.join(os.path.dirname(args.output_model_path), "config.json")
with open(model_config_path, "w") as f:
json.dump(onnx_config.config.to_dict(), f)
|
archai/scripts/onnx/export_gpt2.py/0
|
{
"file_path": "archai/scripts/onnx/export_gpt2.py",
"repo_id": "archai",
"token_count": 964
}
| 345 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import dataset_utils
import torchvision
from torchvision import transforms
if __name__ == "__main__":
dataroot = dataset_utils.get_dataroot()
torchvision.datasets.STL10(
root=dataroot,
split="train",
# train=True,
download=True,
transform=transforms.Compose([]),
)
torchvision.datasets.STL10(
root=dataroot,
split="test",
# train=False,
download=True,
transform=transforms.Compose([]),
)
print("done")
|
archai/scripts/supergraph/download_datasets/torchvision_ds.py/0
|
{
"file_path": "archai/scripts/supergraph/download_datasets/torchvision_ds.py",
"repo_id": "archai",
"token_count": 249
}
| 346 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import torch
from torch_testbed import cifar10_models
from torch_testbed.dataloader_dali import cifar10_dataloaders
from torch_testbed.timing import MeasureTime, clear_timings, print_all_timings
from archai.common import utils
utils.setup_cuda(42, local_rank=0)
batch_size = 512
half = True
datadir = utils.full_path("~/dataroot")
train_dl, test_dl = cifar10_dataloaders(datadir, train_batch_size=batch_size, test_batch_size=1024, cutout=0)
model = cifar10_models.resnet18().cuda()
lr, momentum, weight_decay = 0.025, 0.9, 3.0e-4
optim = torch.optim.SGD(model.parameters(), lr, momentum=momentum, weight_decay=weight_decay)
crit = torch.nn.CrossEntropyLoss().cuda()
if half:
model = model.half()
crit = crit.half()
@MeasureTime
def iter_dl(dl):
i, d = 0, 0
for x, l in dl:
x, l = x.cuda().half() if half else x.cuda(), l.cuda()
y = model(x)
loss = crit(y, l)
optim.zero_grad()
loss.backward()
optim.step()
i += 1
d += len(x)
return i, d
def warm_up(epochs):
for _ in range(epochs):
train_dl = [
(
torch.rand(batch_size, 3, 12, 12).cuda()
if not half
else torch.rand(batch_size, 3, 12, 12).cuda().half(),
torch.LongTensor(batch_size).random_(0, 10).cuda(),
)
for _ in range(round(50000 / batch_size))
]
i, d = iter_dl(train_dl)
# warm_up(5)
# cudnn.benchmark = False
print_all_timings()
clear_timings()
for _ in range(5):
i, d = iter_dl(train_dl)
print_all_timings()
print(i, d)
exit(0)
|
archai/scripts/supergraph/performance/model_dl_test.py/0
|
{
"file_path": "archai/scripts/supergraph/performance/model_dl_test.py",
"repo_id": "archai",
"token_count": 793
}
| 347 |
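`MeasureTime` from `torch_testbed.timing` decorates `iter_dl` so every call is timed and aggregated for `print_all_timings`. A minimal stand-in showing the decorator pattern presumably involved (illustrative, not the torch_testbed implementation):

```python
import time
from collections import defaultdict
from functools import wraps

_timings = defaultdict(list)

def MeasureTime(fn):
    """Record the wall-clock duration of every call under the function's name."""
    @wraps(fn)
    def wrapper(*args, **kwargs):
        start = time.perf_counter()
        try:
            return fn(*args, **kwargs)
        finally:
            _timings[fn.__name__].append(time.perf_counter() - start)
    return wrapper

def print_all_timings():
    for name, durations in _timings.items():
        print(f"{name}: n={len(durations)}, total={sum(durations):.3f}s")

def clear_timings():
    _timings.clear()
```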
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import argparse
from transformers import GPT2Config, GPT2LMHeadModel
from archai.trainers.nlp.nvidia_trainer import NvidiaTrainer
from archai.trainers.nlp.nvidia_training_args import NvidiaTrainingArguments
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Trains a GPT-2 using the NVIDIA trainer.")
parser.add_argument("-s", "--seed", type=int, default=42, help="Random seed.")
parser.add_argument("-nc", "--no_cuda", action="store_true", help="Whether CUDA should not be used.")
parser.add_argument("-ls", "--logging_steps", type=int, default=10, help="Number of steps between logs.")
parser.add_argument("-es", "--eval_steps", type=int, default=100, help="Number of steps between evaluations.")
parser.add_argument("-dn", "--dataset_name", type=str, default="wt103", help="Name of the dataset.")
parser.add_argument("-vt", "--vocab_type", type=str, default="gpt2", help="Name of the vocabulary/tokenizer.")
parser.add_argument("-vs", "--vocab_size", type=int, default=10000, help="Size of the vocabulary.")
parser.add_argument("-bsz", "--global_batch_size", type=int, default=256, help="Global batch size.")
parser.add_argument("-seq", "--seq_len", type=int, default=192, help="Sequence length.")
parser.add_argument("-st", "--strategy", type=str, default="ddp", help="Distributed training strategy.")
parser.add_argument("-n", "--max_steps", type=int, default=250, help="Maximum number of training steps.")
parser.add_argument(
"-ng", "--gradient_accumulation_steps", type=int, default=1, help="Number of gradient accumulation steps."
)
parser.add_argument("-o", "--optim", type=str, default="jitlamb", help="Name of the optimizer.")
args = parser.parse_args()
return args
if __name__ == "__main__":
args = parse_args()
training_args = NvidiaTrainingArguments(
"nvidia-gpt2",
seed=args.seed,
no_cuda=args.no_cuda,
logging_steps=args.logging_steps,
eval_steps=args.eval_steps,
dataset_name=args.dataset_name,
vocab_type=args.vocab_type,
vocab_size=args.vocab_size,
global_batch_size=args.global_batch_size,
seq_len=args.seq_len,
strategy=args.strategy,
max_steps=args.max_steps,
gradient_accumulation_steps=args.gradient_accumulation_steps,
optim=args.optim,
)
config = GPT2Config(
vocab_size=args.vocab_size,
n_positions=args.seq_len,
n_embd=512,
n_layer=16,
n_head=8,
embd_pdrop=0.0,
attn_pdrop=0.0,
use_cache=False,
)
model = GPT2LMHeadModel(config=config)
trainer = NvidiaTrainer(model=model, args=training_args)
trainer.train()
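# Example (sketch): a short smoke-test training run from the command line.
# Flag names mirror parse_args() above; the values are illustrative only:
#
#   python train_gpt2.py --dataset_name wt103 --max_steps 100 --global_batch_size 64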
|
archai/scripts/trainers/nvidia/train_gpt2.py/0
|
{
"file_path": "archai/scripts/trainers/nvidia/train_gpt2.py",
"repo_id": "archai",
"token_count": 1108
}
| 348 |
#!/bin/bash
# If the python app runs out of memory (due to various leaks in python
# libraries), the process terminates with 'killed'; this loop restarts the runner.
script_dir="$(dirname ${BASH_SOURCE})"
source ~/anaconda3/etc/profile.d/conda.sh
conda activate snap37
while true
do
    python "${script_dir}/runner.py" "$@"
    # Exit the loop only when the runner finishes cleanly; a kill (e.g. OOM)
    # yields a non-zero status, so we fall through and restart it.
    if [ $? -eq 0 ]; then
        exit 0
    fi
echo "sleeping for 30 seconds..."
sleep 30
done
|
archai/tasks/face_segmentation/aml/azure/loop.sh/0
|
{
"file_path": "archai/tasks/face_segmentation/aml/azure/loop.sh",
"repo_id": "archai",
"token_count": 148
}
| 349 |
# Setup
This folder contains the session setup used on the machine that is connected
to the Qualcomm boards; that machine runs N screen sessions, one per Qualcomm
board, each running the loop.sh script.
If you also want to clean up stale Kubernetes pods, add `--cleanup_stale_pods`
once you have configured `az login` and `az aks get-credentials --resource-group $resource_group --name $aks_cluster`
so that the runner script can call `cleanup_stale_pods.py`.
|
archai/tasks/face_segmentation/aml/setup/readme.md/0
|
{
"file_path": "archai/tasks/face_segmentation/aml/setup/readme.md",
"repo_id": "archai",
"token_count": 124
}
| 350 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
import sys
from glob import glob
from shutil import copyfile
from archai.common.config import Config
from archai.common.store import ArchaiStore
from azure.ai.ml.entities._credentials import AccountKeyConfiguration
from azure.ai.ml.entities import AzureBlobDatastore
import archai.common.azureml_helper as aml_helper
from archai.discrete_search.api.archai_model import ArchaiModel
def configure_store(aml_config: Config, blob_container_name: str = None) -> ArchaiStore:
con_str = aml_config.get('connection_str')
if not con_str:
print("Please set environment variable 'MODEL_STORAGE_CONNECTION_STRING' containing the Azure storage account connection " +
"string for the Azure storage account you want to use to control this experiment.")
sys.exit(1)
if blob_container_name is None:
blob_container_name = aml_config.get('blob_container_name', 'models')
experiment_name = aml_config.get('experiment_name', 'facesynthetics')
partition_key = aml_config.get('partition_key', 'main')
storage_account_name, storage_account_key = ArchaiStore.parse_connection_string(con_str)
return ArchaiStore(storage_account_name, storage_account_key, blob_container_name, experiment_name, partition_key)
def register_datastore(ml_client, data_store_name, blob_container_name, storage_account_name, storage_account_key, experiment_name):
    credentials = AccountKeyConfiguration(account_key=storage_account_key)
    try:
        model_store = ml_client.datastores.get(data_store_name)
    except Exception:
        # The datastore does not exist yet, so create it.
        model_store = AzureBlobDatastore(
            name=data_store_name,
            description="Datastore pointing to a blob container.",
            account_name=storage_account_name,
            container_name=blob_container_name,
            credentials=credentials,
        )
    else:
        # Validate outside the `except` handler so these errors are not swallowed.
        if model_store.container_name != blob_container_name:
            raise Exception(f'The container name does not match. Only the credentials on {data_store_name} can be updated')
        if model_store.account_name != storage_account_name:
            raise Exception(f'The storage account name does not match. Only the credentials on {data_store_name} can be updated')
        model_store.credentials = credentials
    ml_client.create_or_update(model_store)
return f'azureml://datastores/{data_store_name}/paths/{experiment_name}'
def create_cluster(ml_client, config, key):
section = config[key]
compute_name = section['name']
size = section['size']
location = section['location']
max_instances = section.get('max_instances', 1)
aml_helper.create_compute_cluster(ml_client, compute_name, size=size, location=location, max_instances=max_instances)
return compute_name
def copy_code_folder(src_dir, target_dir):
""" Copies the code folder into a separate folder. This is needed otherwise the pipeline will fail with
UserError: The code snapshot was modified in blob storage, which could indicate tampering.
If this was unintended, you can create a new snapshot for the run. To do so, edit any
content in the local source directory and resubmit the run.
"""
os.makedirs(target_dir, exist_ok=True)
for path in glob(os.path.join(src_dir, '*.py')):
file = os.path.basename(path)
print(f"copying source file : {file} to {target_dir}")
copyfile(path, os.path.join(target_dir, file))
for name in os.listdir(src_dir):
path = os.path.join(src_dir, name)
if os.path.isdir(path):
copy_code_folder(path, os.path.join(target_dir, name))
def get_valid_arch_id(arch: ArchaiModel):
    # A bug in the Azure ML SDK requires that blob store folder names not begin with digits, so we prefix with 'id_'.
return f'id_{arch.archid}'
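# Example (hypothetical usage sketch): wiring the helpers above together.
# The config file name, config section, and ArchaiStore entity API used below
# are assumptions for illustration, not guaranteed by this module:
#
#   config = Config('confs/aml_search.yaml')
#   store = configure_store(config['aml'])
#   entity = store.get_status('id_abc123')    # assumed ArchaiStore method
#   entity['status'] = 'complete'
#   store.merge_status_entity(entity)         # assumed ArchaiStore method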
|
archai/tasks/face_segmentation/aml/util/setup.py/0
|
{
"file_path": "archai/tasks/face_segmentation/aml/util/setup.py",
"repo_id": "archai",
"token_count": 1366
}
| 351 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Tuple
import torch
from torch import nn
from archai.discrete_search.search_spaces.config import (
ArchConfig, ArchParamTree, DiscreteChoice, ConfigSearchSpace, repeat_config
)
from .ops import ReluConv2d, Conv2dSamePadding, OPS
def hgnet_param_tree_factory(stem_strides: Tuple[int, ...] = (2, 4),
max_num_hourglass: int = 2,
share_hourglass_arch: bool = False,
base_channels: Tuple[int, ...] = (16, 24, 32, 48),
op_subset: Tuple[str, ...] = ('conv3x3', 'conv5x5', 'conv7x7'),
num_blocks: int = 4,
downsample_block_max_ops: int = 5,
post_upsample_max_ops: int = 3,
skip_block_max_ops: int = 3,
upsample_block_max_ops: int = 4):
assert num_blocks > 1, 'num_blocks must be greater than 1'
return ArchParamTree({
'stem_stride': DiscreteChoice(stem_strides),
'base_ch': DiscreteChoice(base_channels),
'hourglasses': repeat_config({
'downsample_blocks': repeat_config({
'layers': repeat_config({
'op': DiscreteChoice(op_subset)
}, repeat_times=range(1, downsample_block_max_ops + 1), share_arch=False),
'ch_expansion_factor': DiscreteChoice([1.0, 1.2, 1.5, 1.6, 2.0, 2.2]),
}, repeat_times=num_blocks),
'skip_blocks': repeat_config({
'layers': repeat_config({
'op': DiscreteChoice(op_subset)
}, repeat_times=range(0, skip_block_max_ops+1), share_arch=False),
}, repeat_times=num_blocks-1),
'upsample_blocks': repeat_config({
'layers': repeat_config({
'op': DiscreteChoice(op_subset)
}, repeat_times=range(1, upsample_block_max_ops+1), share_arch=False),
}, repeat_times=num_blocks-1),
}, repeat_times=range(1, max_num_hourglass+1), share_arch=share_hourglass_arch),
'post_upsample_layers': repeat_config({
'op': DiscreteChoice(op_subset)
}, repeat_times=range(0, post_upsample_max_ops+1), share_arch=False)
})
class Hourglass(nn.Module):
def __init__(self, arch_config: ArchConfig, base_channels: int):
super().__init__()
self.base_channels = base_channels
self.upsample = nn.UpsamplingBilinear2d(scale_factor=2)
self.chs = [self.base_channels]
# Calculates channels on each branch
for block_cfg in arch_config.pick('downsample_blocks'):
self.chs.append(
int(self.chs[-1] * block_cfg.pick('ch_expansion_factor'))
)
self.nb_blocks = len(self.chs) - 1
# Downsample blocks
self.down_blocks = nn.ModuleList()
for block_idx, block_cfg in enumerate(arch_config.pick('downsample_blocks')):
in_ch, out_ch = self.chs[block_idx], self.chs[block_idx + 1]
down_block = [
OPS[layer_cfg.pick('op')](
(in_ch if layer_idx == 0 else out_ch),
out_ch,
stride=(2 if (layer_idx == 0 and block_idx > 0) else 1)
)
for layer_idx, layer_cfg in enumerate(block_cfg.pick('layers'))
]
self.down_blocks.append(nn.Sequential(*down_block))
# Skip blocks
self.skip_blocks = nn.ModuleList()
for block_idx, block_cfg in enumerate(arch_config.pick('skip_blocks')):
out_ch = self.chs[block_idx + 1]
skip_block = [
                OPS[layer_cfg.pick('op')](out_ch, out_ch)
for layer_idx, layer_cfg in enumerate(block_cfg.pick('layers'))
]
self.skip_blocks.append(nn.Sequential(*skip_block))
# Upsample blocks
self.up_blocks = nn.ModuleList()
for block_idx, block_cfg in enumerate(arch_config.pick('upsample_blocks')):
in_ch, out_ch = self.chs[block_idx + 1], self.chs[block_idx + 2]
up_block = [
                OPS[layer_cfg.pick('op')](
(out_ch if layer_idx == 0 else in_ch), in_ch
)
for layer_idx, layer_cfg in enumerate(block_cfg.pick('layers'))
]
self.up_blocks.append(nn.Sequential(*up_block))
# Converts output to `base_channels`
self.final_conv = nn.Conv2d(self.chs[1], base_channels, kernel_size=1)
def forward(self, x: torch.FloatTensor) -> torch.FloatTensor:
skip_connections = [0 for _ in range(self.nb_blocks - 1)]
inp = x
for i in range(self.nb_blocks - 1):
out = self.down_blocks[i](inp)
skip_connections[i] = self.skip_blocks[i](out)
inp = out
# Last downsample branch
out = self.down_blocks[-1](inp)
        for i in reversed(range(self.nb_blocks - 1)):
out = skip_connections[i] + self.up_blocks[i](self.upsample(out))
return self.final_conv(out)
class StackedHourglass(nn.Module):
def __init__(self, arch_config: ArchConfig, num_classes: int, in_channels: int = 3):
super().__init__()
self.num_classes = num_classes
self.in_channels = in_channels
self.arch_config = arch_config
self.base_channels = arch_config.pick('base_ch')
# Stem convolution
self.stem_stride = arch_config.pick('stem_stride')
self.stem_conv = ReluConv2d(
in_channels=in_channels, out_channels=self.base_channels,
stride=self.stem_stride
)
self.final_upsample = nn.UpsamplingBilinear2d(scale_factor=self.stem_stride)
self.hgs = nn.Sequential(*[
Hourglass(hg_conf, self.base_channels)
for hg_conf in arch_config.pick('hourglasses')
])
self.post_upsample = nn.Sequential(*[
OPS[layer_cfg.pick('op')](self.base_channels, self.base_channels)
for layer_cfg in arch_config.pick('post_upsample_layers')
])
self.classifier = Conv2dSamePadding(self.base_channels, num_classes, kernel_size=1)
def forward(self, x: torch.Tensor) -> torch.Tensor:
out = self.stem_conv(x)
out = self.hgs(out)
out = self.post_upsample(self.final_upsample(out))
return self.classifier(out)
class HgnetSegmentationSearchSpace(ConfigSearchSpace):
def __init__(self,
num_classes: int,
img_size: Tuple[int, int],
in_channels: int = 3,
op_subset: Tuple[str, ...] = ('conv3x3', 'conv5x5', 'conv7x7'),
stem_strides: Tuple[int, ...] = (1, 2, 4),
num_blocks: int = 4,
downsample_block_max_ops: int = 4,
skip_block_max_ops: int = 2,
upsample_block_max_ops: int = 4,
post_upsample_max_ops: int = 3,
**ss_kwargs):
possible_downsample_factors = [
2**num_blocks * stem_stride for stem_stride in stem_strides
]
w, h = img_size
assert all(w % d_factor == 0 for d_factor in possible_downsample_factors), \
f'Image width must be divisible by all possible downsample factors ({2**num_blocks} * stem_stride)'
assert all(h % d_factor == 0 for d_factor in possible_downsample_factors), \
f'Image height must be divisible by all possible downsample factors ({2**num_blocks} * stem_stride)'
ss_kwargs['builder_kwargs'] = {
'op_subset': op_subset,
'stem_strides': stem_strides,
'num_blocks': num_blocks,
'downsample_block_max_ops': downsample_block_max_ops,
'skip_block_max_ops': skip_block_max_ops,
'upsample_block_max_ops': upsample_block_max_ops,
'post_upsample_max_ops': post_upsample_max_ops
}
ss_kwargs['model_kwargs'] = {
'num_classes': num_classes,
'in_channels': in_channels,
}
self.img_size = img_size
self.in_channels = in_channels
super().__init__(StackedHourglass, hgnet_param_tree_factory, **ss_kwargs)
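# Example (sketch): constructing the search space and sampling one architecture.
# `random_sample()` is assumed to come from the ConfigSearchSpace base class;
# the image size must be divisible by every downsample factor checked above:
#
#   ss = HgnetSegmentationSearchSpace(num_classes=18, img_size=(256, 256))
#   model = ss.random_sample()  # ArchaiModel wrapping a StackedHourglass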
|
archai/tasks/face_segmentation/search_space/hgnet.py/0
|
{
"file_path": "archai/tasks/face_segmentation/search_space/hgnet.py",
"repo_id": "archai",
"token_count": 4282
}
| 352 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Any, Optional
from torch import nn, Tensor, flatten
import torch
from torch.ao.quantization import DeQuantStub, QuantStub
from model import CustomInvertedResidual, CustomMobileNetV2
from torchvision.ops import Conv2dNormActivation
from torchvision.models.quantization.utils import _fuse_modules
# Adapted from https://github.com/pytorch/vision/blob/main/torchvision/models/quantization/mobilenetv2.py
class CustomQuantizableInvertedResidual(CustomInvertedResidual):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.skip_add = nn.quantized.FloatFunctional()
def forward(self, x: Tensor) -> Tensor:
if self.use_res_connect:
return self.skip_add.add(x, self.conv(x))
else:
return self.conv(x)
def fuse_model(self, is_qat: Optional[bool] = None) -> None:
for idx in range(len(self.conv)):
if type(self.conv[idx]) is nn.Conv2d:
_fuse_modules(self.conv, [str(idx), str(idx + 1)], is_qat, inplace=True)
class CustomQuantizableMobileNetV2(CustomMobileNetV2):
def __init__(self, num_skip_qat_layers: int = 0, *args: Any, **kwargs: Any) -> None:
"""
MobileNet V2 main class
Args:
Inherits args from floating point MobileNetV2
"""
super().__init__(*args, **kwargs)
self.quant = QuantStub()
self.dequant = DeQuantStub()
assert len(self.features) >= num_skip_qat_layers
self.num_skip_qat_layers = num_skip_qat_layers
def forward(self, x: Tensor) -> Tensor:
x = self.quant(x)
# Split features for qat and non-qat
qat_features = self.features if self.num_skip_qat_layers == 0 else self.features[:-1 * self.num_skip_qat_layers]
non_qat_features = self.features[len(self.features) - self.num_skip_qat_layers:]
for f in qat_features:
x = f(x)
if (self.num_skip_qat_layers > 0):
x = self.dequant(x)
# Pass data through features not to be quantized
for f in non_qat_features:
x = f(x)
x = nn.functional.adaptive_avg_pool2d(x, (1, 1))
x = flatten(x, 1)
x = self.classifier(x)
if (self.num_skip_qat_layers == 0):
x = self.dequant(x)
return x
def setup_qconfig(self, engine: str) -> None:
# Disable quantization config for layers that are NOT to be quantized
torch.backends.quantized.engine = engine
self.qconfig = torch.quantization.get_default_qat_qconfig(engine)
for f in self.features[len(self.features) - self.num_skip_qat_layers:]:
f.qconfig = None
def fuse_model(self) -> None:
for m in self.modules():
if type(m) is Conv2dNormActivation:
_fuse_modules(m, ["0", "1", "2"], is_qat=True, inplace=True)
if type(m) is CustomQuantizableInvertedResidual:
m.fuse_model(is_qat=True)
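# Example (sketch, assumed workflow): preparing the model for QAT with the
# qnnpack backend. Constructor arguments other than num_skip_qat_layers are
# forwarded to CustomMobileNetV2 and are illustrative here:
#
#   model = CustomQuantizableMobileNetV2(num_skip_qat_layers=2)
#   model.setup_qconfig('qnnpack')
#   model.fuse_model()
#   model.train()
#   torch.quantization.prepare_qat(model, inplace=True)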
|
archai/tasks/facial_landmark_detection/quantizable_model.py/0
|
{
"file_path": "archai/tasks/facial_landmark_detection/quantizable_model.py",
"repo_id": "archai",
"token_count": 1376
}
| 353 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import torch
from archai.datasets.cv.transforms.custom_cutout import CustomCutout
def test_custom_cutout():
# Assert that it works with length argument
c = CustomCutout(length=10)
img = torch.ones((3, 20, 20))
result = c(img)
    num_zero_elements = (result == 0).sum().item()
    assert num_zero_elements > 0
    # Assert that it produces different results (due to randomness) for the same image
c1 = CustomCutout(length=10)
c2 = CustomCutout(length=10)
img1 = torch.ones((3, 20, 20))
img2 = torch.ones((3, 20, 20))
result1 = c1(img1)
result2 = c2(img2)
assert not torch.equal(result1, result2)
|
archai/tests/datasets/cv/transforms/test_custom_cutout.py/0
|
{
"file_path": "archai/tests/datasets/cv/transforms/test_custom_cutout.py",
"repo_id": "archai",
"token_count": 269
}
| 354 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from archai.discrete_search.search_spaces.nlp.transformer_flex.models.configuration_gpt2_flex import (
GPT2FlexConfig,
)
def test_gpt2_flex_config():
# Assert that the config has the correct values
config = GPT2FlexConfig(n_layer=3, primer_square=True)
assert config.model_type == "gpt2-flex"
assert config.primer_square is True
assert config.activation_function == "relu"
assert config.n_inner is not None
assert config.n_inner == [4 * config.n_embd for _ in range(config.n_layer)]
assert config.n_head is not None
assert config.n_head == [12 for _ in range(config.n_layer)]
|
archai/tests/discrete_search/search_spaces/nlp/transformer_flex/models/test_configuration_gpt2_flex.py/0
|
{
"file_path": "archai/tests/discrete_search/search_spaces/nlp/transformer_flex/models/test_configuration_gpt2_flex.py",
"repo_id": "archai",
"token_count": 245
}
| 355 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import pytest
import torch
import transformers
from archai.quantization.nlp.modules import FakeDynamicQuant, FakeDynamicQuantHFConv1D
@pytest.fixture
def fake_dynamic_quant_hf_conv1d():
return FakeDynamicQuantHFConv1D(nf=3, nx=2)
def test_fake_dynamic_quant_hf_conv1d_init(fake_dynamic_quant_hf_conv1d):
# Assert that the `fake_dynamic_quant_hf_conv1d` is initialized correctly
assert fake_dynamic_quant_hf_conv1d.nf == 3
assert isinstance(fake_dynamic_quant_hf_conv1d.weight_fake_quant, FakeDynamicQuant)
assert isinstance(fake_dynamic_quant_hf_conv1d.input_pre_process, FakeDynamicQuant)
def test_fake_dynamic_quant_hf_conv1d_fake_quant_weight(fake_dynamic_quant_hf_conv1d):
# Assert that the `fake_quant_weight` has correct shape and type
fake_quant_weight = fake_dynamic_quant_hf_conv1d.fake_quant_weight
assert fake_quant_weight.shape == (2, 3)
assert isinstance(fake_quant_weight, torch.Tensor)
def test_fake_dynamic_quant_hf_conv1d_forward(fake_dynamic_quant_hf_conv1d):
x = torch.randn(3, 2)
# Assert that the `output` has correct shape and type
output = fake_dynamic_quant_hf_conv1d(x)
assert output.shape == (3, 3)
assert isinstance(output, torch.Tensor)
def test_fake_dynamic_quant_hf_conv1d_from_float():
mod = transformers.modeling_utils.Conv1D(nf=3, nx=2)
qconfig = torch.quantization.get_default_qat_qconfig("qnnpack")
# Assert that the `quantized_mod` has correct attributes, values and types
quantized_mod = FakeDynamicQuantHFConv1D.from_float(mod, qconfig)
assert quantized_mod.nf == mod.nf
assert torch.equal(quantized_mod.weight, mod.weight)
assert torch.equal(quantized_mod.bias, mod.bias)
assert isinstance(quantized_mod.weight_fake_quant, FakeDynamicQuant)
assert isinstance(quantized_mod.input_pre_process, FakeDynamicQuant)
def test_fake_dynamic_quant_hf_conv1d_to_float(fake_dynamic_quant_hf_conv1d):
# Assert that the `float_mod` has correct attributes, values and types
float_mod = fake_dynamic_quant_hf_conv1d.to_float()
assert float_mod.nf == fake_dynamic_quant_hf_conv1d.nf
assert torch.equal(
float_mod.weight, fake_dynamic_quant_hf_conv1d.weight_fake_quant(fake_dynamic_quant_hf_conv1d.weight)
)
assert torch.equal(float_mod.bias, fake_dynamic_quant_hf_conv1d.bias)
|
archai/tests/quantization/nlp/test_nlp_modules.py/0
|
{
"file_path": "archai/tests/quantization/nlp/test_nlp_modules.py",
"repo_id": "archai",
"token_count": 937
}
| 356 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
import tempfile
import torch
from transformers import TrainerState, TrainingArguments
from archai.trainers.nlp.hf_trainer import HfTrainer
def test_hf_trainer_rotate_checkpoints():
model = torch.nn.Linear(10, 5)
args = TrainingArguments("tmp", save_total_limit=2, load_best_model_at_end=False)
trainer = HfTrainer(model, args=args)
state = TrainerState(best_model_checkpoint=None)
trainer.state = state
with tempfile.TemporaryDirectory() as temp_dir:
checkpoint_1 = os.path.join(temp_dir, "checkpoint-1")
os.mkdir(checkpoint_1)
checkpoint_2 = os.path.join(temp_dir, "checkpoint-2")
os.mkdir(checkpoint_2)
checkpoint_3 = os.path.join(temp_dir, "checkpoint-3")
os.mkdir(checkpoint_3)
# Assert that nothing happens when `save_total_limit` is None or 0
trainer.args.save_total_limit = None
trainer._rotate_checkpoints(output_dir=temp_dir)
assert os.path.exists(checkpoint_1)
assert os.path.exists(checkpoint_2)
assert os.path.exists(checkpoint_3)
trainer.args.save_total_limit = 0
trainer._rotate_checkpoints(output_dir=temp_dir)
assert os.path.exists(checkpoint_1)
assert os.path.exists(checkpoint_2)
assert os.path.exists(checkpoint_3)
# Assert that only the oldest checkpoint is deleted
trainer.args.save_total_limit = 2
trainer._rotate_checkpoints(output_dir=temp_dir)
assert not os.path.exists(checkpoint_1)
assert os.path.exists(checkpoint_2)
assert os.path.exists(checkpoint_3)
# Assert that the last checkpoint is not deleted when `load_best_model_at_end` is True
trainer.args.load_best_model_at_end = True
trainer.state.best_model_checkpoint = checkpoint_3
trainer._rotate_checkpoints(output_dir=temp_dir)
assert not os.path.exists(checkpoint_1)
assert os.path.exists(checkpoint_2)
assert os.path.exists(checkpoint_3)
|
archai/tests/trainers/nlp/test_hf_trainer.py/0
|
{
"file_path": "archai/tests/trainers/nlp/test_hf_trainer.py",
"repo_id": "archai",
"token_count": 855
}
| 357 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import logging
from msrest.service_client import ServiceClient
from ._file_cache import RESOURCE_CACHE as RESOURCE_FILE_CACHE
from .client_configuration import ClientConfiguration
from .exceptions import AzureDevOpsClientRequestError
from .released.client_factory import ClientFactory
from .v7_1.location.location_client import LocationClient
from .v7_1.client_factory import ClientFactoryV7_1
from .v7_0.client_factory import ClientFactoryV7_0
logger = logging.getLogger(__name__)
class Connection(object):
"""Connection.
"""
def __init__(self, base_url=None, creds=None, user_agent=None):
self._config = ClientConfiguration(base_url)
self._config.credentials = creds
self._addition_user_agent = user_agent
if user_agent is not None:
self._config.add_user_agent(user_agent)
self._client = ServiceClient(creds, self._config)
self._client_cache = {}
self.base_url = base_url
self._creds = creds
self._resource_areas = None
self.clients = ClientFactory(self)
self.clients_v7_1 = ClientFactoryV7_1(self)
self.clients_v7_0 = ClientFactoryV7_0(self)
self.use_fiddler = False
def get_client(self, client_type):
"""get_client.
"""
if client_type not in self._client_cache:
client_class = self._get_class(client_type)
self._client_cache[client_type] = self._get_client_instance(client_class)
return self._client_cache[client_type]
@staticmethod
def _get_class(full_class_name):
parts = full_class_name.split('.')
module_name = ".".join(parts[:-1])
imported = __import__(module_name)
for comp in parts[1:]:
imported = getattr(imported, comp)
return imported
def _get_client_instance(self, client_class):
url = self._get_url_for_client_instance(client_class)
client = client_class(url, self._creds)
client.add_user_agent(self._addition_user_agent)
if self.use_fiddler:
self._configure_client_for_fiddler(client)
return client
def _get_url_for_client_instance(self, client_class):
resource_id = client_class.resource_area_identifier
if resource_id is None:
return self.base_url
else:
resource_areas = self._get_resource_areas()
if resource_areas is None:
raise AzureDevOpsClientRequestError(('Failed to retrieve resource areas '
+ 'from server: {url}').format(url=self.base_url))
if not resource_areas:
# For OnPrem environments we get an empty list.
return self.base_url
for resource_area in resource_areas:
if resource_area.id.lower() == resource_id.lower():
return resource_area.location_url
# Check SPS deployment level for the resource area
resource_area = self._get_deployment_resource_area_from_sps(resource_id)
if resource_area is not None:
return resource_area.location_url
raise AzureDevOpsClientRequestError(('Could not find information for resource area {id} '
+ 'from server: {url}').format(id=resource_id,
url=self.base_url))
def _get_deployment_resource_area_from_sps(self, resource_id):
resource_id = resource_id.lower()
if resource_id in _deployment_level_resource_areas:
return _deployment_level_resource_areas[resource_id]
location_client = LocationClient(sps_url, self._creds)
if self.use_fiddler:
self._configure_client_for_fiddler(location_client)
resource_area = location_client.get_resource_area(area_id=resource_id)
_deployment_level_resource_areas[resource_id] = resource_area
return resource_area
def authenticate(self):
self._get_resource_areas(force=True)
def _get_resource_areas(self, force=False):
if self._resource_areas is None or force:
location_client = LocationClient(self.base_url, self._creds)
if self.use_fiddler:
self._configure_client_for_fiddler(location_client)
if not force and RESOURCE_FILE_CACHE[location_client.normalized_url]:
try:
logger.debug('File cache hit for resources on: %s', location_client.normalized_url)
self._resource_areas = location_client._base_deserialize.deserialize_data(
RESOURCE_FILE_CACHE[location_client.normalized_url],
'[ResourceAreaInfo]')
return self._resource_areas
except Exception as ex:
logger.debug(ex, exc_info=True)
elif not force:
logger.debug('File cache miss for resources on: %s', location_client.normalized_url)
self._resource_areas = location_client.get_resource_areas()
if self._resource_areas is None:
# For OnPrem environments we get an empty collection wrapper.
self._resource_areas = []
try:
serialized = location_client._base_serialize.serialize_data(self._resource_areas,
'[ResourceAreaInfo]')
RESOURCE_FILE_CACHE[location_client.normalized_url] = serialized
except Exception as ex:
logger.debug(ex, exc_info=True)
return self._resource_areas
@staticmethod
def _combine_url(part1, part2):
return part1.rstrip('/') + '/' + part2.strip('/')
@staticmethod
def _configure_client_for_fiddler(client):
client.config.connection.verify = False
client.config.proxies.add(protocol='https', proxy_url='https://127.0.0.1:8888')
_deployment_level_resource_areas = {}
sps_url = 'https://app.vssps.visualstudio.com'
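# Example (sketch): a typical way to create a connection and fetch a client;
# replace the organization URL and token with your own values:
#
#   from msrest.authentication import BasicAuthentication
#   credentials = BasicAuthentication('', personal_access_token)
#   connection = Connection(base_url='https://dev.azure.com/yourorg', creds=credentials)
#   core_client = connection.get_client('azure.devops.v7_1.core.core_client.CoreClient')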
|
azure-devops-python-api/azure-devops/azure/devops/connection.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/connection.py",
"repo_id": "azure-devops-python-api",
"token_count": 2838
}
| 358 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class Account(Model):
"""
:param account_id: Identifier for an Account
:type account_id: str
:param account_name: Name for an account
:type account_name: str
:param account_owner: Owner of account
:type account_owner: str
:param account_status: Current account status
:type account_status: object
:param account_type: Type of account: Personal, Organization
:type account_type: object
:param account_uri: Uri for an account
:type account_uri: str
:param created_by: Who created the account
:type created_by: str
:param created_date: Date account was created
:type created_date: datetime
:param has_moved:
:type has_moved: bool
:param last_updated_by: Identity of last person to update the account
:type last_updated_by: str
:param last_updated_date: Date account was last updated
:type last_updated_date: datetime
:param namespace_id: Namespace for an account
:type namespace_id: str
:param new_collection_id:
:type new_collection_id: str
:param organization_name: Organization that created the account
:type organization_name: str
:param properties: Extended properties
:type properties: :class:`object <azure.devops.v7_1.accounts.models.object>`
:param status_reason: Reason for current status
:type status_reason: str
"""
_attribute_map = {
'account_id': {'key': 'accountId', 'type': 'str'},
'account_name': {'key': 'accountName', 'type': 'str'},
'account_owner': {'key': 'accountOwner', 'type': 'str'},
'account_status': {'key': 'accountStatus', 'type': 'object'},
'account_type': {'key': 'accountType', 'type': 'object'},
'account_uri': {'key': 'accountUri', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'has_moved': {'key': 'hasMoved', 'type': 'bool'},
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'str'},
'last_updated_date': {'key': 'lastUpdatedDate', 'type': 'iso-8601'},
'namespace_id': {'key': 'namespaceId', 'type': 'str'},
'new_collection_id': {'key': 'newCollectionId', 'type': 'str'},
'organization_name': {'key': 'organizationName', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'object'},
'status_reason': {'key': 'statusReason', 'type': 'str'}
}
def __init__(self, account_id=None, account_name=None, account_owner=None, account_status=None, account_type=None, account_uri=None, created_by=None, created_date=None, has_moved=None, last_updated_by=None, last_updated_date=None, namespace_id=None, new_collection_id=None, organization_name=None, properties=None, status_reason=None):
super(Account, self).__init__()
self.account_id = account_id
self.account_name = account_name
self.account_owner = account_owner
self.account_status = account_status
self.account_type = account_type
self.account_uri = account_uri
self.created_by = created_by
self.created_date = created_date
self.has_moved = has_moved
self.last_updated_by = last_updated_by
self.last_updated_date = last_updated_date
self.namespace_id = namespace_id
self.new_collection_id = new_collection_id
self.organization_name = organization_name
self.properties = properties
self.status_reason = status_reason
class AccountCreateInfoInternal(Model):
"""
:param account_name:
:type account_name: str
:param creator:
:type creator: str
:param organization:
:type organization: str
:param preferences:
:type preferences: :class:`AccountPreferencesInternal <azure.devops.v7_1.accounts.models.AccountPreferencesInternal>`
:param properties:
:type properties: :class:`object <azure.devops.v7_1.accounts.models.object>`
:param service_definitions:
:type service_definitions: list of { key: str; value: str }
"""
_attribute_map = {
'account_name': {'key': 'accountName', 'type': 'str'},
'creator': {'key': 'creator', 'type': 'str'},
'organization': {'key': 'organization', 'type': 'str'},
'preferences': {'key': 'preferences', 'type': 'AccountPreferencesInternal'},
'properties': {'key': 'properties', 'type': 'object'},
'service_definitions': {'key': 'serviceDefinitions', 'type': '[{ key: str; value: str }]'}
}
def __init__(self, account_name=None, creator=None, organization=None, preferences=None, properties=None, service_definitions=None):
super(AccountCreateInfoInternal, self).__init__()
self.account_name = account_name
self.creator = creator
self.organization = organization
self.preferences = preferences
self.properties = properties
self.service_definitions = service_definitions
class AccountPreferencesInternal(Model):
"""
:param culture:
:type culture: object
:param language:
:type language: object
:param time_zone:
:type time_zone: object
"""
_attribute_map = {
'culture': {'key': 'culture', 'type': 'object'},
'language': {'key': 'language', 'type': 'object'},
'time_zone': {'key': 'timeZone', 'type': 'object'}
}
def __init__(self, culture=None, language=None, time_zone=None):
super(AccountPreferencesInternal, self).__init__()
self.culture = culture
self.language = language
self.time_zone = time_zone
__all__ = [
'Account',
'AccountCreateInfoInternal',
'AccountPreferencesInternal',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/accounts/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/accounts/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 2267
}
| 359 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class ClientContribution(Model):
"""
    Representation of a ContributionNode that can be serialized and sent to clients.
:param description: Description of the contribution/type
:type description: str
:param id: Fully qualified identifier of the contribution/type
:type id: str
:param includes: Includes is a set of contributions that should have this contribution included in their targets list.
:type includes: list of str
:param properties: Properties/attributes of this contribution
:type properties: :class:`object <azure.devops.v7_1.contributions.models.object>`
:param targets: The ids of the contribution(s) that this contribution targets. (parent contributions)
:type targets: list of str
:param type: Id of the Contribution Type
:type type: str
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'includes': {'key': 'includes', 'type': '[str]'},
'properties': {'key': 'properties', 'type': 'object'},
'targets': {'key': 'targets', 'type': '[str]'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, description=None, id=None, includes=None, properties=None, targets=None, type=None):
super(ClientContribution, self).__init__()
self.description = description
self.id = id
self.includes = includes
self.properties = properties
self.targets = targets
self.type = type
class ClientContributionNode(Model):
"""
    Representation of a ContributionNode that can be serialized and sent to clients.
:param contribution: Contribution associated with this node.
:type contribution: :class:`ClientContribution <azure.devops.v7_1.contributions.models.ClientContribution>`
:param children: List of ids for contributions which are children to the current contribution.
:type children: list of str
:param parents: List of ids for contributions which are parents to the current contribution.
:type parents: list of str
"""
_attribute_map = {
'contribution': {'key': 'contribution', 'type': 'ClientContribution'},
'children': {'key': 'children', 'type': '[str]'},
'parents': {'key': 'parents', 'type': '[str]'}
}
def __init__(self, contribution=None, children=None, parents=None):
super(ClientContributionNode, self).__init__()
self.contribution = contribution
self.children = children
self.parents = parents
class ClientContributionProviderDetails(Model):
"""
:param display_name: Friendly name for the provider.
:type display_name: str
:param name: Unique identifier for this provider. The provider name can be used to cache the contribution data and refer back to it when looking for changes
:type name: str
:param properties: Properties associated with the provider
:type properties: dict
:param version: Version of contributions associated with this contribution provider.
:type version: str
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, display_name=None, name=None, properties=None, version=None):
super(ClientContributionProviderDetails, self).__init__()
self.display_name = display_name
self.name = name
self.properties = properties
self.version = version
class ContributionBase(Model):
"""
Base class shared by contributions and contribution types
:param description: Description of the contribution/type
:type description: str
:param id: Fully qualified identifier of the contribution/type
:type id: str
    :param visible_to: VisibleTo can be used to restrict who can reference a given contribution/type. This value should be a list of publishers or extensions that access is restricted to. Examples: "ms" - Means only the "ms" publisher can reference this. "ms.vss-web" - Means only the "vss-web" extension from the "ms" publisher can reference this.
:type visible_to: list of str
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'visible_to': {'key': 'visibleTo', 'type': '[str]'}
}
def __init__(self, description=None, id=None, visible_to=None):
super(ContributionBase, self).__init__()
self.description = description
self.id = id
self.visible_to = visible_to
class ContributionConstraint(Model):
"""
Specifies a constraint that can be used to dynamically include/exclude a given contribution
    :param group: An optional property that can be specified to group constraints together. All constraints within a group are AND'd together (all must evaluate to True in order for the contribution to be included). Different groups of constraints are OR'd (only one group needs to evaluate to True for the contribution to be included).
:type group: int
:param id: Fully qualified identifier of a shared constraint
:type id: str
:param inverse: If true, negate the result of the filter (include the contribution if the applied filter returns false instead of true)
:type inverse: bool
:param name: Name of the IContributionFilter plugin
:type name: str
:param properties: Properties that are fed to the contribution filter class
:type properties: :class:`object <azure.devops.v7_1.contributions.models.object>`
    :param relationships: Constraints can optionally be applied to one or more of the relationships defined in the contribution. If no relationships are defined then all relationships are associated with the constraint. This means the default behaviour will eliminate the contribution from the tree completely if the constraint is applied.
:type relationships: list of str
"""
_attribute_map = {
'group': {'key': 'group', 'type': 'int'},
'id': {'key': 'id', 'type': 'str'},
'inverse': {'key': 'inverse', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'object'},
'relationships': {'key': 'relationships', 'type': '[str]'}
}
def __init__(self, group=None, id=None, inverse=None, name=None, properties=None, relationships=None):
super(ContributionConstraint, self).__init__()
self.group = group
self.id = id
self.inverse = inverse
self.name = name
self.properties = properties
self.relationships = relationships
class ContributionNodeQuery(Model):
"""
A query that can be issued for contribution nodes
:param contribution_ids: The contribution ids of the nodes to find.
:type contribution_ids: list of str
:param data_provider_context: Contextual information that can be leveraged by contribution constraints
:type data_provider_context: :class:`DataProviderContext <azure.devops.v7_1.contributions.models.DataProviderContext>`
    :param include_provider_details: Indicates whether contribution provider details should be included in the result.
:type include_provider_details: bool
    :param query_options: Query options to be used when fetching ContributionNodes
:type query_options: object
"""
_attribute_map = {
'contribution_ids': {'key': 'contributionIds', 'type': '[str]'},
'data_provider_context': {'key': 'dataProviderContext', 'type': 'DataProviderContext'},
'include_provider_details': {'key': 'includeProviderDetails', 'type': 'bool'},
'query_options': {'key': 'queryOptions', 'type': 'object'}
}
def __init__(self, contribution_ids=None, data_provider_context=None, include_provider_details=None, query_options=None):
super(ContributionNodeQuery, self).__init__()
self.contribution_ids = contribution_ids
self.data_provider_context = data_provider_context
self.include_provider_details = include_provider_details
self.query_options = query_options
class ContributionNodeQueryResult(Model):
"""
Result of a contribution node query. Wraps the resulting contribution nodes and provider details.
:param nodes: Map of contribution ids to corresponding node.
:type nodes: dict
:param provider_details: Map of provider ids to the corresponding provider details object.
:type provider_details: dict
"""
_attribute_map = {
'nodes': {'key': 'nodes', 'type': '{ClientContributionNode}'},
'provider_details': {'key': 'providerDetails', 'type': '{ClientContributionProviderDetails}'}
}
def __init__(self, nodes=None, provider_details=None):
super(ContributionNodeQueryResult, self).__init__()
self.nodes = nodes
self.provider_details = provider_details
class ContributionPropertyDescription(Model):
"""
Description about a property of a contribution type
:param description: Description of the property
:type description: str
:param name: Name of the property
:type name: str
:param required: True if this property is required
:type required: bool
:param type: The type of value used for this property
:type type: object
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'required': {'key': 'required', 'type': 'bool'},
'type': {'key': 'type', 'type': 'object'}
}
def __init__(self, description=None, name=None, required=None, type=None):
super(ContributionPropertyDescription, self).__init__()
self.description = description
self.name = name
self.required = required
self.type = type
class ContributionType(ContributionBase):
"""
A contribution type, given by a json schema
:param description: Description of the contribution/type
:type description: str
:param id: Fully qualified identifier of the contribution/type
:type id: str
    :param visible_to: VisibleTo can be used to restrict who can reference a given contribution/type. This value should be a list of publishers or extensions that access is restricted to. Examples: "ms" - Means only the "ms" publisher can reference this. "ms.vss-web" - Means only the "vss-web" extension from the "ms" publisher can reference this.
:type visible_to: list of str
:param indexed: Controls whether or not contributions of this type have the type indexed for queries. This allows clients to find all extensions that have a contribution of this type. NOTE: Only TrustedPartners are allowed to specify indexed contribution types.
:type indexed: bool
:param name: Friendly name of the contribution/type
:type name: str
:param properties: Describes the allowed properties for this contribution type
:type properties: dict
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'visible_to': {'key': 'visibleTo', 'type': '[str]'},
'indexed': {'key': 'indexed', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{ContributionPropertyDescription}'}
}
def __init__(self, description=None, id=None, visible_to=None, indexed=None, name=None, properties=None):
super(ContributionType, self).__init__(description=description, id=id, visible_to=visible_to)
self.indexed = indexed
self.name = name
self.properties = properties
class DataProviderContext(Model):
"""
Contextual information that data providers can examine when populating their data
:param properties: Generic property bag that contains context-specific properties that data providers can use when populating their data dictionary
:type properties: dict
"""
_attribute_map = {
'properties': {'key': 'properties', 'type': '{object}'}
}
def __init__(self, properties=None):
super(DataProviderContext, self).__init__()
self.properties = properties
class DataProviderExceptionDetails(Model):
"""
:param exception_type: The type of the exception that was thrown.
:type exception_type: str
:param message: Message that is associated with the exception.
:type message: str
:param stack_trace: The StackTrace from the exception turned into a string.
:type stack_trace: str
"""
_attribute_map = {
'exception_type': {'key': 'exceptionType', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'stack_trace': {'key': 'stackTrace', 'type': 'str'}
}
def __init__(self, exception_type=None, message=None, stack_trace=None):
super(DataProviderExceptionDetails, self).__init__()
self.exception_type = exception_type
self.message = message
self.stack_trace = stack_trace
class DataProviderQuery(Model):
"""
A query that can be issued for data provider data
:param context: Contextual information to pass to the data providers
:type context: :class:`DataProviderContext <azure.devops.v7_1.contributions.models.DataProviderContext>`
:param contribution_ids: The contribution ids of the data providers to resolve
:type contribution_ids: list of str
"""
_attribute_map = {
'context': {'key': 'context', 'type': 'DataProviderContext'},
'contribution_ids': {'key': 'contributionIds', 'type': '[str]'}
}
def __init__(self, context=None, contribution_ids=None):
super(DataProviderQuery, self).__init__()
self.context = context
self.contribution_ids = contribution_ids
class DataProviderResult(Model):
"""
Result structure from calls to GetDataProviderData
:param client_providers: This is the set of data providers that were requested, but either they were defined as client providers, or as remote providers that failed and may be retried by the client.
:type client_providers: dict
:param data: Property bag of data keyed off of the data provider contribution id
:type data: dict
:param exceptions: Set of exceptions that occurred resolving the data providers.
:type exceptions: dict
:param resolved_providers: List of data providers resolved in the data-provider query
:type resolved_providers: list of :class:`ResolvedDataProvider <azure.devops.v7_1.contributions.models.ResolvedDataProvider>`
:param scope_name: Scope name applied to this data provider result.
:type scope_name: str
:param scope_value: Scope value applied to this data provider result.
:type scope_value: str
:param shared_data: Property bag of shared data that was contributed to by any of the individual data providers
:type shared_data: dict
"""
_attribute_map = {
'client_providers': {'key': 'clientProviders', 'type': '{ClientDataProviderQuery}'},
'data': {'key': 'data', 'type': '{object}'},
'exceptions': {'key': 'exceptions', 'type': '{DataProviderExceptionDetails}'},
'resolved_providers': {'key': 'resolvedProviders', 'type': '[ResolvedDataProvider]'},
'scope_name': {'key': 'scopeName', 'type': 'str'},
'scope_value': {'key': 'scopeValue', 'type': 'str'},
'shared_data': {'key': 'sharedData', 'type': '{object}'}
}
def __init__(self, client_providers=None, data=None, exceptions=None, resolved_providers=None, scope_name=None, scope_value=None, shared_data=None):
super(DataProviderResult, self).__init__()
self.client_providers = client_providers
self.data = data
self.exceptions = exceptions
self.resolved_providers = resolved_providers
self.scope_name = scope_name
self.scope_value = scope_value
self.shared_data = shared_data
class ExtensionEventCallback(Model):
"""
Base class for an event callback for an extension
:param uri: The uri of the endpoint that is hit when an event occurs
:type uri: str
"""
_attribute_map = {
'uri': {'key': 'uri', 'type': 'str'}
}
def __init__(self, uri=None):
super(ExtensionEventCallback, self).__init__()
self.uri = uri
class ExtensionEventCallbackCollection(Model):
"""
Collection of event callbacks - endpoints called when particular extension events occur.
:param post_disable: Optional. Defines an endpoint that gets called via a POST request to notify that an extension disable has occurred.
:type post_disable: :class:`ExtensionEventCallback <azure.devops.v7_1.contributions.models.ExtensionEventCallback>`
:param post_enable: Optional. Defines an endpoint that gets called via a POST request to notify that an extension enable has occurred.
:type post_enable: :class:`ExtensionEventCallback <azure.devops.v7_1.contributions.models.ExtensionEventCallback>`
:param post_install: Optional. Defines an endpoint that gets called via a POST request to notify that an extension install has completed.
:type post_install: :class:`ExtensionEventCallback <azure.devops.v7_1.contributions.models.ExtensionEventCallback>`
:param post_uninstall: Optional. Defines an endpoint that gets called via a POST request to notify that an extension uninstall has occurred.
:type post_uninstall: :class:`ExtensionEventCallback <azure.devops.v7_1.contributions.models.ExtensionEventCallback>`
:param post_update: Optional. Defines an endpoint that gets called via a POST request to notify that an extension update has occurred.
:type post_update: :class:`ExtensionEventCallback <azure.devops.v7_1.contributions.models.ExtensionEventCallback>`
:param pre_install: Optional. Defines an endpoint that gets called via a POST request to notify that an extension install is about to occur. Response indicates whether to proceed or abort.
:type pre_install: :class:`ExtensionEventCallback <azure.devops.v7_1.contributions.models.ExtensionEventCallback>`
:param version_check: For multi-version extensions, defines an endpoint that gets called via an OPTIONS request to determine the particular version of the extension to be used
:type version_check: :class:`ExtensionEventCallback <azure.devops.v7_1.contributions.models.ExtensionEventCallback>`
"""
_attribute_map = {
'post_disable': {'key': 'postDisable', 'type': 'ExtensionEventCallback'},
'post_enable': {'key': 'postEnable', 'type': 'ExtensionEventCallback'},
'post_install': {'key': 'postInstall', 'type': 'ExtensionEventCallback'},
'post_uninstall': {'key': 'postUninstall', 'type': 'ExtensionEventCallback'},
'post_update': {'key': 'postUpdate', 'type': 'ExtensionEventCallback'},
'pre_install': {'key': 'preInstall', 'type': 'ExtensionEventCallback'},
'version_check': {'key': 'versionCheck', 'type': 'ExtensionEventCallback'}
}
def __init__(self, post_disable=None, post_enable=None, post_install=None, post_uninstall=None, post_update=None, pre_install=None, version_check=None):
super(ExtensionEventCallbackCollection, self).__init__()
self.post_disable = post_disable
self.post_enable = post_enable
self.post_install = post_install
self.post_uninstall = post_uninstall
self.post_update = post_update
self.pre_install = pre_install
self.version_check = version_check
class ExtensionFile(Model):
"""
:param asset_type:
:type asset_type: str
:param language:
:type language: str
:param source:
:type source: str
"""
_attribute_map = {
'asset_type': {'key': 'assetType', 'type': 'str'},
'language': {'key': 'language', 'type': 'str'},
'source': {'key': 'source', 'type': 'str'}
}
def __init__(self, asset_type=None, language=None, source=None):
super(ExtensionFile, self).__init__()
self.asset_type = asset_type
self.language = language
self.source = source
class ExtensionLicensing(Model):
"""
How an extension should handle including contributions based on licensing
:param overrides: A list of contributions which deviate from the default licensing behavior
:type overrides: list of :class:`LicensingOverride <azure.devops.v7_1.contributions.models.LicensingOverride>`
"""
_attribute_map = {
'overrides': {'key': 'overrides', 'type': '[LicensingOverride]'}
}
def __init__(self, overrides=None):
super(ExtensionLicensing, self).__init__()
self.overrides = overrides
class ExtensionManifest(Model):
"""
Base class for extension properties which are shared by the extension manifest and the extension model
:param base_uri: Uri used as base for other relative uri's defined in extension
:type base_uri: str
:param constraints: List of shared constraints defined by this extension
:type constraints: list of :class:`ContributionConstraint <azure.devops.v7_1.contributions.models.ContributionConstraint>`
:param contributions: List of contributions made by this extension
:type contributions: list of :class:`Contribution <azure.devops.v7_1.contributions.models.Contribution>`
:param contribution_types: List of contribution types defined by this extension
:type contribution_types: list of :class:`ContributionType <azure.devops.v7_1.contributions.models.ContributionType>`
:param demands: List of explicit demands required by this extension
:type demands: list of str
:param event_callbacks: Collection of endpoints that get called when particular extension events occur
:type event_callbacks: :class:`ExtensionEventCallbackCollection <azure.devops.v7_1.contributions.models.ExtensionEventCallbackCollection>`
:param fallback_base_uri: Secondary location that can be used as base for other relative uri's defined in extension
:type fallback_base_uri: str
:param language: Language Culture Name set by the Gallery
:type language: str
:param licensing: How this extension behaves with respect to licensing
:type licensing: :class:`ExtensionLicensing <azure.devops.v7_1.contributions.models.ExtensionLicensing>`
:param manifest_version: Version of the extension manifest format/content
:type manifest_version: float
:param restricted_to: Default user claims applied to all contributions (except the ones which have been specified restrictedTo explicitly) to control the visibility of a contribution.
:type restricted_to: list of str
:param scopes: List of all oauth scopes required by this extension
:type scopes: list of str
:param service_instance_type: The ServiceInstanceType(Guid) of the VSTS service that must be available to an account in order for the extension to be installed
:type service_instance_type: str
"""
_attribute_map = {
'base_uri': {'key': 'baseUri', 'type': 'str'},
'constraints': {'key': 'constraints', 'type': '[ContributionConstraint]'},
'contributions': {'key': 'contributions', 'type': '[Contribution]'},
'contribution_types': {'key': 'contributionTypes', 'type': '[ContributionType]'},
'demands': {'key': 'demands', 'type': '[str]'},
'event_callbacks': {'key': 'eventCallbacks', 'type': 'ExtensionEventCallbackCollection'},
'fallback_base_uri': {'key': 'fallbackBaseUri', 'type': 'str'},
'language': {'key': 'language', 'type': 'str'},
'licensing': {'key': 'licensing', 'type': 'ExtensionLicensing'},
'manifest_version': {'key': 'manifestVersion', 'type': 'float'},
'restricted_to': {'key': 'restrictedTo', 'type': '[str]'},
'scopes': {'key': 'scopes', 'type': '[str]'},
'service_instance_type': {'key': 'serviceInstanceType', 'type': 'str'}
}
def __init__(self, base_uri=None, constraints=None, contributions=None, contribution_types=None, demands=None, event_callbacks=None, fallback_base_uri=None, language=None, licensing=None, manifest_version=None, restricted_to=None, scopes=None, service_instance_type=None):
super(ExtensionManifest, self).__init__()
self.base_uri = base_uri
self.constraints = constraints
self.contributions = contributions
self.contribution_types = contribution_types
self.demands = demands
self.event_callbacks = event_callbacks
self.fallback_base_uri = fallback_base_uri
self.language = language
self.licensing = licensing
self.manifest_version = manifest_version
self.restricted_to = restricted_to
self.scopes = scopes
self.service_instance_type = service_instance_type
class InstalledExtension(ExtensionManifest):
"""
Represents a VSTS extension along with its installation state
:param base_uri: URI used as a base for other relative URIs defined in the extension
:type base_uri: str
:param constraints: List of shared constraints defined by this extension
:type constraints: list of :class:`ContributionConstraint <azure.devops.v7_1.contributions.models.ContributionConstraint>`
:param contributions: List of contributions made by this extension
:type contributions: list of :class:`Contribution <azure.devops.v7_1.contributions.models.Contribution>`
:param contribution_types: List of contribution types defined by this extension
:type contribution_types: list of :class:`ContributionType <azure.devops.v7_1.contributions.models.ContributionType>`
:param demands: List of explicit demands required by this extension
:type demands: list of str
:param event_callbacks: Collection of endpoints that get called when particular extension events occur
:type event_callbacks: :class:`ExtensionEventCallbackCollection <azure.devops.v7_1.contributions.models.ExtensionEventCallbackCollection>`
:param fallback_base_uri: Secondary location that can be used as a base for other relative URIs defined in the extension
:type fallback_base_uri: str
:param language: Language Culture Name set by the Gallery
:type language: str
:param licensing: How this extension behaves with respect to licensing
:type licensing: :class:`ExtensionLicensing <azure.devops.v7_1.contributions.models.ExtensionLicensing>`
:param manifest_version: Version of the extension manifest format/content
:type manifest_version: float
:param restricted_to: Default user claims applied to all contributions (except the ones for which restrictedTo has been specified explicitly) to control the visibility of a contribution.
:type restricted_to: list of str
:param scopes: List of all oauth scopes required by this extension
:type scopes: list of str
:param service_instance_type: The ServiceInstanceType(Guid) of the VSTS service that must be available to an account in order for the extension to be installed
:type service_instance_type: str
:param extension_id: The friendly extension id for this extension - unique for a given publisher.
:type extension_id: str
:param extension_name: The display name of the extension.
:type extension_name: str
:param files: This is the set of files available from the extension.
:type files: list of :class:`ExtensionFile <azure.devops.v7_1.contributions.models.ExtensionFile>`
:param flags: Extension flags relevant to contribution consumers
:type flags: object
:param install_state: Information about this particular installation of the extension
:type install_state: :class:`InstalledExtensionState <azure.devops.v7_1.contributions.models.InstalledExtensionState>`
:param last_published: This represents the date/time the extension was last updated in the gallery. This doesn't mean this version was updated; the value represents changes to any and all versions of the extension.
:type last_published: datetime
:param publisher_id: Unique id of the publisher of this extension
:type publisher_id: str
:param publisher_name: The display name of the publisher
:type publisher_name: str
:param registration_id: Unique id for this extension (the same id is used for all versions of a single extension)
:type registration_id: str
:param version: Version of this extension
:type version: str
"""
_attribute_map = {
'base_uri': {'key': 'baseUri', 'type': 'str'},
'constraints': {'key': 'constraints', 'type': '[ContributionConstraint]'},
'contributions': {'key': 'contributions', 'type': '[Contribution]'},
'contribution_types': {'key': 'contributionTypes', 'type': '[ContributionType]'},
'demands': {'key': 'demands', 'type': '[str]'},
'event_callbacks': {'key': 'eventCallbacks', 'type': 'ExtensionEventCallbackCollection'},
'fallback_base_uri': {'key': 'fallbackBaseUri', 'type': 'str'},
'language': {'key': 'language', 'type': 'str'},
'licensing': {'key': 'licensing', 'type': 'ExtensionLicensing'},
'manifest_version': {'key': 'manifestVersion', 'type': 'float'},
'restricted_to': {'key': 'restrictedTo', 'type': '[str]'},
'scopes': {'key': 'scopes', 'type': '[str]'},
'service_instance_type': {'key': 'serviceInstanceType', 'type': 'str'},
'extension_id': {'key': 'extensionId', 'type': 'str'},
'extension_name': {'key': 'extensionName', 'type': 'str'},
'files': {'key': 'files', 'type': '[ExtensionFile]'},
'flags': {'key': 'flags', 'type': 'object'},
'install_state': {'key': 'installState', 'type': 'InstalledExtensionState'},
'last_published': {'key': 'lastPublished', 'type': 'iso-8601'},
'publisher_id': {'key': 'publisherId', 'type': 'str'},
'publisher_name': {'key': 'publisherName', 'type': 'str'},
'registration_id': {'key': 'registrationId', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, base_uri=None, constraints=None, contributions=None, contribution_types=None, demands=None, event_callbacks=None, fallback_base_uri=None, language=None, licensing=None, manifest_version=None, restricted_to=None, scopes=None, service_instance_type=None, extension_id=None, extension_name=None, files=None, flags=None, install_state=None, last_published=None, publisher_id=None, publisher_name=None, registration_id=None, version=None):
super(InstalledExtension, self).__init__(base_uri=base_uri, constraints=constraints, contributions=contributions, contribution_types=contribution_types, demands=demands, event_callbacks=event_callbacks, fallback_base_uri=fallback_base_uri, language=language, licensing=licensing, manifest_version=manifest_version, restricted_to=restricted_to, scopes=scopes, service_instance_type=service_instance_type)
self.extension_id = extension_id
self.extension_name = extension_name
self.files = files
self.flags = flags
self.install_state = install_state
self.last_published = last_published
self.publisher_id = publisher_id
self.publisher_name = publisher_name
self.registration_id = registration_id
self.version = version
class InstalledExtensionState(Model):
"""
The state of an installed extension
:param flags: States of an installed extension
:type flags: object
:param installation_issues: List of installation issues
:type installation_issues: list of :class:`InstalledExtensionStateIssue <azure.devops.v7_1.contributions.models.InstalledExtensionStateIssue>`
:param last_updated: The time at which this installation was last updated
:type last_updated: datetime
"""
_attribute_map = {
'flags': {'key': 'flags', 'type': 'object'},
'installation_issues': {'key': 'installationIssues', 'type': '[InstalledExtensionStateIssue]'},
'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}
}
def __init__(self, flags=None, installation_issues=None, last_updated=None):
super(InstalledExtensionState, self).__init__()
self.flags = flags
self.installation_issues = installation_issues
self.last_updated = last_updated
class InstalledExtensionStateIssue(Model):
"""
Represents an installation issue
:param message: The error message
:type message: str
:param source: Source of the installation issue, for example "Demands"
:type source: str
:param type: Installation issue type (Warning, Error)
:type type: object
"""
_attribute_map = {
'message': {'key': 'message', 'type': 'str'},
'source': {'key': 'source', 'type': 'str'},
'type': {'key': 'type', 'type': 'object'}
}
def __init__(self, message=None, source=None, type=None):
super(InstalledExtensionStateIssue, self).__init__()
self.message = message
self.source = source
self.type = type
class LicensingOverride(Model):
"""
Maps a contribution to a licensing behavior
:param behavior: How the inclusion of this contribution should change based on licensing
:type behavior: object
:param id: Fully qualified contribution id which we want to define licensing behavior for
:type id: str
"""
_attribute_map = {
'behavior': {'key': 'behavior', 'type': 'object'},
'id': {'key': 'id', 'type': 'str'}
}
def __init__(self, behavior=None, id=None):
super(LicensingOverride, self).__init__()
self.behavior = behavior
self.id = id
class ResolvedDataProvider(Model):
"""
Entry for a specific data provider's resulting data
:param duration: The total time the data provider took to resolve its data (in milliseconds)
:type duration: float
:param error:
:type error: str
:param id:
:type id: str
"""
_attribute_map = {
'duration': {'key': 'duration', 'type': 'float'},
'error': {'key': 'error', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'}
}
def __init__(self, duration=None, error=None, id=None):
super(ResolvedDataProvider, self).__init__()
self.duration = duration
self.error = error
self.id = id
class ClientDataProviderQuery(DataProviderQuery):
"""
A client data provider query holds the details needed to make the data provider request from the client.
:param context: Contextual information to pass to the data providers
:type context: :class:`DataProviderContext <azure.devops.v7_1.contributions.models.DataProviderContext>`
:param contribution_ids: The contribution ids of the data providers to resolve
:type contribution_ids: list of str
:param query_service_instance_type: The Id of the service instance type that should be communicated with in order to resolve the data providers from the client given the query values.
:type query_service_instance_type: str
"""
_attribute_map = {
'context': {'key': 'context', 'type': 'DataProviderContext'},
'contribution_ids': {'key': 'contributionIds', 'type': '[str]'},
'query_service_instance_type': {'key': 'queryServiceInstanceType', 'type': 'str'}
}
def __init__(self, context=None, contribution_ids=None, query_service_instance_type=None):
super(ClientDataProviderQuery, self).__init__(context=context, contribution_ids=contribution_ids)
self.query_service_instance_type = query_service_instance_type
class Contribution(ContributionBase):
"""
An individual contribution made by an extension
:param description: Description of the contribution/type
:type description: str
:param id: Fully qualified identifier of the contribution/type
:type id: str
:param visible_to: VisibleTo can be used to restrict who can reference a given contribution/type. This value should be a list of publishers or extensions to which access is restricted. Examples: "ms" - Means only the "ms" publisher can reference this. "ms.vss-web" - Means only the "vss-web" extension from the "ms" publisher can reference this.
:type visible_to: list of str
:param constraints: List of constraints (filters) that should be applied to the availability of this contribution
:type constraints: list of :class:`ContributionConstraint <azure.devops.v7_1.contributions.models.ContributionConstraint>`
:param includes: Includes is a set of contributions that should have this contribution included in their targets list.
:type includes: list of str
:param properties: Properties/attributes of this contribution
:type properties: :class:`object <azure.devops.v7_1.contributions.models.object>`
:param restricted_to: List of demanded claims in order for the user to see this contribution (like anonymous, public, member...).
:type restricted_to: list of str
:param targets: The ids of the contribution(s) that this contribution targets. (parent contributions)
:type targets: list of str
:param type: Id of the Contribution Type
:type type: str
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'visible_to': {'key': 'visibleTo', 'type': '[str]'},
'constraints': {'key': 'constraints', 'type': '[ContributionConstraint]'},
'includes': {'key': 'includes', 'type': '[str]'},
'properties': {'key': 'properties', 'type': 'object'},
'restricted_to': {'key': 'restrictedTo', 'type': '[str]'},
'targets': {'key': 'targets', 'type': '[str]'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, description=None, id=None, visible_to=None, constraints=None, includes=None, properties=None, restricted_to=None, targets=None, type=None):
super(Contribution, self).__init__(description=description, id=id, visible_to=visible_to)
self.constraints = constraints
self.includes = includes
self.properties = properties
self.restricted_to = restricted_to
self.targets = targets
self.type = type
__all__ = [
'ClientContribution',
'ClientContributionNode',
'ClientContributionProviderDetails',
'ContributionBase',
'ContributionConstraint',
'ContributionNodeQuery',
'ContributionNodeQueryResult',
'ContributionPropertyDescription',
'ContributionType',
'DataProviderContext',
'DataProviderExceptionDetails',
'DataProviderQuery',
'DataProviderResult',
'ExtensionEventCallback',
'ExtensionEventCallbackCollection',
'ExtensionFile',
'ExtensionLicensing',
'ExtensionManifest',
'InstalledExtension',
'InstalledExtensionState',
'InstalledExtensionStateIssue',
'LicensingOverride',
'ResolvedDataProvider',
'ClientDataProviderQuery',
'Contribution',
]
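# Hedged sketch, not part of the generated module: a minimal example of
# populating a few of the models above by hand. Every literal value below is a
# hypothetical placeholder, not a real contribution or extension id.
def _example_models():
    licensing = ExtensionLicensing(overrides=[
        LicensingOverride(behavior='alwaysInclude',       # hypothetical behavior value
                          id='ms.vss-web.example-hub')])  # hypothetical contribution id
    manifest = ExtensionManifest(
        base_uri='https://example.com/extension',         # hypothetical base URI
        scopes=['vso.work'],                               # hypothetical OAuth scope
        manifest_version=1.0,
        licensing=licensing)
    return manifest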
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/contributions/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/contributions/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 13201
}
| 360 |
# coding=utf-8
from msrest.universal_http import ClientRequest
from .git_client_base import GitClientBase
class GitClient(GitClientBase):
"""Git
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(GitClient, self).__init__(base_url, creds)
def get_vsts_info(self, relative_remote_url):
url = self._client.format_url(relative_remote_url.rstrip('/') + '/vsts/info')
request = ClientRequest(method='GET', url=url)
headers = {'Accept': 'application/json'}
if self._suppress_fedauth_redirect:
headers['X-TFS-FedAuthRedirect'] = 'Suppress'
if self._force_msa_pass_through:
headers['X-VSS-ForceMsaPassThrough'] = 'true'
response = self._send_request(request, headers)
return self._deserialize('VstsInfo', response)
@staticmethod
def get_vsts_info_by_remote_url(remote_url, credentials,
suppress_fedauth_redirect=True,
force_msa_pass_through=True):
request = ClientRequest(method='GET', url=remote_url.rstrip('/') + '/vsts/info')
headers = {'Accept': 'application/json'}
if suppress_fedauth_redirect:
headers['X-TFS-FedAuthRedirect'] = 'Suppress'
if force_msa_pass_through:
headers['X-VSS-ForceMsaPassThrough'] = 'true'
git_client = GitClient(base_url=remote_url, creds=credentials)
response = git_client._send_request(request, headers)
return git_client._deserialize('VstsInfo', response)
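# Hedged usage sketch, not part of the generated client. The remote URL and
# personal access token below are hypothetical placeholders.
def _example_get_vsts_info():
    from msrest.authentication import BasicAuthentication
    creds = BasicAuthentication('', 'personal-access-token')  # hypothetical PAT
    # Resolves repository/collection metadata straight from a Git remote URL,
    # without first constructing a connection to a known organization.
    return GitClient.get_vsts_info_by_remote_url(
        'https://dev.azure.com/your-org/your-project/_git/your-repo', creds)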
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/git/git_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/git/git_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 715
}
| 361 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class MemberEntitlementManagementClient(Client):
"""MemberEntitlementManagement
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(MemberEntitlementManagementClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = '68ddce18-2501-45f1-a17b-7931a9922690'
def add_group_entitlement(self, group_entitlement, rule_option=None):
"""AddGroupEntitlement.
[Preview API] Create a group entitlement with license rule, extension rule.
:param :class:`<GroupEntitlement> <azure.devops.v7_1.member_entitlement_management.models.GroupEntitlement>` group_entitlement: GroupEntitlement object specifying License Rule, Extensions Rule for the group. Based on the rules the members of the group will be given licenses and extensions. The Group Entitlement can be used to add the group to other project-level groups
:param str rule_option: RuleOption [ApplyGroupRule/TestApplyGroupRule] - specifies if the rules defined in group entitlement should be created and applied to its members (default option) or just be tested
:rtype: :class:`<GroupEntitlementOperationReference> <azure.devops.v7_1.member_entitlement_management.models.GroupEntitlementOperationReference>`
"""
query_parameters = {}
if rule_option is not None:
query_parameters['ruleOption'] = self._serialize.query('rule_option', rule_option, 'str')
content = self._serialize.body(group_entitlement, 'GroupEntitlement')
response = self._send(http_method='POST',
location_id='2280bffa-58a2-49da-822e-0764a1bb44f7',
version='7.1-preview.1',
query_parameters=query_parameters,
content=content)
return self._deserialize('GroupEntitlementOperationReference', response)
def delete_group_entitlement(self, group_id, rule_option=None, remove_group_membership=None):
"""DeleteGroupEntitlement.
[Preview API] Delete a group entitlement.
:param str group_id: ID of the group to delete.
:param str rule_option: RuleOption [ApplyGroupRule/TestApplyGroupRule] - specifies if the rules defined in group entitlement should be deleted and the changes are applied to its members (default option) or just be tested
:param bool remove_group_membership: Optional parameter that specifies whether the group with the given ID should be removed from all other groups
:rtype: :class:`<GroupEntitlementOperationReference> <azure.devops.v7_1.member_entitlement_management.models.GroupEntitlementOperationReference>`
"""
route_values = {}
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
query_parameters = {}
if rule_option is not None:
query_parameters['ruleOption'] = self._serialize.query('rule_option', rule_option, 'str')
if remove_group_membership is not None:
query_parameters['removeGroupMembership'] = self._serialize.query('remove_group_membership', remove_group_membership, 'bool')
response = self._send(http_method='DELETE',
location_id='2280bffa-58a2-49da-822e-0764a1bb44f7',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('GroupEntitlementOperationReference', response)
def get_group_entitlement(self, group_id):
"""GetGroupEntitlement.
[Preview API] Get a group entitlement.
:param str group_id: ID of the group.
:rtype: :class:`<GroupEntitlement> <azure.devops.v7_1.member_entitlement_management.models.GroupEntitlement>`
"""
route_values = {}
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
response = self._send(http_method='GET',
location_id='2280bffa-58a2-49da-822e-0764a1bb44f7',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('GroupEntitlement', response)
def update_group_entitlement(self, document, group_id, rule_option=None):
"""UpdateGroupEntitlement.
[Preview API] Update entitlements (License Rule, Extensions Rule, Project memberships etc.) for a group.
:param :class:`<[JsonPatchOperation]> <azure.devops.v7_1.member_entitlement_management.models.[JsonPatchOperation]>` document: JsonPatchDocument containing the operations to perform on the group.
:param str group_id: ID of the group.
:param str rule_option: RuleOption [ApplyGroupRule/TestApplyGroupRule] - specifies if the rules defined in group entitlement should be updated and the changes are applied to its members (default option) or just be tested
:rtype: :class:`<GroupEntitlementOperationReference> <azure.devops.v7_1.member_entitlement_management.models.GroupEntitlementOperationReference>`
"""
route_values = {}
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
query_parameters = {}
if rule_option is not None:
query_parameters['ruleOption'] = self._serialize.query('rule_option', rule_option, 'str')
content = self._serialize.body(document, '[JsonPatchOperation]')
response = self._send(http_method='PATCH',
location_id='2280bffa-58a2-49da-822e-0764a1bb44f7',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters,
content=content,
media_type='application/json-patch+json')
return self._deserialize('GroupEntitlementOperationReference', response)
def get_group_entitlements(self):
"""GetGroupEntitlements.
[Preview API] Get the group entitlements for an account.
:rtype: [GroupEntitlement]
"""
response = self._send(http_method='GET',
location_id='9bce1f43-2629-419f-8f6c-7503be58a4f3',
version='7.1-preview.1')
return self._deserialize('[GroupEntitlement]', self._unwrap_collection(response))
def search_member_entitlements(self, continuation_token=None, select=None, filter=None, order_by=None):
"""SearchMemberEntitlements.
[Preview API]
:param str continuation_token:
:param str select:
:param str filter:
:param str order_by:
:rtype: :class:`<PagedList> <azure.devops.v7_1.member_entitlement_management.models.PagedList>`
"""
query_parameters = {}
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if select is not None:
query_parameters['select'] = self._serialize.query('select', select, 'str')
if filter is not None:
query_parameters['$filter'] = self._serialize.query('filter', filter, 'str')
if order_by is not None:
query_parameters['$orderBy'] = self._serialize.query('order_by', order_by, 'str')
response = self._send(http_method='GET',
location_id='1e8cabfb-1fda-461e-860f-eeeae54d06bb',
version='7.1-preview.2',
query_parameters=query_parameters)
return self._deserialize('PagedList', response)
def add_member_to_group(self, group_id, member_id):
"""AddMemberToGroup.
[Preview API] Add a member to a Group.
:param str group_id: Id of the Group.
:param str member_id: Id of the member to add.
"""
route_values = {}
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
if member_id is not None:
route_values['memberId'] = self._serialize.url('member_id', member_id, 'str')
self._send(http_method='PUT',
location_id='45a36e53-5286-4518-aa72-2d29f7acc5d8',
version='7.1-preview.1',
route_values=route_values)
def get_group_members(self, group_id, max_results=None, paging_token=None):
"""GetGroupMembers.
[Preview API] Get direct members of a Group.
:param str group_id: Id of the Group.
:param int max_results: Maximum number of results to retrieve.
:param str paging_token: Paging token from the previous page fetched. If 'pagingToken' is null, the results are fetched from the beginning of the members list.
:rtype: :class:`<PagedGraphMemberList> <azure.devops.v7_1.member_entitlement_management.models.PagedGraphMemberList>`
"""
route_values = {}
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
query_parameters = {}
if max_results is not None:
query_parameters['maxResults'] = self._serialize.query('max_results', max_results, 'int')
if paging_token is not None:
query_parameters['pagingToken'] = self._serialize.query('paging_token', paging_token, 'str')
response = self._send(http_method='GET',
location_id='45a36e53-5286-4518-aa72-2d29f7acc5d8',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('PagedGraphMemberList', response)
def remove_member_from_group(self, group_id, member_id):
"""RemoveMemberFromGroup.
[Preview API] Remove a member from a Group.
:param str group_id: Id of the group.
:param str member_id: Id of the member to remove.
"""
route_values = {}
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
if member_id is not None:
route_values['memberId'] = self._serialize.url('member_id', member_id, 'str')
self._send(http_method='DELETE',
location_id='45a36e53-5286-4518-aa72-2d29f7acc5d8',
version='7.1-preview.1',
route_values=route_values)
def add_service_principal_entitlement(self, service_principal_entitlement):
"""AddServicePrincipalEntitlement.
[Preview API] Add a service principal, assign license and extensions and make them a member of a project group in an account.
:param :class:`<ServicePrincipalEntitlement> <azure.devops.v7_1.member_entitlement_management.models.ServicePrincipalEntitlement>` service_principal_entitlement: ServicePrincipalEntitlement object specifying License, Extensions and Project/Team groups the service principal should be added to.
:rtype: :class:`<ServicePrincipalEntitlementsPostResponse> <azure.devops.v7_1.member_entitlement_management.models.ServicePrincipalEntitlementsPostResponse>`
"""
content = self._serialize.body(service_principal_entitlement, 'ServicePrincipalEntitlement')
response = self._send(http_method='POST',
location_id='f03dbf50-80f8-41b7-8ca2-65b6a178caba',
version='7.1-preview.1',
content=content)
return self._deserialize('ServicePrincipalEntitlementsPostResponse', response)
def update_service_principal_entitlements(self, document):
"""UpdateServicePrincipalEntitlements.
[Preview API] Edit the entitlements (License, Extensions, Projects, Teams etc) for one or more service principals.
:param :class:`<[JsonPatchOperation]> <azure.devops.v7_1.member_entitlement_management.models.[JsonPatchOperation]>` document: JsonPatchDocument containing the operations to perform.
:rtype: :class:`<ServicePrincipalEntitlementOperationReference> <azure.devops.v7_1.member_entitlement_management.models.ServicePrincipalEntitlementOperationReference>`
"""
content = self._serialize.body(document, '[JsonPatchOperation]')
response = self._send(http_method='PATCH',
location_id='f03dbf50-80f8-41b7-8ca2-65b6a178caba',
version='7.1-preview.1',
content=content,
media_type='application/json-patch+json')
return self._deserialize('ServicePrincipalEntitlementOperationReference', response)
def delete_service_principal_entitlement(self, service_principal_id):
"""DeleteServicePrincipalEntitlement.
[Preview API] Delete a service principal from the account.
:param str service_principal_id: ID of the service principal.
"""
route_values = {}
if service_principal_id is not None:
route_values['servicePrincipalId'] = self._serialize.url('service_principal_id', service_principal_id, 'str')
self._send(http_method='DELETE',
location_id='1d491a66-190b-43ae-86b8-9c2688c55186',
version='7.1-preview.1',
route_values=route_values)
def get_service_principal_entitlement(self, service_principal_id):
"""GetServicePrincipalEntitlement.
[Preview API] Get Service principal Entitlement for a service principal.
:param str service_principal_id: ID of the service principal.
:rtype: :class:`<ServicePrincipalEntitlement> <azure.devops.v7_1.member_entitlement_management.models.ServicePrincipalEntitlement>`
"""
route_values = {}
if service_principal_id is not None:
route_values['servicePrincipalId'] = self._serialize.url('service_principal_id', service_principal_id, 'str')
response = self._send(http_method='GET',
location_id='1d491a66-190b-43ae-86b8-9c2688c55186',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('ServicePrincipalEntitlement', response)
def update_service_principal_entitlement(self, document, service_principal_id):
"""UpdateServicePrincipalEntitlement.
[Preview API] Edit the entitlements (License, Extensions, Projects, Teams etc) for a service principal.
:param :class:`<[JsonPatchOperation]> <azure.devops.v7_1.member_entitlement_management.models.[JsonPatchOperation]>` document: JsonPatchDocument containing the operations to perform on the service principal.
:param str service_principal_id: ID of the service principal.
:rtype: :class:`<ServicePrincipalEntitlementsPatchResponse> <azure.devops.v7_1.member_entitlement_management.models.ServicePrincipalEntitlementsPatchResponse>`
"""
route_values = {}
if service_principal_id is not None:
route_values['servicePrincipalId'] = self._serialize.url('service_principal_id', service_principal_id, 'str')
content = self._serialize.body(document, '[JsonPatchOperation]')
response = self._send(http_method='PATCH',
location_id='1d491a66-190b-43ae-86b8-9c2688c55186',
version='7.1-preview.1',
route_values=route_values,
content=content,
media_type='application/json-patch+json')
return self._deserialize('ServicePrincipalEntitlementsPatchResponse', response)
def add_user_entitlement(self, user_entitlement):
"""AddUserEntitlement.
[Preview API] Add a user, assign license and extensions and make them a member of a project group in an account.
:param :class:`<UserEntitlement> <azure.devops.v7_1.member_entitlement_management.models.UserEntitlement>` user_entitlement: UserEntitlement object specifying License, Extensions and Project/Team groups the user should be added to.
:rtype: :class:`<UserEntitlementsPostResponse> <azure.devops.v7_1.member_entitlement_management.models.UserEntitlementsPostResponse>`
"""
content = self._serialize.body(user_entitlement, 'UserEntitlement')
response = self._send(http_method='POST',
location_id='387f832c-dbf2-4643-88e9-c1aa94dbb737',
version='7.1-preview.3',
content=content)
return self._deserialize('UserEntitlementsPostResponse', response)
def search_user_entitlements(self, continuation_token=None, select=None, filter=None, order_by=None):
"""SearchUserEntitlements.
[Preview API] Get a paged set of user entitlements matching the filter and sort criteria built with properties that match the select input.
:param str continuation_token: Continuation token for getting the next page of the data set. If null is passed, gets the first page.
:param str select: Comma (",") separated list of properties to select in the result entitlements. Names of the properties are 'Projects', 'Extensions' and 'Grouprules'.
:param str filter: Equality operators relating to searching user entitlements separated by 'and' clauses. Valid filters include: licenseId, licenseStatus, userType, and name. licenseId: filters based on license assignment using license names. i.e. licenseId eq 'Account-Stakeholder' or licenseId eq 'Account-Express'. licenseStatus: filters based on license status. Currently only supports disabled. i.e. licenseStatus eq 'Disabled'. To get disabled basic licenses, you would pass (licenseId eq 'Account-Express' and licenseStatus eq 'Disabled'). userType: filters on identity type. Supported types are member or guest, i.e. userType eq 'member'. name: filters on whether the user's display name or email contains the given input, i.e. to get all users with "test" in email or display name the filter is "name eq 'test'". A valid query could be: (licenseId eq 'Account-Stakeholder' or (licenseId eq 'Account-Express' and licenseStatus eq 'Disabled')) and name eq 'test' and userType eq 'guest'.
:param str order_by: PropertyName and Order (separated by a space ( )) to sort on (e.g. lastAccessed desc). Order defaults to ascending. Valid properties to order by are dateCreated, lastAccessed, and name.
:rtype: :class:`<PagedGraphMemberList> <azure.devops.v7_1.member_entitlement_management.models.PagedGraphMemberList>`
"""
query_parameters = {}
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if select is not None:
query_parameters['select'] = self._serialize.query('select', select, 'str')
if filter is not None:
query_parameters['$filter'] = self._serialize.query('filter', filter, 'str')
if order_by is not None:
query_parameters['$orderBy'] = self._serialize.query('order_by', order_by, 'str')
response = self._send(http_method='GET',
location_id='387f832c-dbf2-4643-88e9-c1aa94dbb737',
version='7.1-preview.3',
query_parameters=query_parameters)
return self._deserialize('PagedGraphMemberList', response)
def update_user_entitlements(self, document, do_not_send_invite_for_new_users=None):
"""UpdateUserEntitlements.
[Preview API] Edit the entitlements (License, Extensions, Projects, Teams etc) for one or more users.
:param :class:`<[JsonPatchOperation]> <azure.devops.v7_1.member_entitlement_management.models.[JsonPatchOperation]>` document: JsonPatchDocument containing the operations to perform.
:param bool do_not_send_invite_for_new_users: Whether to send email invites to new users or not
:rtype: :class:`<UserEntitlementOperationReference> <azure.devops.v7_1.member_entitlement_management.models.UserEntitlementOperationReference>`
"""
query_parameters = {}
if do_not_send_invite_for_new_users is not None:
query_parameters['doNotSendInviteForNewUsers'] = self._serialize.query('do_not_send_invite_for_new_users', do_not_send_invite_for_new_users, 'bool')
content = self._serialize.body(document, '[JsonPatchOperation]')
response = self._send(http_method='PATCH',
location_id='387f832c-dbf2-4643-88e9-c1aa94dbb737',
version='7.1-preview.3',
query_parameters=query_parameters,
content=content,
media_type='application/json-patch+json')
return self._deserialize('UserEntitlementOperationReference', response)
def delete_user_entitlement(self, user_id):
"""DeleteUserEntitlement.
[Preview API] Delete a user from the account.
:param str user_id: ID of the user.
"""
route_values = {}
if user_id is not None:
route_values['userId'] = self._serialize.url('user_id', user_id, 'str')
self._send(http_method='DELETE',
location_id='8480c6eb-ce60-47e9-88df-eca3c801638b',
version='7.1-preview.3',
route_values=route_values)
def get_user_entitlement(self, user_id):
"""GetUserEntitlement.
[Preview API] Get User Entitlement for a user.
:param str user_id: ID of the user.
:rtype: :class:`<UserEntitlement> <azure.devops.v7_1.member_entitlement_management.models.UserEntitlement>`
"""
route_values = {}
if user_id is not None:
route_values['userId'] = self._serialize.url('user_id', user_id, 'str')
response = self._send(http_method='GET',
location_id='8480c6eb-ce60-47e9-88df-eca3c801638b',
version='7.1-preview.3',
route_values=route_values)
return self._deserialize('UserEntitlement', response)
def update_user_entitlement(self, document, user_id):
"""UpdateUserEntitlement.
[Preview API] Edit the entitlements (License, Extensions, Projects, Teams etc) for a user.
:param :class:`<[JsonPatchOperation]> <azure.devops.v7_1.member_entitlement_management.models.[JsonPatchOperation]>` document: JsonPatchDocument containing the operations to perform on the user.
:param str user_id: ID of the user.
:rtype: :class:`<UserEntitlementsPatchResponse> <azure.devops.v7_1.member_entitlement_management.models.UserEntitlementsPatchResponse>`
"""
route_values = {}
if user_id is not None:
route_values['userId'] = self._serialize.url('user_id', user_id, 'str')
content = self._serialize.body(document, '[JsonPatchOperation]')
response = self._send(http_method='PATCH',
location_id='8480c6eb-ce60-47e9-88df-eca3c801638b',
version='7.1-preview.3',
route_values=route_values,
content=content,
media_type='application/json-patch+json')
return self._deserialize('UserEntitlementsPatchResponse', response)
def get_users_summary(self, select=None):
"""GetUsersSummary.
[Preview API] Get summary of Licenses, Extension, Projects, Groups and their assignments in the collection.
:param str select: Comma (",") separated list of properties to select. Supported property names are {AccessLevels, Licenses, Projects, Groups}.
:rtype: :class:`<UsersSummary> <azure.devops.v7_1.member_entitlement_management.models.UsersSummary>`
"""
query_parameters = {}
if select is not None:
query_parameters['select'] = self._serialize.query('select', select, 'str')
response = self._send(http_method='GET',
location_id='5ae55b13-c9dd-49d1-957e-6e76c152e3d9',
version='7.1-preview.1',
query_parameters=query_parameters)
return self._deserialize('UsersSummary', response)
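# Hedged usage sketch, not part of the generated client. The organization URL,
# personal access token, and filter value below are hypothetical placeholders.
def _example_entitlement_queries():
    from msrest.authentication import BasicAuthentication
    client = MemberEntitlementManagementClient(
        base_url='https://vsaex.dev.azure.com/your-org',  # hypothetical org URL
        creds=BasicAuthentication('', 'personal-access-token'))
    summary = client.get_users_summary(select='Licenses,Projects')
    # Filter syntax follows the search_user_entitlements docstring above.
    page = client.search_user_entitlements(filter="name eq 'test'")
    return summary, page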
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/member_entitlement_management/member_entitlement_management_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/member_entitlement_management/member_entitlement_management_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 10707
}
| 362 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class PipelinePermissionsClient(Client):
"""PipelinePermissions
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(PipelinePermissionsClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = 'a81a0441-de52-4000-aa15-ff0e07bfbbaa'
def get_pipeline_permissions_for_resource(self, project, resource_type, resource_id):
"""GetPipelinePermissionsForResource.
[Preview API] Given a ResourceType and ResourceId, returns authorized definitions for that resource.
:param str project: Project ID or project name
:param str resource_type:
:param str resource_id:
:rtype: :class:`<ResourcePipelinePermissions> <azure.devops.v7_1.pipeline_permissions.models.ResourcePipelinePermissions>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if resource_type is not None:
route_values['resourceType'] = self._serialize.url('resource_type', resource_type, 'str')
if resource_id is not None:
route_values['resourceId'] = self._serialize.url('resource_id', resource_id, 'str')
response = self._send(http_method='GET',
location_id='b5b9a4a4-e6cd-4096-853c-ab7d8b0c4eb2',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('ResourcePipelinePermissions', response)
def update_pipeline_permisions_for_resource(self, resource_authorization, project, resource_type, resource_id):
"""UpdatePipelinePermisionsForResource.
[Preview API] Authorizes/Unauthorizes a list of definitions for a given resource.
:param :class:`<ResourcePipelinePermissions> <azure.devops.v7_1.pipeline_permissions.models.ResourcePipelinePermissions>` resource_authorization:
:param str project: Project ID or project name
:param str resource_type:
:param str resource_id:
:rtype: :class:`<ResourcePipelinePermissions> <azure.devops.v7_1.pipeline_permissions.models.ResourcePipelinePermissions>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if resource_type is not None:
route_values['resourceType'] = self._serialize.url('resource_type', resource_type, 'str')
if resource_id is not None:
route_values['resourceId'] = self._serialize.url('resource_id', resource_id, 'str')
content = self._serialize.body(resource_authorization, 'ResourcePipelinePermissions')
response = self._send(http_method='PATCH',
location_id='b5b9a4a4-e6cd-4096-853c-ab7d8b0c4eb2',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('ResourcePipelinePermissions', response)
def update_pipeline_permisions_for_resources(self, resource_authorizations, project):
"""UpdatePipelinePermisionsForResources.
[Preview API] Batch API to authorize/unauthorize a list of definitions for multiple resources.
:param [ResourcePipelinePermissions] resource_authorizations:
:param str project: Project ID or project name
:rtype: [ResourcePipelinePermissions]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(resource_authorizations, '[ResourcePipelinePermissions]')
response = self._send(http_method='PATCH',
location_id='b5b9a4a4-e6cd-4096-853c-ab7d8b0c4eb2',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('[ResourcePipelinePermissions]', self._unwrap_collection(response))
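# Hedged usage sketch, not part of the generated client. The organization URL,
# personal access token, project, and resource id below are hypothetical; the
# 'endpoint' resource type (service connections) is an assumption, not an
# exhaustive list of valid types.
def _example_get_permissions():
    from msrest.authentication import BasicAuthentication
    client = PipelinePermissionsClient(
        base_url='https://dev.azure.com/your-org',  # hypothetical org URL
        creds=BasicAuthentication('', 'personal-access-token'))
    # Query which pipeline definitions are authorized to use a given resource.
    return client.get_pipeline_permissions_for_resource(
        project='MyProject', resource_type='endpoint',
        resource_id='00000000-0000-0000-0000-000000000000')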
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/pipeline_permissions/pipeline_permissions_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/pipeline_permissions/pipeline_permissions_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 1998
}
| 363 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class BoardResult(Model):
"""
Defines the Board result that matched a Board search request.
:param boardtype: Board Type of the board document.
:type boardtype: str
:param collection: Collection details of the board document.
:type collection: :class:`Collection <azure.devops.v7_1.search.models.Collection>`
:param project: Project details of the board document.
:type project: :class:`Project <azure.devops.v7_1.search.models.Project>`
:param team: Team details of the board document.
:type team: :class:`Team <azure.devops.v7_1.search.models.Team>`
"""
_attribute_map = {
'boardtype': {'key': 'boardtype', 'type': 'str'},
'collection': {'key': 'collection', 'type': 'Collection'},
'project': {'key': 'project', 'type': 'Project'},
'team': {'key': 'team', 'type': 'Team'}
}
def __init__(self, boardtype=None, collection=None, project=None, team=None):
super(BoardResult, self).__init__()
self.boardtype = boardtype
self.collection = collection
self.project = project
self.team = team
class BranchInfo(Model):
"""
Information about the configured branch.
:param last_indexed_change_id: The commit Id of the last Git commit indexed in this branch
:type last_indexed_change_id: str
:param last_processed_time: The last time this branch was processed by the Search service
:type last_processed_time: datetime
:param name: Name of the indexed branch
:type name: str
"""
_attribute_map = {
'last_indexed_change_id': {'key': 'lastIndexedChangeId', 'type': 'str'},
'last_processed_time': {'key': 'lastProcessedTime', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, last_indexed_change_id=None, last_processed_time=None, name=None):
super(BranchInfo, self).__init__()
self.last_indexed_change_id = last_indexed_change_id
self.last_processed_time = last_processed_time
self.name = name
class CodeResult(Model):
"""
Defines the code result containing information of the searched files and its metadata.
:param collection: Collection of the result file.
:type collection: :class:`Collection <azure.devops.v7_1.search.models.Collection>`
:param content_id: ContentId of the result file.
:type content_id: str
:param file_name: Name of the result file.
:type file_name: str
:param matches: Dictionary of field to hit offsets in the result file. Key identifies the area in which hits were found, for ex: file content/file name etc.
:type matches: dict
:param path: Path at which result file is present.
:type path: str
:param project: Project of the result file.
:type project: :class:`Project <azure.devops.v7_1.search.models.Project>`
:param repository: Repository of the result file.
:type repository: :class:`Repository <azure.devops.v7_1.search.models.Repository>`
:param versions: Versions of the result file.
:type versions: list of :class:`Version <azure.devops.v7_1.search.models.Version>`
"""
_attribute_map = {
'collection': {'key': 'collection', 'type': 'Collection'},
'content_id': {'key': 'contentId', 'type': 'str'},
'file_name': {'key': 'fileName', 'type': 'str'},
'matches': {'key': 'matches', 'type': '{[Hit]}'},
'path': {'key': 'path', 'type': 'str'},
'project': {'key': 'project', 'type': 'Project'},
'repository': {'key': 'repository', 'type': 'Repository'},
'versions': {'key': 'versions', 'type': '[Version]'}
}
def __init__(self, collection=None, content_id=None, file_name=None, matches=None, path=None, project=None, repository=None, versions=None):
super(CodeResult, self).__init__()
self.collection = collection
self.content_id = content_id
self.file_name = file_name
self.matches = matches
self.path = path
self.project = project
self.repository = repository
self.versions = versions
class Collection(Model):
"""
Defines the details of the collection.
:param name: Name of the collection.
:type name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, name=None):
super(Collection, self).__init__()
self.name = name
class CustomRepositoryBranchStatusResponse(Model):
"""
:param last_indexed_change_id:
:type last_indexed_change_id: long
:param last_indexed_change_id_change_time:
:type last_indexed_change_id_change_time: datetime
:param latest_change_id:
:type latest_change_id: long
:param latest_change_id_change_time:
:type latest_change_id_change_time: datetime
"""
_attribute_map = {
'last_indexed_change_id': {'key': 'lastIndexedChangeId', 'type': 'long'},
'last_indexed_change_id_change_time': {'key': 'lastIndexedChangeIdChangeTime', 'type': 'iso-8601'},
'latest_change_id': {'key': 'latestChangeId', 'type': 'long'},
'latest_change_id_change_time': {'key': 'latestChangeIdChangeTime', 'type': 'iso-8601'}
}
def __init__(self, last_indexed_change_id=None, last_indexed_change_id_change_time=None, latest_change_id=None, latest_change_id_change_time=None):
super(CustomRepositoryBranchStatusResponse, self).__init__()
self.last_indexed_change_id = last_indexed_change_id
self.last_indexed_change_id_change_time = last_indexed_change_id_change_time
self.latest_change_id = latest_change_id
self.latest_change_id_change_time = latest_change_id_change_time
class CustomRepositoryStatusResponse(Model):
"""
Defines the custom repository status.
:param id: Repository Id.
:type id: str
:param indexed_top_level_folders: List of indexed top level folders info.
:type indexed_top_level_folders: list of :class:`DepotInfo <azure.devops.v7_1.search.models.DepotInfo>`
:param name: Repository Name.
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'indexed_top_level_folders': {'key': 'indexedTopLevelFolders', 'type': '[DepotInfo]'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, indexed_top_level_folders=None, name=None):
super(CustomRepositoryStatusResponse, self).__init__()
self.id = id
self.indexed_top_level_folders = indexed_top_level_folders
self.name = name
class DepotInfo(Model):
"""
Information about the custom repository indexing freshness for configured branches and depots.
:param indexed_branches: List of Indexed branches info.
:type indexed_branches: list of :class:`BranchInfo <azure.devops.v7_1.search.models.BranchInfo>`
:param name: Name of the indexed top level folder (depot).
:type name: str
"""
_attribute_map = {
'indexed_branches': {'key': 'indexedBranches', 'type': '[BranchInfo]'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, indexed_branches=None, name=None):
super(DepotInfo, self).__init__()
self.indexed_branches = indexed_branches
self.name = name
class EntitySearchRequestBase(Model):
"""
Base class for search request types.
:param filters: Filters to be applied. Set it to null if there are no filters to be applied.
:type filters: dict
:param search_text: The search text.
:type search_text: str
"""
_attribute_map = {
'filters': {'key': 'filters', 'type': '{[str]}'},
'search_text': {'key': 'searchText', 'type': 'str'}
}
def __init__(self, filters=None, search_text=None):
super(EntitySearchRequestBase, self).__init__()
self.filters = filters
self.search_text = search_text
class EntitySearchResponse(Model):
"""
Defines the base contract for search response.
:param facets: A dictionary storing an array of <code>Filter</code> object against each facet.
:type facets: dict
:param info_code: Numeric code indicating any additional information: 0 - Ok, 1 - Account is being reindexed, 2 - Account indexing has not started, 3 - Invalid Request, 4 - Prefix wildcard query not supported, 5 - MultiWords with code facet not supported, 6 - Account is being onboarded, 7 - Account is being onboarded or reindexed, 8 - Top value trimmed to maxresult allowed, 9 - Branches are being indexed, 10 - Faceting not enabled, 11 - Work items not accessible, 19 - Phrase queries with code type filters not supported, 20 - Wildcard queries with code type filters not supported. Any other info code is used for internal purposes.
:type info_code: int
"""
_attribute_map = {
'facets': {'key': 'facets', 'type': '{[Filter]}'},
'info_code': {'key': 'infoCode', 'type': 'int'}
}
def __init__(self, facets=None, info_code=None):
super(EntitySearchResponse, self).__init__()
self.facets = facets
self.info_code = info_code
class FeedInfo(Model):
"""
Defines the details of a feed.
:param collection_id: Id of the collection.
:type collection_id: str
:param collection_name: Name of the collection.
:type collection_name: str
:param feed_id: Id of the feed.
:type feed_id: str
:param feed_name: Name of the feed.
:type feed_name: str
:param latest_matched_version: Latest matched version of package in this Feed.
:type latest_matched_version: str
:param latest_version: Latest version of package in this Feed.
:type latest_version: str
:param package_url: Url of package in this Feed.
:type package_url: str
:param views: List of views which contain the matched package.
:type views: list of str
"""
_attribute_map = {
'collection_id': {'key': 'collectionId', 'type': 'str'},
'collection_name': {'key': 'collectionName', 'type': 'str'},
'feed_id': {'key': 'feedId', 'type': 'str'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'latest_matched_version': {'key': 'latestMatchedVersion', 'type': 'str'},
'latest_version': {'key': 'latestVersion', 'type': 'str'},
'package_url': {'key': 'packageUrl', 'type': 'str'},
'views': {'key': 'views', 'type': '[str]'}
}
def __init__(self, collection_id=None, collection_name=None, feed_id=None, feed_name=None, latest_matched_version=None, latest_version=None, package_url=None, views=None):
super(FeedInfo, self).__init__()
self.collection_id = collection_id
self.collection_name = collection_name
self.feed_id = feed_id
self.feed_name = feed_name
self.latest_matched_version = latest_matched_version
self.latest_version = latest_version
self.package_url = package_url
self.views = views
class Filter(Model):
"""
Describes a filter bucket item representing the total matches of search result, name and id.
:param id: Id of the filter bucket.
:type id: str
:param name: Name of the filter bucket.
:type name: str
:param result_count: Count of matches in the filter bucket.
:type result_count: int
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'result_count': {'key': 'resultCount', 'type': 'int'}
}
def __init__(self, id=None, name=None, result_count=None):
super(Filter, self).__init__()
self.id = id
self.name = name
self.result_count = result_count
class Hit(Model):
"""
Describes the position of a piece of text in a document.
:param code_snippet: Gets or sets an extract of code where the match appears. Usually it is the line where there is the match.
:type code_snippet: str
:param column: Gets or sets the column number where the match appears in the line.
:type column: int
:param char_offset: Gets or sets the start character offset of a piece of text.
:type char_offset: int
:param length: Gets or sets the length of a piece of text.
:type length: int
:param line: Gets or sets the line number where the match appears in the file.
:type line: int
:param type: Gets or sets the name of type of a piece of text.
:type type: str
"""
_attribute_map = {
'code_snippet': {'key': 'codeSnippet', 'type': 'str'},
'column': {'key': 'column', 'type': 'int'},
'char_offset': {'key': 'charOffset', 'type': 'int'},
'length': {'key': 'length', 'type': 'int'},
'line': {'key': 'line', 'type': 'int'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, code_snippet=None, column=None, char_offset=None, length=None, line=None, type=None):
super(Hit, self).__init__()
self.code_snippet = code_snippet
self.column = column
self.char_offset = char_offset
self.length = length
self.line = line
self.type = type
class PackageHit(Model):
"""
Defines the matched terms in the field of the package result.
:param field_reference_name: Reference name of the highlighted field.
:type field_reference_name: str
:param highlights: Matched/highlighted snippets of the field.
:type highlights: list of str
"""
_attribute_map = {
'field_reference_name': {'key': 'fieldReferenceName', 'type': 'str'},
'highlights': {'key': 'highlights', 'type': '[str]'}
}
def __init__(self, field_reference_name=None, highlights=None):
super(PackageHit, self).__init__()
self.field_reference_name = field_reference_name
self.highlights = highlights
class PackageResult(Model):
"""
Defines the package result that matched a package search request.
:param description: Description of the package.
:type description: str
:param feeds: List of feeds which contain the matching package.
:type feeds: list of :class:`FeedInfo <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.FeedInfo>`
:param hits: List of highlighted fields for the match.
:type hits: list of :class:`PackageHit <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.PackageHit>`
:param id: Id of the package.
:type id: str
:param name: Name of the package.
:type name: str
:param protocol_type: Type of the package.
:type protocol_type: str
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'feeds': {'key': 'feeds', 'type': '[FeedInfo]'},
'hits': {'key': 'hits', 'type': '[PackageHit]'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'protocol_type': {'key': 'protocolType', 'type': 'str'}
}
def __init__(self, description=None, feeds=None, hits=None, id=None, name=None, protocol_type=None):
super(PackageResult, self).__init__()
self.description = description
self.feeds = feeds
self.hits = hits
self.id = id
self.name = name
self.protocol_type = protocol_type
class PackageSearchResponse(Model):
"""
:param activity_id:
:type activity_id: list of str
:param content:
:type content: :class:`PackageSearchResponseContent <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.PackageSearchResponseContent>`
"""
_attribute_map = {
'activity_id': {'key': 'activityId', 'type': '[str]'},
'content': {'key': 'content', 'type': 'PackageSearchResponseContent'}
}
def __init__(self, activity_id=None, content=None):
super(PackageSearchResponse, self).__init__()
self.activity_id = activity_id
self.content = content
class PackageSearchResponseContent(EntitySearchResponse):
"""
Defines a response item that is returned for a package search request.
:param facets: A dictionary storing an array of <code>Filter</code> object against each facet.
:type facets: dict
:param info_code: Numeric code indicating any additional information: 0 - Ok, 1 - Account is being reindexed, 2 - Account indexing has not started, 3 - Invalid Request, 4 - Prefix wildcard query not supported, 5 - MultiWords with code facet not supported, 6 - Account is being onboarded, 7 - Account is being onboarded or reindexed, 8 - Top value trimmed to maxresult allowed, 9 - Branches are being indexed, 10 - Faceting not enabled, 11 - Work items not accessible, 19 - Phrase queries with code type filters not supported, 20 - Wildcard queries with code type filters not supported. Any other info code is used for internal purposes.
:type info_code: int
:param count: Total number of matched packages.
:type count: int
:param results: List of matched packages.
:type results: list of :class:`PackageResult <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.PackageResult>`
"""
_attribute_map = {
'facets': {'key': 'facets', 'type': '{[Filter]}'},
'info_code': {'key': 'infoCode', 'type': 'int'},
'count': {'key': 'count', 'type': 'int'},
'results': {'key': 'results', 'type': '[PackageResult]'}
}
def __init__(self, facets=None, info_code=None, count=None, results=None):
super(PackageSearchResponseContent, self).__init__(facets=facets, info_code=info_code)
self.count = count
self.results = results
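# Editor's sketch (illustrative, not generated code): one way to walk a
# deserialized PackageSearchResponseContent. Assumes `content` was produced by
# the package search client; the attribute names used below come from the
# models defined in this module.
def _example_iter_package_highlights(content):
    """Yield (package name, highlighted field, snippet) for every match."""
    for result in content.results or []:
        for hit in result.hits or []:
            for snippet in hit.highlights or []:
                yield result.name, hit.field_reference_name, snippet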
class Project(Model):
"""
Defines the details of the project.
:param id: Id of the project.
:type id: str
:param name: Name of the project.
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, name=None):
super(Project, self).__init__()
self.id = id
self.name = name
class ProjectReference(Model):
"""
Defines the details of the project.
:param id: ID of the project.
:type id: str
:param name: Name of the project.
:type name: str
:param visibility: Visibility of the project.
:type visibility: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'visibility': {'key': 'visibility', 'type': 'str'}
}
def __init__(self, id=None, name=None, visibility=None):
super(ProjectReference, self).__init__()
self.id = id
self.name = name
self.visibility = visibility
class Repository(Model):
"""
Defines the details of the repository.
:param id: Id of the repository.
:type id: str
:param name: Name of the repository.
:type name: str
:param type: Version control type of the result file.
:type type: object
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'object'}
}
def __init__(self, id=None, name=None, type=None):
super(Repository, self).__init__()
self.id = id
self.name = name
self.type = type
class RepositoryStatusResponse(Model):
"""
Defines the repository status.
:param id: Repository Id.
:type id: str
:param indexed_branches: List of Indexed branches info.
:type indexed_branches: list of :class:`BranchInfo <azure.devops.v7_1.search.models.BranchInfo>`
:param name: Repository Name.
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'indexed_branches': {'key': 'indexedBranches', 'type': '[BranchInfo]'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, indexed_branches=None, name=None):
super(RepositoryStatusResponse, self).__init__()
self.id = id
self.indexed_branches = indexed_branches
self.name = name
class ScrollSearchRequest(EntitySearchRequestBase):
"""
Defines a scroll code search request.
:param filters: Filters to be applied. Set it to null if there are no filters to be applied.
:type filters: dict
:param search_text: The search text.
:type search_text: str
:param scroll_id: Scroll Id for scroll search query.
:type scroll_id: str
:param scroll_size: Size of data to return for scroll search query. Min value is 201.
:type scroll_size: int
"""
_attribute_map = {
'filters': {'key': 'filters', 'type': '{[str]}'},
'search_text': {'key': 'searchText', 'type': 'str'},
'scroll_id': {'key': '$scrollId', 'type': 'str'},
'scroll_size': {'key': '$scrollSize', 'type': 'int'}
}
def __init__(self, filters=None, search_text=None, scroll_id=None, scroll_size=None):
super(ScrollSearchRequest, self).__init__(filters=filters, search_text=search_text)
self.scroll_id = scroll_id
self.scroll_size = scroll_size
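# Editor's sketch (illustrative): scroll paging re-sends the scroll_id returned
# by the previous page. Only the request construction below is defined by this
# module; any client method that consumes it is out of scope here.
def _example_build_scroll_request(previous_scroll_id=None):
    return ScrollSearchRequest(
        search_text='logger',
        filters=None,                   # no facet filters applied
        scroll_id=previous_scroll_id,   # None on the first page
        scroll_size=500                 # docstring above: minimum allowed is 201
    )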
class SettingResult(Model):
"""
Defines the setting result that matched a setting search request.
:param description: Description of the settings page.
:type description: str
:param icon: Icon name of the settings page.
:type icon: str
:param route_id: Contribution url route id of the corresponding settings page.
:type route_id: str
:param route_parameter_mapping: Contribution url route parameter of the corresponding settings page.
:type route_parameter_mapping: dict
:param scope: Scope of the settings page, either organization, project, or user.
:type scope: object
:param title: Title of the settings page.
:type title: str
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'icon': {'key': 'icon', 'type': 'str'},
'route_id': {'key': 'routeId', 'type': 'str'},
'route_parameter_mapping': {'key': 'routeParameterMapping', 'type': '{str}'},
'scope': {'key': 'scope', 'type': 'object'},
'title': {'key': 'title', 'type': 'str'}
}
def __init__(self, description=None, icon=None, route_id=None, route_parameter_mapping=None, scope=None, title=None):
super(SettingResult, self).__init__()
self.description = description
self.icon = icon
self.route_id = route_id
self.route_parameter_mapping = route_parameter_mapping
self.scope = scope
self.title = title
class SettingSearchResponse(EntitySearchResponse):
"""
Defines a setting search response item.
:param facets: A dictionary storing an array of <code>Filter</code> object against each facet.
:type facets: dict
:param info_code: Numeric code indicating any additional information: 0 - Ok, 1 - Account is being reindexed, 2 - Account indexing has not started, 3 - Invalid Request, 4 - Prefix wildcard query not supported, 5 - MultiWords with code facet not supported, 6 - Account is being onboarded, 7 - Account is being onboarded or reindexed, 8 - Top value trimmed to maxresult allowed, 9 - Branches are being indexed, 10 - Faceting not enabled, 11 - Work items not accessible, 19 - Phrase queries with code type filters not supported, 20 - Wildcard queries with code type filters not supported. Any other info code is used for internal purposes.
:type info_code: int
:param count: Total number of matched setting documents.
:type count: int
:param results: List of top matched setting documents.
:type results: list of :class:`SettingResult <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.SettingResult>`
"""
_attribute_map = {
'facets': {'key': 'facets', 'type': '{[Filter]}'},
'info_code': {'key': 'infoCode', 'type': 'int'},
'count': {'key': 'count', 'type': 'int'},
'results': {'key': 'results', 'type': '[SettingResult]'}
}
def __init__(self, facets=None, info_code=None, count=None, results=None):
super(SettingSearchResponse, self).__init__(facets=facets, info_code=info_code)
self.count = count
self.results = results
class SortOption(Model):
"""
Defines how to sort the result.
:param field: Field name on which sorting should be done.
:type field: str
:param sort_order: Order (ASC/DESC) in which the results should be sorted.
:type sort_order: str
"""
_attribute_map = {
'field': {'key': 'field', 'type': 'str'},
'sort_order': {'key': 'sortOrder', 'type': 'str'}
}
def __init__(self, field=None, sort_order=None):
super(SortOption, self).__init__()
self.field = field
self.sort_order = sort_order
class Team(Model):
"""
Defines the details of the team.
:param id: Id of the team.
:type id: str
:param name: Name of the Team.
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, name=None):
super(Team, self).__init__()
self.id = id
self.name = name
class TfvcRepositoryStatusResponse(Model):
"""
Defines the TFVC repository status.
:param id: Repository Id.
:type id: str
:param indexing_information: List of Indexing Information for TFVC repository
:type indexing_information: list of :class:`BranchInfo <azure.devops.v7_1.search.models.BranchInfo>`
:param name: Repository Name.
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'indexing_information': {'key': 'indexingInformation', 'type': '[BranchInfo]'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, indexing_information=None, name=None):
super(TfvcRepositoryStatusResponse, self).__init__()
self.id = id
self.indexing_information = indexing_information
self.name = name
class Version(Model):
"""
Describes the details pertaining to a version of the result file.
:param branch_name: Name of the branch.
:type branch_name: str
:param change_id: ChangeId in the given branch associated with this match.
:type change_id: str
"""
_attribute_map = {
'branch_name': {'key': 'branchName', 'type': 'str'},
'change_id': {'key': 'changeId', 'type': 'str'}
}
def __init__(self, branch_name=None, change_id=None):
super(Version, self).__init__()
self.branch_name = branch_name
self.change_id = change_id
class Wiki(Model):
"""
Defines the details of wiki.
:param id: Id of the wiki.
:type id: str
:param mapped_path: Mapped path for the wiki.
:type mapped_path: str
:param name: Name of the wiki.
:type name: str
:param version: Version for wiki.
:type version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'mapped_path': {'key': 'mappedPath', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, id=None, mapped_path=None, name=None, version=None):
super(Wiki, self).__init__()
self.id = id
self.mapped_path = mapped_path
self.name = name
self.version = version
class WikiHit(Model):
"""
Defines the matched terms in the field of the wiki result.
:param field_reference_name: Reference name of the highlighted field.
:type field_reference_name: str
:param highlights: Matched/highlighted snippets of the field.
:type highlights: list of str
"""
_attribute_map = {
'field_reference_name': {'key': 'fieldReferenceName', 'type': 'str'},
'highlights': {'key': 'highlights', 'type': '[str]'}
}
def __init__(self, field_reference_name=None, highlights=None):
super(WikiHit, self).__init__()
self.field_reference_name = field_reference_name
self.highlights = highlights
class WikiResult(Model):
"""
Defines the wiki result that matched a wiki search request.
:param collection: Collection of the result file.
:type collection: :class:`Collection <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.Collection>`
:param content_id: ContentId of the result file.
:type content_id: str
:param file_name: Name of the result file.
:type file_name: str
:param hits: Highlighted snippets of fields that match the search request. The list is sorted by relevance of the snippets.
:type hits: list of :class:`WikiHit <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.WikiHit>`
:param path: Path at which result file is present.
:type path: str
:param project: Project details of the wiki document.
:type project: :class:`ProjectReference <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.ProjectReference>`
:param wiki: Wiki information for the result.
:type wiki: :class:`Wiki <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.Wiki>`
"""
_attribute_map = {
'collection': {'key': 'collection', 'type': 'Collection'},
'content_id': {'key': 'contentId', 'type': 'str'},
'file_name': {'key': 'fileName', 'type': 'str'},
'hits': {'key': 'hits', 'type': '[WikiHit]'},
'path': {'key': 'path', 'type': 'str'},
'project': {'key': 'project', 'type': 'ProjectReference'},
'wiki': {'key': 'wiki', 'type': 'Wiki'}
}
def __init__(self, collection=None, content_id=None, file_name=None, hits=None, path=None, project=None, wiki=None):
super(WikiResult, self).__init__()
self.collection = collection
self.content_id = content_id
self.file_name = file_name
self.hits = hits
self.path = path
self.project = project
self.wiki = wiki
class WikiSearchResponse(EntitySearchResponse):
"""
Defines a wiki search response item.
:param facets: A dictionary storing an array of <code>Filter</code> object against each facet.
:type facets: dict
:param info_code: Numeric code indicating any additional information: 0 - Ok, 1 - Account is being reindexed, 2 - Account indexing has not started, 3 - Invalid Request, 4 - Prefix wildcard query not supported, 5 - MultiWords with code facet not supported, 6 - Account is being onboarded, 7 - Account is being onboarded or reindexed, 8 - Top value trimmed to maxresult allowed, 9 - Branches are being indexed, 10 - Faceting not enabled, 11 - Work items not accessible, 19 - Phrase queries with code type filters not supported, 20 - Wildcard queries with code type filters not supported. Any other info code is used for internal purposes.
:type info_code: int
:param count: Total number of matched wiki documents.
:type count: int
:param results: List of top matched wiki documents.
:type results: list of :class:`WikiResult <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.WikiResult>`
"""
_attribute_map = {
'facets': {'key': 'facets', 'type': '{[Filter]}'},
'info_code': {'key': 'infoCode', 'type': 'int'},
'count': {'key': 'count', 'type': 'int'},
'results': {'key': 'results', 'type': '[WikiResult]'}
}
def __init__(self, facets=None, info_code=None, count=None, results=None):
super(WikiSearchResponse, self).__init__(facets=facets, info_code=info_code)
self.count = count
self.results = results
class WorkItemHit(Model):
"""
Defines the matched terms in the field of the work item result.
:param field_reference_name: Reference name of the highlighted field.
:type field_reference_name: str
:param highlights: Matched/highlighted snippets of the field.
:type highlights: list of str
"""
_attribute_map = {
'field_reference_name': {'key': 'fieldReferenceName', 'type': 'str'},
'highlights': {'key': 'highlights', 'type': '[str]'}
}
def __init__(self, field_reference_name=None, highlights=None):
super(WorkItemHit, self).__init__()
self.field_reference_name = field_reference_name
self.highlights = highlights
class WorkItemResult(Model):
"""
Defines the work item result that matched a work item search request.
:param fields: A standard set of work item fields and their values.
:type fields: dict
:param hits: Highlighted snippets of fields that match the search request. The list is sorted by relevance of the snippets.
:type hits: list of :class:`WorkItemHit <azure.devops.v7_1.search.models.WorkItemHit>`
:param project: Project details of the work item.
:type project: :class:`Project <azure.devops.v7_1.search.models.Project>`
:param url: Reference to the work item.
:type url: str
"""
_attribute_map = {
'fields': {'key': 'fields', 'type': '{str}'},
'hits': {'key': 'hits', 'type': '[WorkItemHit]'},
'project': {'key': 'project', 'type': 'Project'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, fields=None, hits=None, project=None, url=None):
super(WorkItemResult, self).__init__()
self.fields = fields
self.hits = hits
self.project = project
self.url = url
class WorkItemSearchResponse(EntitySearchResponse):
"""
Defines a response item that is returned for a work item search request.
:param facets: A dictionary storing an array of <code>Filter</code> object against each facet.
:type facets: dict
:param info_code: Numeric code indicating any additional information: 0 - Ok, 1 - Account is being reindexed, 2 - Account indexing has not started, 3 - Invalid Request, 4 - Prefix wildcard query not supported, 5 - MultiWords with code facet not supported, 6 - Account is being onboarded, 7 - Account is being onboarded or reindexed, 8 - Top value trimmed to maxresult allowed, 9 - Branches are being indexed, 10 - Faceting not enabled, 11 - Work items not accessible, 19 - Phrase queries with code type filters not supported, 20 - Wildcard queries with code type filters not supported. Any other info code is used for internal purposes.
:type info_code: int
:param count: Total number of matched work items.
:type count: int
:param results: List of top matched work items.
:type results: list of :class:`WorkItemResult <azure.devops.v7_1.search.models.WorkItemResult>`
"""
_attribute_map = {
'facets': {'key': 'facets', 'type': '{[Filter]}'},
'info_code': {'key': 'infoCode', 'type': 'int'},
'count': {'key': 'count', 'type': 'int'},
'results': {'key': 'results', 'type': '[WorkItemResult]'}
}
def __init__(self, facets=None, info_code=None, count=None, results=None):
super(WorkItemSearchResponse, self).__init__(facets=facets, info_code=info_code)
self.count = count
self.results = results
class BoardSearchResponse(EntitySearchResponse):
"""
Defines a Board search response item.
:param facets: A dictionary storing an array of <code>Filter</code> object against each facet.
:type facets: dict
:param info_code: Numeric code indicating any additional information: 0 - Ok, 1 - Account is being reindexed, 2 - Account indexing has not started, 3 - Invalid Request, 4 - Prefix wildcard query not supported, 5 - MultiWords with code facet not supported, 6 - Account is being onboarded, 7 - Account is being onboarded or reindexed, 8 - Top value trimmed to maxresult allowed, 9 - Branches are being indexed, 10 - Faceting not enabled, 11 - Work items not accessible, 19 - Phrase queries with code type filters not supported, 20 - Wildcard queries with code type filters not supported. Any other info code is used for internal purposes.
:type info_code: int
:param count: Total number of matched Board documents.
:type count: int
:param results: List of top matched Board documents.
:type results: list of :class:`BoardResult <azure.devops.v7_1.search.models.BoardResult>`
"""
_attribute_map = {
'facets': {'key': 'facets', 'type': '{[Filter]}'},
'info_code': {'key': 'infoCode', 'type': 'int'},
'count': {'key': 'count', 'type': 'int'},
'results': {'key': 'results', 'type': '[BoardResult]'}
}
def __init__(self, facets=None, info_code=None, count=None, results=None):
super(BoardSearchResponse, self).__init__(facets=facets, info_code=info_code)
self.count = count
self.results = results
class CodeSearchResponse(EntitySearchResponse):
"""
Defines a code search response item.
:param facets: A dictionary storing an array of <code>Filter</code> object against each facet.
:type facets: dict
:param info_code: Numeric code indicating any additional information: 0 - Ok, 1 - Account is being reindexed, 2 - Account indexing has not started, 3 - Invalid Request, 4 - Prefix wildcard query not supported, 5 - MultiWords with code facet not supported, 6 - Account is being onboarded, 7 - Account is being onboarded or reindexed, 8 - Top value trimmed to maxresult allowed, 9 - Branches are being indexed, 10 - Faceting not enabled, 11 - Work items not accessible, 19 - Phrase queries with code type filters not supported, 20 - Wildcard queries with code type filters not supported. Any other info code is used for internal purposes.
:type info_code: int
:param count: Total number of matched files.
:type count: int
:param results: List of matched files.
:type results: list of :class:`CodeResult <azure.devops.v7_1.search.models.CodeResult>`
"""
_attribute_map = {
'facets': {'key': 'facets', 'type': '{[Filter]}'},
'info_code': {'key': 'infoCode', 'type': 'int'},
'count': {'key': 'count', 'type': 'int'},
'results': {'key': 'results', 'type': '[CodeResult]'}
}
def __init__(self, facets=None, info_code=None, count=None, results=None):
super(CodeSearchResponse, self).__init__(facets=facets, info_code=info_code)
self.count = count
self.results = results
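# Editor's sketch (illustrative): facet buckets on any EntitySearchResponse
# subclass map a facet name to a list of Filter objects (defined earlier in
# this module). The Filter attribute names used here (name, result_count) are
# assumptions based on that model.
def _example_facet_counts(response):
    """Return {facet_name: {bucket_name: result_count}} from a search response."""
    return {
        facet: {f.name: f.result_count for f in (filters or [])}
        for facet, filters in (response.facets or {}).items()
    }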
class EntitySearchRequest(EntitySearchRequestBase):
"""
Base contract for search request types without scroll support.
:param filters: Filters to be applied. Set it to null if there are no filters to be applied.
:type filters: dict
:param search_text: The search text.
:type search_text: str
:param order_by: Options for sorting search results. If set to null, the results will be returned sorted by relevance. If more than one sort option is provided, the results are sorted in the order specified in the OrderBy.
:type order_by: list of :class:`SortOption <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.SortOption>`
:param skip: Number of results to be skipped.
:type skip: int
:param top: Number of results to be returned.
:type top: int
:param include_facets: Flag to opt for faceting in the result. Default behavior is false.
:type include_facets: bool
"""
_attribute_map = {
'filters': {'key': 'filters', 'type': '{[str]}'},
'search_text': {'key': 'searchText', 'type': 'str'},
'order_by': {'key': '$orderBy', 'type': '[SortOption]'},
'skip': {'key': '$skip', 'type': 'int'},
'top': {'key': '$top', 'type': 'int'},
'include_facets': {'key': 'includeFacets', 'type': 'bool'}
}
def __init__(self, filters=None, search_text=None, order_by=None, skip=None, top=None, include_facets=None):
super(EntitySearchRequest, self).__init__(filters=filters, search_text=search_text)
self.order_by = order_by
self.skip = skip
self.top = top
self.include_facets = include_facets
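# Editor's sketch (illustrative): the paging and sorting knobs shared by every
# request subclass below. The sort field is entity-specific; 'path' is an
# assumed example for code search, not a value guaranteed by this module.
def _example_second_page_request():
    return EntitySearchRequest(
        search_text='TODO',
        order_by=[SortOption(field='path', sort_order='ASC')],
        skip=100,             # skip the first hundred results
        top=100,              # page size
        include_facets=True   # also ask the service for facet buckets
    )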
class PackageSearchRequest(EntitySearchRequest):
"""
Defines a package search request.
:param filters: Filters to be applied. Set it to null if there are no filters to be applied.
:type filters: dict
:param search_text: The search text.
:type search_text: str
:param order_by: Options for sorting search results. If set to null, the results will be returned sorted by relevance. If more than one sort option is provided, the results are sorted in the order specified in the OrderBy.
:type order_by: list of :class:`SortOption <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.SortOption>`
:param skip: Number of results to be skipped.
:type skip: int
:param top: Number of results to be returned.
:type top: int
:param include_facets: Flag to opt for faceting in the result. Default behavior is false.
:type include_facets: bool
"""
_attribute_map = {
'filters': {'key': 'filters', 'type': '{[str]}'},
'search_text': {'key': 'searchText', 'type': 'str'},
'order_by': {'key': '$orderBy', 'type': '[SortOption]'},
'skip': {'key': '$skip', 'type': 'int'},
'top': {'key': '$top', 'type': 'int'},
'include_facets': {'key': 'includeFacets', 'type': 'bool'},
}
def __init__(self, filters=None, search_text=None, order_by=None, skip=None, top=None, include_facets=None):
super(PackageSearchRequest, self).__init__(filters=filters, search_text=search_text, order_by=order_by, skip=skip, top=top, include_facets=include_facets)
class SettingSearchRequest(EntitySearchRequest):
"""
Defines a setting search request.
:param filters: Filters to be applied. Set it to null if there are no filters to be applied.
:type filters: dict
:param search_text: The search text.
:type search_text: str
:param order_by: Options for sorting search results. If set to null, the results will be returned sorted by relevance. If more than one sort option is provided, the results are sorted in the order specified in the OrderBy.
:type order_by: list of :class:`SortOption <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.SortOption>`
:param skip: Number of results to be skipped.
:type skip: int
:param top: Number of results to be returned.
:type top: int
:param include_facets: Flag to opt for faceting in the result. Default behavior is false.
:type include_facets: bool
"""
_attribute_map = {
'filters': {'key': 'filters', 'type': '{[str]}'},
'search_text': {'key': 'searchText', 'type': 'str'},
'order_by': {'key': '$orderBy', 'type': '[SortOption]'},
'skip': {'key': '$skip', 'type': 'int'},
'top': {'key': '$top', 'type': 'int'},
'include_facets': {'key': 'includeFacets', 'type': 'bool'},
}
def __init__(self, filters=None, search_text=None, order_by=None, skip=None, top=None, include_facets=None):
super(SettingSearchRequest, self).__init__(filters=filters, search_text=search_text, order_by=order_by, skip=skip, top=top, include_facets=include_facets)
class WikiSearchRequest(EntitySearchRequest):
"""
Defines a wiki search request.
:param filters: Filters to be applied. Set it to null if there are no filters to be applied.
:type filters: dict
:param search_text: The search text.
:type search_text: str
:param order_by: Options for sorting search results. If set to null, the results will be returned sorted by relevance. If more than one sort option is provided, the results are sorted in the order specified in the OrderBy.
:type order_by: list of :class:`SortOption <azure.devops.v7_1.microsoft._visual_studio._services._search._shared._web_api.models.SortOption>`
:param skip: Number of results to be skipped.
:type skip: int
:param top: Number of results to be returned.
:type top: int
:param include_facets: Flag to opt for faceting in the result. Default behavior is false.
:type include_facets: bool
"""
_attribute_map = {
'filters': {'key': 'filters', 'type': '{[str]}'},
'search_text': {'key': 'searchText', 'type': 'str'},
'order_by': {'key': '$orderBy', 'type': '[SortOption]'},
'skip': {'key': '$skip', 'type': 'int'},
'top': {'key': '$top', 'type': 'int'},
'include_facets': {'key': 'includeFacets', 'type': 'bool'},
}
def __init__(self, filters=None, search_text=None, order_by=None, skip=None, top=None, include_facets=None):
super(WikiSearchRequest, self).__init__(filters=filters, search_text=search_text, order_by=order_by, skip=skip, top=top, include_facets=include_facets)
class WorkItemSearchRequest(EntitySearchRequest):
"""
Defines a work item search request.
:param filters: Filters to be applied. Set it to null if there are no filters to be applied.
:type filters: dict
:param search_text: The search text.
:type search_text: str
:param order_by: Options for sorting search results. If set to null, the results will be returned sorted by relevance. If more than one sort option is provided, the results are sorted in the order specified in the OrderBy.
:type order_by: list of :class:`SortOption <azure.devops.v7_1.search.models.SortOption>`
:param skip: Number of results to be skipped.
:type skip: int
:param top: Number of results to be returned.
:type top: int
:param include_facets: Flag to opt for faceting in the result. Default behavior is false.
:type include_facets: bool
"""
_attribute_map = {
'filters': {'key': 'filters', 'type': '{[str]}'},
'search_text': {'key': 'searchText', 'type': 'str'},
'order_by': {'key': '$orderBy', 'type': '[SortOption]'},
'skip': {'key': '$skip', 'type': 'int'},
'top': {'key': '$top', 'type': 'int'},
'include_facets': {'key': 'includeFacets', 'type': 'bool'},
}
def __init__(self, filters=None, search_text=None, order_by=None, skip=None, top=None, include_facets=None):
super(WorkItemSearchRequest, self).__init__(filters=filters, search_text=search_text, order_by=order_by, skip=skip, top=top, include_facets=include_facets)
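# Editor's sketch (illustrative): a work item search with facet filters. The
# filter keys ("System.TeamProject", "System.WorkItemType") follow the
# service's field reference naming and are assumptions as far as this module
# is concerned.
def _example_work_item_request():
    return WorkItemSearchRequest(
        search_text='crash on startup',
        filters={
            'System.TeamProject': ['Fabrikam'],
            'System.WorkItemType': ['Bug']
        },
        top=50
    )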
class BoardSearchRequest(EntitySearchRequest):
"""
Defines a Board search request.
:param filters: Filters to be applied. Set it to null if there are no filters to be applied.
:type filters: dict
:param search_text: The search text.
:type search_text: str
:param order_by: Options for sorting search results. If set to null, the results will be returned sorted by relevance. If more than one sort option is provided, the results are sorted in the order specified in the OrderBy.
:type order_by: list of :class:`SortOption <azure.devops.v7_1.search.models.SortOption>`
:param skip: Number of results to be skipped.
:type skip: int
:param top: Number of results to be returned.
:type top: int
:param include_facets: Flag to opt for faceting in the result. Default behavior is false.
:type include_facets: bool
"""
_attribute_map = {
'filters': {'key': 'filters', 'type': '{[str]}'},
'search_text': {'key': 'searchText', 'type': 'str'},
'order_by': {'key': '$orderBy', 'type': '[SortOption]'},
'skip': {'key': '$skip', 'type': 'int'},
'top': {'key': '$top', 'type': 'int'},
'include_facets': {'key': 'includeFacets', 'type': 'bool'},
}
def __init__(self, filters=None, search_text=None, order_by=None, skip=None, top=None, include_facets=None):
super(BoardSearchRequest, self).__init__(filters=filters, search_text=search_text, order_by=order_by, skip=skip, top=top, include_facets=include_facets)
class CodeSearchRequest(EntitySearchRequest):
"""
Defines a code search request.
:param filters: Filters to be applied. Set it to null if there are no filters to be applied.
:type filters: dict
:param search_text: The search text.
:type search_text: str
:param order_by: Options for sorting search results. If set to null, the results will be returned sorted by relevance. If more than one sort option is provided, the results are sorted in the order specified in the OrderBy.
:type order_by: list of :class:`SortOption <azure.devops.v7_1.search.models.SortOption>`
:param skip: Number of results to be skipped.
:type skip: int
:param top: Number of results to be returned.
:type top: int
:param include_facets: Flag to opt for faceting in the result. Default behavior is false.
:type include_facets: bool
:param include_snippet: Flag to opt for including matched code snippet in the result. Default behavior is false.
:type include_snippet: bool
"""
_attribute_map = {
'filters': {'key': 'filters', 'type': '{[str]}'},
'search_text': {'key': 'searchText', 'type': 'str'},
'order_by': {'key': '$orderBy', 'type': '[SortOption]'},
'skip': {'key': '$skip', 'type': 'int'},
'top': {'key': '$top', 'type': 'int'},
'include_facets': {'key': 'includeFacets', 'type': 'bool'},
'include_snippet': {'key': 'includeSnippet', 'type': 'bool'}
}
def __init__(self, filters=None, search_text=None, order_by=None, skip=None, top=None, include_facets=None, include_snippet=None):
super(CodeSearchRequest, self).__init__(filters=filters, search_text=search_text, order_by=order_by, skip=skip, top=top, include_facets=include_facets)
self.include_snippet = include_snippet
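# Editor's sketch (illustrative): code search layers include_snippet on top of
# the common request shape. The facet filter keys ("Project", "Branch") are
# assumed examples of the service's facet names.
def _example_code_search_request():
    return CodeSearchRequest(
        search_text='def main',
        filters={'Project': ['Fabrikam'], 'Branch': ['main']},
        top=25,
        include_facets=True,
        include_snippet=True   # return the matched snippet with each result
    )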
__all__ = [
'BoardResult',
'BranchInfo',
'CodeResult',
'Collection',
'CustomRepositoryBranchStatusResponse',
'CustomRepositoryStatusResponse',
'DepotInfo',
'EntitySearchRequestBase',
'EntitySearchResponse',
'FeedInfo',
'Filter',
'Hit',
'PackageHit',
'PackageResult',
'PackageSearchResponse',
'PackageSearchResponseContent',
'Project',
'ProjectReference',
'Repository',
'RepositoryStatusResponse',
'ScrollSearchRequest',
'SettingResult',
'SettingSearchResponse',
'SortOption',
'Team',
'TfvcRepositoryStatusResponse',
'Version',
'Wiki',
'WikiHit',
'WikiResult',
'WikiSearchResponse',
'WorkItemHit',
'WorkItemResult',
'WorkItemSearchResponse',
'BoardSearchResponse',
'CodeSearchResponse',
'EntitySearchRequest',
'PackageSearchRequest',
'SettingSearchRequest',
'WikiSearchRequest',
'WorkItemSearchRequest',
'BoardSearchRequest',
'CodeSearchRequest',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/search/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/search/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 18801
}
| 364 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class AssociatedWorkItem(Model):
"""
:param assigned_to:
:type assigned_to: str
:param id: Id of the associated work item.
:type id: int
:param state:
:type state: str
:param title:
:type title: str
:param url: REST Url of the work item.
:type url: str
:param web_url:
:type web_url: str
:param work_item_type:
:type work_item_type: str
"""
_attribute_map = {
'assigned_to': {'key': 'assignedTo', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'state': {'key': 'state', 'type': 'str'},
'title': {'key': 'title', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'web_url': {'key': 'webUrl', 'type': 'str'},
'work_item_type': {'key': 'workItemType', 'type': 'str'}
}
def __init__(self, assigned_to=None, id=None, state=None, title=None, url=None, web_url=None, work_item_type=None):
super(AssociatedWorkItem, self).__init__()
self.assigned_to = assigned_to
self.id = id
self.state = state
self.title = title
self.url = url
self.web_url = web_url
self.work_item_type = work_item_type
class FileContentMetadata(Model):
"""
:param content_type:
:type content_type: str
:param encoding:
:type encoding: int
:param extension:
:type extension: str
:param file_name:
:type file_name: str
:param is_binary:
:type is_binary: bool
:param is_image:
:type is_image: bool
:param vs_link:
:type vs_link: str
"""
_attribute_map = {
'content_type': {'key': 'contentType', 'type': 'str'},
'encoding': {'key': 'encoding', 'type': 'int'},
'extension': {'key': 'extension', 'type': 'str'},
'file_name': {'key': 'fileName', 'type': 'str'},
'is_binary': {'key': 'isBinary', 'type': 'bool'},
'is_image': {'key': 'isImage', 'type': 'bool'},
'vs_link': {'key': 'vsLink', 'type': 'str'}
}
def __init__(self, content_type=None, encoding=None, extension=None, file_name=None, is_binary=None, is_image=None, vs_link=None):
super(FileContentMetadata, self).__init__()
self.content_type = content_type
self.encoding = encoding
self.extension = extension
self.file_name = file_name
self.is_binary = is_binary
self.is_image = is_image
self.vs_link = vs_link
class GitRepository(Model):
"""
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.tfvc.models.ReferenceLinks>`
:param default_branch:
:type default_branch: str
:param id:
:type id: str
:param is_disabled: True if the repository is disabled. False otherwise.
:type is_disabled: bool
:param is_fork: True if the repository was created as a fork.
:type is_fork: bool
:param is_in_maintenance: True if the repository is in maintenance. False otherwise.
:type is_in_maintenance: bool
:param name:
:type name: str
:param parent_repository:
:type parent_repository: :class:`GitRepositoryRef <azure.devops.v7_1.tfvc.models.GitRepositoryRef>`
:param project:
:type project: :class:`TeamProjectReference <azure.devops.v7_1.tfvc.models.TeamProjectReference>`
:param remote_url:
:type remote_url: str
:param size: Compressed size (bytes) of the repository.
:type size: long
:param ssh_url:
:type ssh_url: str
:param url:
:type url: str
:param valid_remote_urls:
:type valid_remote_urls: list of str
:param web_url:
:type web_url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'default_branch': {'key': 'defaultBranch', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_disabled': {'key': 'isDisabled', 'type': 'bool'},
'is_fork': {'key': 'isFork', 'type': 'bool'},
'is_in_maintenance': {'key': 'isInMaintenance', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'parent_repository': {'key': 'parentRepository', 'type': 'GitRepositoryRef'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'remote_url': {'key': 'remoteUrl', 'type': 'str'},
'size': {'key': 'size', 'type': 'long'},
'ssh_url': {'key': 'sshUrl', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'valid_remote_urls': {'key': 'validRemoteUrls', 'type': '[str]'},
'web_url': {'key': 'webUrl', 'type': 'str'}
}
def __init__(self, _links=None, default_branch=None, id=None, is_disabled=None, is_fork=None, is_in_maintenance=None, name=None, parent_repository=None, project=None, remote_url=None, size=None, ssh_url=None, url=None, valid_remote_urls=None, web_url=None):
super(GitRepository, self).__init__()
self._links = _links
self.default_branch = default_branch
self.id = id
self.is_disabled = is_disabled
self.is_fork = is_fork
self.is_in_maintenance = is_in_maintenance
self.name = name
self.parent_repository = parent_repository
self.project = project
self.remote_url = remote_url
self.size = size
self.ssh_url = ssh_url
self.url = url
self.valid_remote_urls = valid_remote_urls
self.web_url = web_url
class GitRepositoryRef(Model):
"""
:param collection: Team Project Collection where this fork resides.
:type collection: :class:`TeamProjectCollectionReference <azure.devops.v7_1.tfvc.models.TeamProjectCollectionReference>`
:param id:
:type id: str
:param is_fork: True if the repository was created as a fork.
:type is_fork: bool
:param name:
:type name: str
:param project:
:type project: :class:`TeamProjectReference <azure.devops.v7_1.tfvc.models.TeamProjectReference>`
:param remote_url:
:type remote_url: str
:param ssh_url:
:type ssh_url: str
:param url:
:type url: str
"""
_attribute_map = {
'collection': {'key': 'collection', 'type': 'TeamProjectCollectionReference'},
'id': {'key': 'id', 'type': 'str'},
'is_fork': {'key': 'isFork', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'remote_url': {'key': 'remoteUrl', 'type': 'str'},
'ssh_url': {'key': 'sshUrl', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, collection=None, id=None, is_fork=None, name=None, project=None, remote_url=None, ssh_url=None, url=None):
super(GitRepositoryRef, self).__init__()
self.collection = collection
self.id = id
self.is_fork = is_fork
self.name = name
self.project = project
self.remote_url = remote_url
self.ssh_url = ssh_url
self.url = url
class GraphSubjectBase(Model):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None):
super(GraphSubjectBase, self).__init__()
self._links = _links
self.descriptor = descriptor
self.display_name = display_name
self.url = url
class Change(Model):
"""
:param change_type: The type of change that was made to the item.
:type change_type: object
:param item: Current version.
:type item: object
:param new_content: Content of the item after the change.
:type new_content: :class:`ItemContent <azure.devops.v7_1.tfvc.models.ItemContent>`
:param source_server_item: Path of the item on the server.
:type source_server_item: str
:param url: URL to retrieve the item.
:type url: str
"""
_attribute_map = {
'change_type': {'key': 'changeType', 'type': 'object'},
'item': {'key': 'item', 'type': 'object'},
'new_content': {'key': 'newContent', 'type': 'ItemContent'},
'source_server_item': {'key': 'sourceServerItem', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, change_type=None, item=None, new_content=None, source_server_item=None, url=None):
super(Change, self).__init__()
self.change_type = change_type
self.item = item
self.new_content = new_content
self.source_server_item = source_server_item
self.url = url
class CheckinNote(Model):
"""
:param name:
:type name: str
:param value:
:type value: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, name=None, value=None):
super(CheckinNote, self).__init__()
self.name = name
self.value = value
class IdentityRef(GraphSubjectBase):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param directory_alias: Deprecated - Can be retrieved by querying the Graph user referenced in the "self" entry of the IdentityRef "_links" dictionary.
:type directory_alias: str
:param id:
:type id: str
:param image_url: Deprecated - Available in the "avatar" entry of the IdentityRef "_links" dictionary.
:type image_url: str
:param inactive: Deprecated - Can be retrieved by querying the Graph membership state referenced in the "membershipState" entry of the GraphUser "_links" dictionary.
:type inactive: bool
:param is_aad_identity: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsAadUserType/Descriptor.IsAadGroupType).
:type is_aad_identity: bool
:param is_container: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsGroupType).
:type is_container: bool
:param is_deleted_in_origin:
:type is_deleted_in_origin: bool
:param profile_url: Deprecated - not in use in most preexisting implementations of ToIdentityRef.
:type profile_url: str
:param unique_name: Deprecated - use Domain+PrincipalName instead.
:type unique_name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'directory_alias': {'key': 'directoryAlias', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'image_url': {'key': 'imageUrl', 'type': 'str'},
'inactive': {'key': 'inactive', 'type': 'bool'},
'is_aad_identity': {'key': 'isAadIdentity', 'type': 'bool'},
'is_container': {'key': 'isContainer', 'type': 'bool'},
'is_deleted_in_origin': {'key': 'isDeletedInOrigin', 'type': 'bool'},
'profile_url': {'key': 'profileUrl', 'type': 'str'},
'unique_name': {'key': 'uniqueName', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, directory_alias=None, id=None, image_url=None, inactive=None, is_aad_identity=None, is_container=None, is_deleted_in_origin=None, profile_url=None, unique_name=None):
super(IdentityRef, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url)
self.directory_alias = directory_alias
self.id = id
self.image_url = image_url
self.inactive = inactive
self.is_aad_identity = is_aad_identity
self.is_container = is_container
self.is_deleted_in_origin = is_deleted_in_origin
self.profile_url = profile_url
self.unique_name = unique_name
class ItemContent(Model):
"""
:param content:
:type content: str
:param content_type:
:type content_type: object
"""
_attribute_map = {
'content': {'key': 'content', 'type': 'str'},
'content_type': {'key': 'contentType', 'type': 'object'}
}
def __init__(self, content=None, content_type=None):
super(ItemContent, self).__init__()
self.content = content
self.content_type = content_type
class ItemModel(Model):
"""
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.tfvc.models.ReferenceLinks>`
:param content:
:type content: str
:param content_metadata:
:type content_metadata: :class:`FileContentMetadata <azure.devops.v7_1.tfvc.models.FileContentMetadata>`
:param is_folder:
:type is_folder: bool
:param is_sym_link:
:type is_sym_link: bool
:param path:
:type path: str
:param url:
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'content': {'key': 'content', 'type': 'str'},
'content_metadata': {'key': 'contentMetadata', 'type': 'FileContentMetadata'},
'is_folder': {'key': 'isFolder', 'type': 'bool'},
'is_sym_link': {'key': 'isSymLink', 'type': 'bool'},
'path': {'key': 'path', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, content=None, content_metadata=None, is_folder=None, is_sym_link=None, path=None, url=None):
super(ItemModel, self).__init__()
self._links = _links
self.content = content
self.content_metadata = content_metadata
self.is_folder = is_folder
self.is_sym_link = is_sym_link
self.path = path
self.url = url
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The read-only view of the links. Because reference links are read-only, they are exposed only as read-only.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
class TeamProjectCollectionReference(Model):
"""
Reference object for a TeamProjectCollection.
:param avatar_url: Collection avatar Url.
:type avatar_url: str
:param id: Collection Id.
:type id: str
:param name: Collection Name.
:type name: str
:param url: Collection REST Url.
:type url: str
"""
_attribute_map = {
'avatar_url': {'key': 'avatarUrl', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, avatar_url=None, id=None, name=None, url=None):
super(TeamProjectCollectionReference, self).__init__()
self.avatar_url = avatar_url
self.id = id
self.name = name
self.url = url
class TeamProjectReference(Model):
"""
Represents a shallow reference to a TeamProject.
:param abbreviation: Project abbreviation.
:type abbreviation: str
:param default_team_image_url: Url to default team identity image.
:type default_team_image_url: str
:param description: The project's description (if any).
:type description: str
:param id: Project identifier.
:type id: str
:param last_update_time: Project last update time.
:type last_update_time: datetime
:param name: Project name.
:type name: str
:param revision: Project revision.
:type revision: long
:param state: Project state.
:type state: object
:param url: Url to the full version of the object.
:type url: str
:param visibility: Project visibility.
:type visibility: object
"""
_attribute_map = {
'abbreviation': {'key': 'abbreviation', 'type': 'str'},
'default_team_image_url': {'key': 'defaultTeamImageUrl', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'last_update_time': {'key': 'lastUpdateTime', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'revision': {'key': 'revision', 'type': 'long'},
'state': {'key': 'state', 'type': 'object'},
'url': {'key': 'url', 'type': 'str'},
'visibility': {'key': 'visibility', 'type': 'object'}
}
def __init__(self, abbreviation=None, default_team_image_url=None, description=None, id=None, last_update_time=None, name=None, revision=None, state=None, url=None, visibility=None):
super(TeamProjectReference, self).__init__()
self.abbreviation = abbreviation
self.default_team_image_url = default_team_image_url
self.description = description
self.id = id
self.last_update_time = last_update_time
self.name = name
self.revision = revision
self.state = state
self.url = url
self.visibility = visibility
class TfvcBranchMapping(Model):
"""
A branch mapping.
:param depth: Depth of the branch.
:type depth: str
:param server_item: Server item for the branch.
:type server_item: str
:param type: Type of the branch.
:type type: str
"""
_attribute_map = {
'depth': {'key': 'depth', 'type': 'str'},
'server_item': {'key': 'serverItem', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, depth=None, server_item=None, type=None):
super(TfvcBranchMapping, self).__init__()
self.depth = depth
self.server_item = server_item
self.type = type
class TfvcChange(Change):
"""
A change.
:param merge_sources: List of merge sources in case of rename or branch creation.
:type merge_sources: list of :class:`TfvcMergeSource <azure.devops.v7_1.tfvc.models.TfvcMergeSource>`
:param pending_version: Version against which a (shelved) change was pended.
:type pending_version: int
"""
_attribute_map = {
'merge_sources': {'key': 'mergeSources', 'type': '[TfvcMergeSource]'},
'pending_version': {'key': 'pendingVersion', 'type': 'int'}
}
def __init__(self, merge_sources=None, pending_version=None):
super(TfvcChange, self).__init__()
self.merge_sources = merge_sources
self.pending_version = pending_version
class TfvcChangesetRef(Model):
"""
Metadata for a changeset.
:param _links: A collection of REST reference links.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.tfvc.models.ReferenceLinks>`
:param author: Alias or display name of user.
:type author: :class:`IdentityRef <azure.devops.v7_1.tfvc.models.IdentityRef>`
:param comment: Comment for the changeset.
:type comment: str
:param comment_truncated: Was the Comment result truncated?
:type comment_truncated: bool
:param created_date: Creation date of the changeset.
:type created_date: datetime
:param changeset_id: Changeset Id.
:type changeset_id: int
:param checked_in_by: Alias or display name of user.
:type checked_in_by: :class:`IdentityRef <azure.devops.v7_1.tfvc.models.IdentityRef>`
:param url: URL to retrieve the item.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'author': {'key': 'author', 'type': 'IdentityRef'},
'comment': {'key': 'comment', 'type': 'str'},
'comment_truncated': {'key': 'commentTruncated', 'type': 'bool'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'changeset_id': {'key': 'changesetId', 'type': 'int'},
'checked_in_by': {'key': 'checkedInBy', 'type': 'IdentityRef'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, author=None, comment=None, comment_truncated=None, created_date=None, changeset_id=None, checked_in_by=None, url=None):
super(TfvcChangesetRef, self).__init__()
self._links = _links
self.author = author
self.comment = comment
self.comment_truncated = comment_truncated
self.created_date = created_date
self.changeset_id = changeset_id
self.checked_in_by = checked_in_by
self.url = url
class TfvcChangesetSearchCriteria(Model):
"""
Criteria used in a search for change lists.
:param author: Alias or display name of user who made the changes.
:type author: str
:param follow_renames: Whether or not to follow renames for the given item being queried.
:type follow_renames: bool
:param from_date: If provided, only include changesets created after this date (string).
:type from_date: str
:param from_id: If provided, only include changesets after this changesetID.
:type from_id: int
:param include_links: Whether to include the _links field on the shallow references.
:type include_links: bool
:param item_path: Path of item to search under.
:type item_path: str
:param mappings:
:type mappings: list of :class:`TfvcMappingFilter <azure.devops.v7_1.tfvc.models.TfvcMappingFilter>`
:param to_date: If provided, only include changesets created before this date (string).
:type to_date: str
:param to_id: If provided, a version descriptor for the latest change list to include.
:type to_id: int
"""
_attribute_map = {
'author': {'key': 'author', 'type': 'str'},
'follow_renames': {'key': 'followRenames', 'type': 'bool'},
'from_date': {'key': 'fromDate', 'type': 'str'},
'from_id': {'key': 'fromId', 'type': 'int'},
'include_links': {'key': 'includeLinks', 'type': 'bool'},
'item_path': {'key': 'itemPath', 'type': 'str'},
'mappings': {'key': 'mappings', 'type': '[TfvcMappingFilter]'},
'to_date': {'key': 'toDate', 'type': 'str'},
'to_id': {'key': 'toId', 'type': 'int'}
}
def __init__(self, author=None, follow_renames=None, from_date=None, from_id=None, include_links=None, item_path=None, mappings=None, to_date=None, to_id=None):
super(TfvcChangesetSearchCriteria, self).__init__()
self.author = author
self.follow_renames = follow_renames
self.from_date = from_date
self.from_id = from_id
self.include_links = include_links
self.item_path = item_path
self.mappings = mappings
self.to_date = to_date
self.to_id = to_id
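# Editor's sketch (illustrative): restricting a changeset query to a server
# path and a date window. Dates travel as strings per the docstring above; the
# ISO 8601 format shown is an assumption about what the service accepts.
def _example_changeset_criteria():
    return TfvcChangesetSearchCriteria(
        item_path='$/Fabrikam/Main',
        from_date='2023-01-01T00:00:00Z',
        to_date='2023-06-30T00:00:00Z',
        follow_renames=True
    )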
class TfvcChangesetsRequestData(Model):
"""
Request body for Get batched changesets.
:param comment_length: Max length of the comment.
:type comment_length: int
:param changeset_ids: List of changeset Ids.
:type changeset_ids: list of int
:param include_links: Whether to include the _links field on the shallow references.
:type include_links: bool
"""
_attribute_map = {
'comment_length': {'key': 'commentLength', 'type': 'int'},
'changeset_ids': {'key': 'changesetIds', 'type': '[int]'},
'include_links': {'key': 'includeLinks', 'type': 'bool'}
}
def __init__(self, comment_length=None, changeset_ids=None, include_links=None):
super(TfvcChangesetsRequestData, self).__init__()
self.comment_length = comment_length
self.changeset_ids = changeset_ids
self.include_links = include_links
class TfvcItem(ItemModel):
"""
Metadata for an item.
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.tfvc.models.ReferenceLinks>`
:param content:
:type content: str
:param content_metadata:
:type content_metadata: :class:`FileContentMetadata <azure.devops.v7_1.tfvc.models.FileContentMetadata>`
:param is_folder:
:type is_folder: bool
:param is_sym_link:
:type is_sym_link: bool
:param path:
:type path: str
:param url:
:type url: str
:param deletion_id: Greater than 0 if item is deleted.
:type deletion_id: int
:param encoding: File encoding from database, -1 represents binary.
:type encoding: int
:param hash_value: MD5 hash as a base 64 string, applies to files only.
:type hash_value: str
:param change_date: Item changed datetime.
:type change_date: datetime
:param is_branch: True if item is a branch.
:type is_branch: bool
:param is_pending_change: True if there is a change pending.
:type is_pending_change: bool
:param size: The size of the file, if applicable.
:type size: long
:param version: Changeset version Id.
:type version: int
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'content': {'key': 'content', 'type': 'str'},
'content_metadata': {'key': 'contentMetadata', 'type': 'FileContentMetadata'},
'is_folder': {'key': 'isFolder', 'type': 'bool'},
'is_sym_link': {'key': 'isSymLink', 'type': 'bool'},
'path': {'key': 'path', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'deletion_id': {'key': 'deletionId', 'type': 'int'},
'encoding': {'key': 'encoding', 'type': 'int'},
'hash_value': {'key': 'hashValue', 'type': 'str'},
'change_date': {'key': 'changeDate', 'type': 'iso-8601'},
'is_branch': {'key': 'isBranch', 'type': 'bool'},
'is_pending_change': {'key': 'isPendingChange', 'type': 'bool'},
'size': {'key': 'size', 'type': 'long'},
'version': {'key': 'version', 'type': 'int'}
}
def __init__(self, _links=None, content=None, content_metadata=None, is_folder=None, is_sym_link=None, path=None, url=None, deletion_id=None, encoding=None, hash_value=None, change_date=None, is_branch=None, is_pending_change=None, size=None, version=None):
super(TfvcItem, self).__init__(_links=_links, content=content, content_metadata=content_metadata, is_folder=is_folder, is_sym_link=is_sym_link, path=path, url=url)
self.deletion_id = deletion_id
self.encoding = encoding
self.hash_value = hash_value
self.change_date = change_date
self.is_branch = is_branch
self.is_pending_change = is_pending_change
self.size = size
self.version = version
class TfvcItemDescriptor(Model):
"""
Item path and Version descriptor properties
:param path: Item path.
:type path: str
:param recursion_level: Defaults to OneLevel.
:type recursion_level: object
:param version: Specify the desired version, can be null or empty string only if VersionType is latest or tip.
:type version: str
:param version_option: Defaults to None.
:type version_option: object
:param version_type: Defaults to Latest.
:type version_type: object
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'recursion_level': {'key': 'recursionLevel', 'type': 'object'},
'version': {'key': 'version', 'type': 'str'},
'version_option': {'key': 'versionOption', 'type': 'object'},
'version_type': {'key': 'versionType', 'type': 'object'}
}
def __init__(self, path=None, recursion_level=None, version=None, version_option=None, version_type=None):
super(TfvcItemDescriptor, self).__init__()
self.path = path
self.recursion_level = recursion_level
self.version = version
self.version_option = version_option
self.version_type = version_type
class TfvcItemRequestData(Model):
"""
Request body used by Get Items Batch.
:param include_content_metadata: If true, include metadata about the file type.
:type include_content_metadata: bool
:param include_links: Whether to include the _links field on the shallow references.
:type include_links: bool
:param item_descriptors:
:type item_descriptors: list of :class:`TfvcItemDescriptor <azure.devops.v7_1.tfvc.models.TfvcItemDescriptor>`
"""
_attribute_map = {
'include_content_metadata': {'key': 'includeContentMetadata', 'type': 'bool'},
'include_links': {'key': 'includeLinks', 'type': 'bool'},
'item_descriptors': {'key': 'itemDescriptors', 'type': '[TfvcItemDescriptor]'}
}
def __init__(self, include_content_metadata=None, include_links=None, item_descriptors=None):
super(TfvcItemRequestData, self).__init__()
self.include_content_metadata = include_content_metadata
self.include_links = include_links
self.item_descriptors = item_descriptors
class TfvcLabelRef(Model):
"""
Metadata for a Label.
:param _links: Collection of reference links.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.tfvc.models.ReferenceLinks>`
:param description: Label description.
:type description: str
:param id: Label Id.
:type id: int
:param label_scope: Label scope.
:type label_scope: str
:param modified_date: Last modified datetime for the label.
:type modified_date: datetime
:param name: Label name.
:type name: str
:param owner: Label owner.
:type owner: :class:`IdentityRef <azure.devops.v7_1.tfvc.models.IdentityRef>`
:param url: Label Url.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'label_scope': {'key': 'labelScope', 'type': 'str'},
'modified_date': {'key': 'modifiedDate', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'IdentityRef'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, description=None, id=None, label_scope=None, modified_date=None, name=None, owner=None, url=None):
super(TfvcLabelRef, self).__init__()
self._links = _links
self.description = description
self.id = id
self.label_scope = label_scope
self.modified_date = modified_date
self.name = name
self.owner = owner
self.url = url
class TfvcLabelRequestData(Model):
"""
:param include_links: Whether to include the _links field on the shallow references
:type include_links: bool
:param item_label_filter:
:type item_label_filter: str
:param label_scope: Label scope.
:type label_scope: str
:param max_item_count: Maximum number of labels to return.
:type max_item_count: int
:param name: Label name.
:type name: str
:param owner: Label owner.
:type owner: str
"""
_attribute_map = {
'include_links': {'key': 'includeLinks', 'type': 'bool'},
'item_label_filter': {'key': 'itemLabelFilter', 'type': 'str'},
'label_scope': {'key': 'labelScope', 'type': 'str'},
'max_item_count': {'key': 'maxItemCount', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'str'}
}
def __init__(self, include_links=None, item_label_filter=None, label_scope=None, max_item_count=None, name=None, owner=None):
super(TfvcLabelRequestData, self).__init__()
self.include_links = include_links
self.item_label_filter = item_label_filter
self.label_scope = label_scope
self.max_item_count = max_item_count
self.name = name
self.owner = owner
class TfvcMappingFilter(Model):
"""
MappingFilter can be used to include or exclude specific paths.
:param exclude: True if ServerPath should be excluded.
:type exclude: bool
:param server_path: Path to be included or excluded.
:type server_path: str
"""
_attribute_map = {
'exclude': {'key': 'exclude', 'type': 'bool'},
'server_path': {'key': 'serverPath', 'type': 'str'}
}
def __init__(self, exclude=None, server_path=None):
super(TfvcMappingFilter, self).__init__()
self.exclude = exclude
self.server_path = server_path
class TfvcMergeSource(Model):
"""
:param is_rename: Indicates if this a rename source. If false, it is a merge source.
:type is_rename: bool
:param server_item: The server item of the merge source.
:type server_item: str
:param version_from: Start of the version range.
:type version_from: int
:param version_to: End of the version range.
:type version_to: int
"""
_attribute_map = {
'is_rename': {'key': 'isRename', 'type': 'bool'},
'server_item': {'key': 'serverItem', 'type': 'str'},
'version_from': {'key': 'versionFrom', 'type': 'int'},
'version_to': {'key': 'versionTo', 'type': 'int'}
}
def __init__(self, is_rename=None, server_item=None, version_from=None, version_to=None):
super(TfvcMergeSource, self).__init__()
self.is_rename = is_rename
self.server_item = server_item
self.version_from = version_from
self.version_to = version_to
class TfvcPolicyFailureInfo(Model):
"""
Policy failure information.
:param message: Policy failure message.
:type message: str
:param policy_name: Name of the policy that failed.
:type policy_name: str
"""
_attribute_map = {
'message': {'key': 'message', 'type': 'str'},
'policy_name': {'key': 'policyName', 'type': 'str'}
}
def __init__(self, message=None, policy_name=None):
super(TfvcPolicyFailureInfo, self).__init__()
self.message = message
self.policy_name = policy_name
class TfvcPolicyOverrideInfo(Model):
"""
Information on the policy override.
:param comment: Overridden policy comment.
:type comment: str
:param policy_failures: Information on the failed policy that was overridden.
:type policy_failures: list of :class:`TfvcPolicyFailureInfo <azure.devops.v7_1.tfvc.models.TfvcPolicyFailureInfo>`
"""
_attribute_map = {
'comment': {'key': 'comment', 'type': 'str'},
'policy_failures': {'key': 'policyFailures', 'type': '[TfvcPolicyFailureInfo]'}
}
def __init__(self, comment=None, policy_failures=None):
super(TfvcPolicyOverrideInfo, self).__init__()
self.comment = comment
self.policy_failures = policy_failures
class TfvcShallowBranchRef(Model):
"""
A shallow reference to a branch.
:param path: Path for the branch.
:type path: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'}
}
def __init__(self, path=None):
super(TfvcShallowBranchRef, self).__init__()
self.path = path
class TfvcShelvesetRef(Model):
"""
Metadata for a shallow shelveset.
:param _links: List of reference links for the shelveset.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.tfvc.models.ReferenceLinks>`
:param comment: Shelveset comment.
:type comment: str
:param comment_truncated: Shelveset comment truncated as applicable.
:type comment_truncated: bool
:param created_date: Shelveset create date.
:type created_date: datetime
:param id: Shelveset Id.
:type id: str
:param name: Shelveset name.
:type name: str
:param owner: Shelveset Owner.
:type owner: :class:`IdentityRef <azure.devops.v7_1.tfvc.models.IdentityRef>`
:param url: Shelveset Url.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'comment': {'key': 'comment', 'type': 'str'},
'comment_truncated': {'key': 'commentTruncated', 'type': 'bool'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'IdentityRef'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, comment=None, comment_truncated=None, created_date=None, id=None, name=None, owner=None, url=None):
super(TfvcShelvesetRef, self).__init__()
self._links = _links
self.comment = comment
self.comment_truncated = comment_truncated
self.created_date = created_date
self.id = id
self.name = name
self.owner = owner
self.url = url
class TfvcShelvesetRequestData(Model):
"""
:param include_details: Whether to include policyOverride and notes. Only applies when requesting a single deep shelveset.
:type include_details: bool
:param include_links: Whether to include the _links field on the shallow references. Does not apply when requesting a single deep shelveset object. Links will always be included in the deep shelveset.
:type include_links: bool
:param include_work_items: Whether to include workItems
:type include_work_items: bool
:param max_comment_length: Max length of comment
:type max_comment_length: int
:param max_change_count: Max number of changes to include
:type max_change_count: int
:param name: Shelveset name
:type name: str
:param owner: Owner's ID. Could be a name or a guid.
:type owner: str
"""
_attribute_map = {
'include_details': {'key': 'includeDetails', 'type': 'bool'},
'include_links': {'key': 'includeLinks', 'type': 'bool'},
'include_work_items': {'key': 'includeWorkItems', 'type': 'bool'},
'max_comment_length': {'key': 'maxCommentLength', 'type': 'int'},
'max_change_count': {'key': 'maxChangeCount', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'str'}
}
def __init__(self, include_details=None, include_links=None, include_work_items=None, max_comment_length=None, max_change_count=None, name=None, owner=None):
super(TfvcShelvesetRequestData, self).__init__()
self.include_details = include_details
self.include_links = include_links
self.include_work_items = include_work_items
self.max_comment_length = max_comment_length
self.max_change_count = max_change_count
self.name = name
self.owner = owner
class TfvcStatistics(Model):
"""
:param file_count_total: Count of files at the requested scope.
:type file_count_total: long
:param changeset_id: Id of the last changeset the stats are based on.
:type changeset_id: int
"""
_attribute_map = {
'file_count_total': {'key': 'fileCountTotal', 'type': 'long'},
'changeset_id': {'key': 'changesetId', 'type': 'int'}
}
def __init__(self, file_count_total=None, changeset_id=None):
super(TfvcStatistics, self).__init__()
self.file_count_total = file_count_total
self.changeset_id = changeset_id
class TfvcVersionDescriptor(Model):
"""
Version descriptor properties.
:param version: Version object.
:type version: str
:param version_option:
:type version_option: object
:param version_type:
:type version_type: object
"""
_attribute_map = {
'version': {'key': 'version', 'type': 'str'},
'version_option': {'key': 'versionOption', 'type': 'object'},
'version_type': {'key': 'versionType', 'type': 'object'}
}
def __init__(self, version=None, version_option=None, version_type=None):
super(TfvcVersionDescriptor, self).__init__()
self.version = version
self.version_option = version_option
self.version_type = version_type
class VersionControlProjectInfo(Model):
"""
:param default_source_control_type:
:type default_source_control_type: object
:param project:
:type project: :class:`TeamProjectReference <azure.devops.v7_1.tfvc.models.TeamProjectReference>`
:param supports_git:
:type supports_git: bool
:param supports_tFVC:
:type supports_tFVC: bool
"""
_attribute_map = {
'default_source_control_type': {'key': 'defaultSourceControlType', 'type': 'object'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'supports_git': {'key': 'supportsGit', 'type': 'bool'},
'supports_tFVC': {'key': 'supportsTFVC', 'type': 'bool'}
}
def __init__(self, default_source_control_type=None, project=None, supports_git=None, supports_tFVC=None):
super(VersionControlProjectInfo, self).__init__()
self.default_source_control_type = default_source_control_type
self.project = project
self.supports_git = supports_git
self.supports_tFVC = supports_tFVC
class VstsInfo(Model):
"""
:param collection:
:type collection: :class:`TeamProjectCollectionReference <azure.devops.v7_1.tfvc.models.TeamProjectCollectionReference>`
:param repository:
:type repository: :class:`GitRepository <azure.devops.v7_1.tfvc.models.GitRepository>`
:param server_url:
:type server_url: str
"""
_attribute_map = {
'collection': {'key': 'collection', 'type': 'TeamProjectCollectionReference'},
'repository': {'key': 'repository', 'type': 'GitRepository'},
'server_url': {'key': 'serverUrl', 'type': 'str'}
}
def __init__(self, collection=None, repository=None, server_url=None):
super(VstsInfo, self).__init__()
self.collection = collection
self.repository = repository
self.server_url = server_url
class TfvcBranchRef(TfvcShallowBranchRef):
"""
Metadata for a branchref.
:param path: Path for the branch.
:type path: str
:param _links: A collection of REST reference links.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.tfvc.models.ReferenceLinks>`
:param created_date: Creation date of the branch.
:type created_date: datetime
:param description: Branch description.
:type description: str
:param is_deleted: Is the branch deleted?
:type is_deleted: bool
:param owner: Alias or display name of user
:type owner: :class:`IdentityRef <azure.devops.v7_1.tfvc.models.IdentityRef>`
:param url: URL to retrieve the item.
:type url: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'description': {'key': 'description', 'type': 'str'},
'is_deleted': {'key': 'isDeleted', 'type': 'bool'},
'owner': {'key': 'owner', 'type': 'IdentityRef'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, path=None, _links=None, created_date=None, description=None, is_deleted=None, owner=None, url=None):
super(TfvcBranchRef, self).__init__(path=path)
self._links = _links
self.created_date = created_date
self.description = description
self.is_deleted = is_deleted
self.owner = owner
self.url = url
class TfvcChangeset(TfvcChangesetRef):
"""
A collection of changes.
:param _links: A collection of REST reference links.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.tfvc.models.ReferenceLinks>`
:param author: Alias or display name of user.
:type author: :class:`IdentityRef <azure.devops.v7_1.tfvc.models.IdentityRef>`
:param comment: Comment for the changeset.
:type comment: str
:param comment_truncated: Was the Comment result truncated?
:type comment_truncated: bool
:param created_date: Creation date of the changeset.
:type created_date: datetime
:param changeset_id: Changeset Id.
:type changeset_id: int
:param checked_in_by: Alias or display name of user.
:type checked_in_by: :class:`IdentityRef <azure.devops.v7_1.tfvc.models.IdentityRef>`
:param url: URL to retrieve the item.
:type url: str
:param account_id: Changeset Account Id, also known as Organization Id.
:type account_id: str
:param collection_id: Changeset collection Id.
:type collection_id: str
:param has_more_changes: True if more changes are available.
:type has_more_changes: bool
:param changes: List of associated changes.
:type changes: list of :class:`TfvcChange <azure.devops.v7_1.tfvc.models.TfvcChange>`
:param checkin_notes: List of Checkin Notes for the changeset.
:type checkin_notes: list of :class:`CheckinNote <azure.devops.v7_1.tfvc.models.CheckinNote>`
:param policy_override: Policy Override for the changeset.
:type policy_override: :class:`TfvcPolicyOverrideInfo <azure.devops.v7_1.tfvc.models.TfvcPolicyOverrideInfo>`
:param team_project_ids: Team Project Ids for the changeset.
:type team_project_ids: list of str
:param work_items: List of work items associated with the changeset.
:type work_items: list of :class:`AssociatedWorkItem <azure.devops.v7_1.tfvc.models.AssociatedWorkItem>`
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'author': {'key': 'author', 'type': 'IdentityRef'},
'comment': {'key': 'comment', 'type': 'str'},
'comment_truncated': {'key': 'commentTruncated', 'type': 'bool'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'changeset_id': {'key': 'changesetId', 'type': 'int'},
'checked_in_by': {'key': 'checkedInBy', 'type': 'IdentityRef'},
'url': {'key': 'url', 'type': 'str'},
'account_id': {'key': 'accountId', 'type': 'str'},
'collection_id': {'key': 'collectionId', 'type': 'str'},
'has_more_changes': {'key': 'hasMoreChanges', 'type': 'bool'},
'changes': {'key': 'changes', 'type': '[TfvcChange]'},
'checkin_notes': {'key': 'checkinNotes', 'type': '[CheckinNote]'},
'policy_override': {'key': 'policyOverride', 'type': 'TfvcPolicyOverrideInfo'},
'team_project_ids': {'key': 'teamProjectIds', 'type': '[str]'},
'work_items': {'key': 'workItems', 'type': '[AssociatedWorkItem]'}
}
def __init__(self, _links=None, author=None, comment=None, comment_truncated=None, created_date=None, changeset_id=None, checked_in_by=None, url=None, account_id=None, collection_id=None, has_more_changes=None, changes=None, checkin_notes=None, policy_override=None, team_project_ids=None, work_items=None):
super(TfvcChangeset, self).__init__(_links=_links, author=author, comment=comment, comment_truncated=comment_truncated, created_date=created_date, changeset_id=changeset_id, checked_in_by=checked_in_by, url=url)
self.account_id = account_id
self.collection_id = collection_id
self.has_more_changes = has_more_changes
self.changes = changes
self.checkin_notes = checkin_notes
self.policy_override = policy_override
self.team_project_ids = team_project_ids
self.work_items = work_items
class TfvcLabel(TfvcLabelRef):
"""
Metadata for a label.
:param _links: Collection of reference links.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.tfvc.models.ReferenceLinks>`
:param description: Label description.
:type description: str
:param id: Label Id.
:type id: int
:param label_scope: Label scope.
:type label_scope: str
:param modified_date: Last modified datetime for the label.
:type modified_date: datetime
:param name: Label name.
:type name: str
:param owner: Label owner.
:type owner: :class:`IdentityRef <azure.devops.v7_1.tfvc.models.IdentityRef>`
:param url: Label Url.
:type url: str
:param items: List of items.
:type items: list of :class:`TfvcItem <azure.devops.v7_1.tfvc.models.TfvcItem>`
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'label_scope': {'key': 'labelScope', 'type': 'str'},
'modified_date': {'key': 'modifiedDate', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'IdentityRef'},
'url': {'key': 'url', 'type': 'str'},
'items': {'key': 'items', 'type': '[TfvcItem]'}
}
def __init__(self, _links=None, description=None, id=None, label_scope=None, modified_date=None, name=None, owner=None, url=None, items=None):
super(TfvcLabel, self).__init__(_links=_links, description=description, id=id, label_scope=label_scope, modified_date=modified_date, name=name, owner=owner, url=url)
self.items = items
class TfvcShelveset(TfvcShelvesetRef):
"""
Metadata for a shelveset.
:param _links: List of reference links for the shelveset.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.tfvc.models.ReferenceLinks>`
:param comment: Shelveset comment.
:type comment: str
:param comment_truncated: Shelveset comment truncated as applicable.
:type comment_truncated: bool
:param created_date: Shelveset create date.
:type created_date: datetime
:param id: Shelveset Id.
:type id: str
:param name: Shelveset name.
:type name: str
:param owner: Shelveset Owner.
:type owner: :class:`IdentityRef <azure.devops.v7_1.tfvc.models.IdentityRef>`
:param url: Shelveset Url.
:type url: str
:param changes: List of changes.
:type changes: list of :class:`TfvcChange <azure.devops.v7_1.tfvc.models.TfvcChange>`
:param notes: List of checkin notes.
:type notes: list of :class:`CheckinNote <azure.devops.v7_1.tfvc.models.CheckinNote>`
:param policy_override: Policy override information if applicable.
:type policy_override: :class:`TfvcPolicyOverrideInfo <azure.devops.v7_1.tfvc.models.TfvcPolicyOverrideInfo>`
:param work_items: List of associated workitems.
:type work_items: list of :class:`AssociatedWorkItem <azure.devops.v7_1.tfvc.models.AssociatedWorkItem>`
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'comment': {'key': 'comment', 'type': 'str'},
'comment_truncated': {'key': 'commentTruncated', 'type': 'bool'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'IdentityRef'},
'url': {'key': 'url', 'type': 'str'},
'changes': {'key': 'changes', 'type': '[TfvcChange]'},
'notes': {'key': 'notes', 'type': '[CheckinNote]'},
'policy_override': {'key': 'policyOverride', 'type': 'TfvcPolicyOverrideInfo'},
'work_items': {'key': 'workItems', 'type': '[AssociatedWorkItem]'}
}
def __init__(self, _links=None, comment=None, comment_truncated=None, created_date=None, id=None, name=None, owner=None, url=None, changes=None, notes=None, policy_override=None, work_items=None):
super(TfvcShelveset, self).__init__(_links=_links, comment=comment, comment_truncated=comment_truncated, created_date=created_date, id=id, name=name, owner=owner, url=url)
self.changes = changes
self.notes = notes
self.policy_override = policy_override
self.work_items = work_items
class TfvcBranch(TfvcBranchRef):
"""
Class representing a branch object.
:param path: Path for the branch.
:type path: str
:param _links: A collection of REST reference links.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.tfvc.models.ReferenceLinks>`
:param created_date: Creation date of the branch.
:type created_date: datetime
:param description: Branch description.
:type description: str
:param is_deleted: Is the branch deleted?
:type is_deleted: bool
:param owner: Alias or display name of user
:type owner: :class:`IdentityRef <azure.devops.v7_1.tfvc.models.IdentityRef>`
:param url: URL to retrieve the item.
:type url: str
:param children: List of children for the branch.
:type children: list of :class:`TfvcBranch <azure.devops.v7_1.tfvc.models.TfvcBranch>`
:param mappings: List of branch mappings.
:type mappings: list of :class:`TfvcBranchMapping <azure.devops.v7_1.tfvc.models.TfvcBranchMapping>`
:param parent: Path of the branch's parent.
:type parent: :class:`TfvcShallowBranchRef <azure.devops.v7_1.tfvc.models.TfvcShallowBranchRef>`
:param related_branches: List of paths of the related branches.
:type related_branches: list of :class:`TfvcShallowBranchRef <azure.devops.v7_1.tfvc.models.TfvcShallowBranchRef>`
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'description': {'key': 'description', 'type': 'str'},
'is_deleted': {'key': 'isDeleted', 'type': 'bool'},
'owner': {'key': 'owner', 'type': 'IdentityRef'},
'url': {'key': 'url', 'type': 'str'},
'children': {'key': 'children', 'type': '[TfvcBranch]'},
'mappings': {'key': 'mappings', 'type': '[TfvcBranchMapping]'},
'parent': {'key': 'parent', 'type': 'TfvcShallowBranchRef'},
'related_branches': {'key': 'relatedBranches', 'type': '[TfvcShallowBranchRef]'}
}
def __init__(self, path=None, _links=None, created_date=None, description=None, is_deleted=None, owner=None, url=None, children=None, mappings=None, parent=None, related_branches=None):
super(TfvcBranch, self).__init__(path=path, _links=_links, created_date=created_date, description=description, is_deleted=is_deleted, owner=owner, url=url)
self.children = children
self.mappings = mappings
self.parent = parent
self.related_branches = related_branches
__all__ = [
'AssociatedWorkItem',
'FileContentMetadata',
'GitRepository',
'GitRepositoryRef',
'GraphSubjectBase',
'Change',
'CheckinNote',
'IdentityRef',
'ItemContent',
'ItemModel',
'ReferenceLinks',
'TeamProjectCollectionReference',
'TeamProjectReference',
'TfvcBranchMapping',
'TfvcChange',
'TfvcChangesetRef',
'TfvcChangesetSearchCriteria',
'TfvcChangesetsRequestData',
'TfvcItem',
'TfvcItemDescriptor',
'TfvcItemRequestData',
'TfvcLabelRef',
'TfvcLabelRequestData',
'TfvcMappingFilter',
'TfvcMergeSource',
'TfvcPolicyFailureInfo',
'TfvcPolicyOverrideInfo',
'TfvcShallowBranchRef',
'TfvcShelvesetRef',
'TfvcShelvesetRequestData',
'TfvcStatistics',
'TfvcVersionDescriptor',
'VersionControlProjectInfo',
'VstsInfo',
'TfvcBranchRef',
'TfvcChangeset',
'TfvcLabel',
'TfvcShelveset',
'TfvcBranch',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/tfvc/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/tfvc/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 22812
}
| 365 |
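For context, a minimal usage sketch for the TFVC models above follows. The organization URL, project name, and personal access token are placeholders, the `get_tfvc_client` accessor name is assumed from the azure-devops package's usual conventions, and the nested response shape is assumed.

# Hypothetical usage sketch for the TFVC models above (all identifiers are placeholders).
from azure.devops.connection import Connection
from msrest.authentication import BasicAuthentication
from azure.devops.v7_1.tfvc.models import TfvcItemDescriptor, TfvcItemRequestData

credentials = BasicAuthentication("", "<personal-access-token>")
connection = Connection(base_url="https://dev.azure.com/<org>", creds=credentials)
tfvc_client = connection.clients.get_tfvc_client()  # accessor name assumed

# Batch-request two items; recursion_level defaults to OneLevel per the docstring.
request = TfvcItemRequestData(
    include_content_metadata=True,
    include_links=False,
    item_descriptors=[
        TfvcItemDescriptor(path="$/<project>/README.md"),
        TfvcItemDescriptor(path="$/<project>/src", recursion_level="full"),
    ],
)
item_batches = tfvc_client.get_items_batch(request, project="<project>")
for batch in item_batches:  # response assumed to be a list of item lists
    for item in batch:
        print(item.path, item.size, item.version)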
@REM init section. Set _echo=1 to echo everything
@IF NOT DEFINED _echo ECHO OFF
pip install wheel --upgrade --no-cache-dir
python.exe %~dp0\..\create_wheels.py
IF ERRORLEVEL 1 GOTO FAIL
GOTO :EOF
:FAIL
ECHO Failed to create wheels.
EXIT /B 1
|
azure-devops-python-api/scripts/windows/create_wheels.cmd/0
|
{
"file_path": "azure-devops-python-api/scripts/windows/create_wheels.cmd",
"repo_id": "azure-devops-python-api",
"token_count": 95
}
| 366 |
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import List, cast
def _format_url_section(template, **kwargs):
components = template.split("/")
while components:
try:
return template.format(**kwargs)
except KeyError as key:
# Need the cast, as for some reason "split" is typed as list[str | Any]
formatted_components = cast(List[str], template.split("/"))
components = [c for c in formatted_components if "{}".format(key.args[0]) not in c]
template = "/".join(components)
|
azure-quantum-python/azure-quantum/azure/quantum/_client/_vendor.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/azure/quantum/_client/_vendor.py",
"repo_id": "azure-quantum-python",
"token_count": 283
}
| 367 |
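For reference, a quick worked example of the `_format_url_section` helper above (illustrative only, not part of the generated client): a missing path parameter raises a KeyError, its URL component is dropped, and formatting is retried until it succeeds.

# Illustrative trace of _format_url_section.
template = "/subscriptions/{subscriptionId}/jobs/{jobId}"
# With only jobId supplied, the first format() raises KeyError('subscriptionId'),
# so the '{subscriptionId}' component is removed and formatting is retried:
assert _format_url_section(template, jobId="123") == "/subscriptions/jobs/123"
# With all parameters supplied, the template formats on the first pass:
assert _format_url_section(template, subscriptionId="abc", jobId="123") == "/subscriptions/abc/jobs/123"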
##
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
##
try:
import cirq
except ImportError:
raise ImportError(
"Missing optional 'cirq' dependencies. \
To install run: pip install azure-quantum[cirq]"
)
from azure.quantum import Workspace
from azure.quantum.job.base_job import DEFAULT_TIMEOUT
from azure.quantum.cirq.targets import *
from typing import Optional, Union, List, TYPE_CHECKING
if TYPE_CHECKING:
from azure.quantum.cirq.targets import Target as CirqTarget
from azure.quantum.cirq.job import Job as CirqJob
from cirq_ionq import Job as CirqIonqJob
DEFAULT_JOB_NAME = "cirq-job"
CIRQ_USER_AGENT = "azure-quantum-cirq"
class AzureQuantumService:
"""
Class for interfacing with the Azure Quantum service
using Cirq quantum circuits
"""
def __init__(
self,
workspace: Workspace = None,
default_target: Optional[str] = None,
**kwargs
):
"""AzureQuantumService class
:param workspace: Azure Quantum workspace. If missing it will create a new Workspace passing `kwargs` to the constructor. Defaults to None.
:type workspace: Workspace
:param default_target: Default target name, defaults to None
:type default_target: Optional[str]
"""
if kwargs:
from warnings import warn
warn(f"""Consider passing \"workspace\" argument explicitly.
The ability to initialize AzureQuantumService with arguments {', '.join(f'"{argName}"' for argName in kwargs)} is going to be deprecated in future versions.""",
DeprecationWarning,
stacklevel=2)
if workspace is None:
workspace = Workspace(**kwargs)
workspace.append_user_agent(CIRQ_USER_AGENT)
self._workspace = workspace
self._default_target = default_target
@property
def _target_factory(self):
from azure.quantum.target.target_factory import TargetFactory
from azure.quantum.cirq.targets import Target, DEFAULT_TARGETS
target_factory = TargetFactory(
base_cls=Target,
workspace=self._workspace,
default_targets=DEFAULT_TARGETS
)
return target_factory
def targets(
self,
name: str = None,
provider_id: str = None,
**kwargs
) -> Union["CirqTarget", List["CirqTarget"]]:
"""Get all quantum computing targets available in the Azure Quantum Workspace.
:param name: Target name, defaults to None
:type name: str
:param provider_id: Id of the provider (e.g. "ionq"), defaults to None
:type provider_id: str
:return: Target instance or list thereof
:rtype: typing.Union[Target, typing.List[Target]]
"""
return self._target_factory.get_targets(
name=name,
provider_id=provider_id
)
def get_target(self, name: str = None, **kwargs) -> "CirqTarget":
"""Get target with the specified name
:param name: Target name
:type name: str
:return: Cirq target
:rtype: Target
"""
if name is None:
if self._default_target is None:
raise ValueError("No default target specified for job.")
return self.targets(name=self._default_target, **kwargs)
if isinstance(name, str):
return self.targets(name=name, **kwargs)
def get_job(self, job_id: str, *args, **kwargs) -> Union["CirqJob", "CirqIonqJob"]:
"""Get Cirq Job by job ID
:param job_id: Job ID
:type job_id: str
:return: Job
:rtype: azure.quantum.cirq.Job
"""
job = self._workspace.get_job(job_id=job_id)
target : CirqTarget = self._target_factory.create_target(
provider_id=job.details.provider_id,
name=job.details.target
)
return target._to_cirq_job(azure_job=job, *args, **kwargs)
def create_job(
self,
program: cirq.Circuit,
repetitions: int,
name: str = DEFAULT_JOB_NAME,
target: str = None,
param_resolver: cirq.ParamResolverOrSimilarType = cirq.ParamResolver({})
) -> Union["CirqJob", "CirqIonqJob"]:
"""Create job to run the given `cirq` program in Azure Quantum
:param program: Cirq program or circuit
:type program: cirq.Circuit
:param repetitions: Number of measurements
:type repetitions: int
:param name: Program name
:type name: str
:param target: Target name
:type target: str
:param param_resolver: Parameter resolver for cirq program
:type param_resolver: cirq.ParamResolverOrSimilarType
:return: Job
:rtype: azure.quantum.cirq.Job
"""
# Get target
_target = self.get_target(name=target)
if not _target:
target_name = target or self._default_target
raise RuntimeError(f"Could not find target '{target_name}'. \
Please make sure the target name is valid and that the associated provider is added to your Workspace. \
To add a provider to your quantum workspace on the Azure Portal, \
see https://aka.ms/AQ/Docs/AddProvider")
# Resolve parameters
resolved_circuit = cirq.resolve_parameters(program, param_resolver)
# Submit job to Azure
return _target.submit(
program=resolved_circuit,
repetitions=repetitions,
name=name
)
def estimate_cost(
self,
program: cirq.Circuit,
repetitions: int,
target: str = None,
param_resolver: cirq.ParamResolverOrSimilarType = cirq.ParamResolver({}),
**kwargs
):
"""
Estimate the cost for a given circuit.
:param program: Cirq program or circuit
:type program: cirq.Circuit
:param repetitions: Number of measurement repetitions
:type repetitions: int
:param target: Target name, defaults to default_target
:type target: str
:param param_resolver: Cirq parameters, defaults to `cirq.ParamResolver({})`
:type param_resolver: cirq.ParamResolverOrSimilarType
"""
# Resolve parameters
resolved_circuit = cirq.resolve_parameters(program, param_resolver)
target = self.get_target(name=target)
return target.estimate_cost(
program=resolved_circuit,
repetitions=repetitions,
**kwargs
)
def run(
self,
program: cirq.Circuit,
repetitions: int,
target: str = None,
name: str = DEFAULT_JOB_NAME,
param_resolver: cirq.ParamResolverOrSimilarType = cirq.ParamResolver({}),
seed: cirq.RANDOM_STATE_OR_SEED_LIKE = None,
timeout_seconds: int = DEFAULT_TIMEOUT,
) -> cirq.Result:
"""Run Cirq circuit on specified target, if target not specified then it runs on the default target
:param program: Cirq program or circuit
:type program: cirq.Circuit
:param repetitions: Number of measurement repetitions
:type repetitions: int
:param target: Target name, defaults to default_target
:type target: str
:param name: Program name, defaults to "cirq-job"
:type name: str
:param param_resolver: Cirq parameters, defaults to `cirq.ParamResolver({})`
:type param_resolver: cirq.ParamResolverOrSimilarType
:param seed: Random seed for simulator results, defaults to None
:type seed: cirq.RANDOM_STATE_OR_SEED_LIKE
:param timeout_seconds: Timeout in seconds, defaults to DEFAULT_TIMEOUT
:type timeout_seconds: int
:return: Measurement results
:rtype: cirq.Result
"""
job = self.create_job(
program=program,
repetitions=repetitions,
name=name,
target=target,
param_resolver=param_resolver
)
# Get raw job results
try:
result = job.results(timeout_seconds=timeout_seconds)
except RuntimeError as e:
# Catch errors from cirq_ionq.Job.results
if "Job was not completed successful. Instead had status: " in str(e):
raise TimeoutError(f"The wait time has exceeded {timeout_seconds} seconds. \
Job status: '{job.status()}'.")
else:
raise e
# Convert to Cirq Result
target = self.get_target(name=target)
return target._to_cirq_result(
result=result,
param_resolver=param_resolver,
seed=seed
)
|
azure-quantum-python/azure-quantum/azure/quantum/cirq/service.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/azure/quantum/cirq/service.py",
"repo_id": "azure-quantum-python",
"token_count": 3717
}
| 368 |
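As a usage illustration for the service above, the sketch below runs a small circuit end to end. The workspace identifiers are placeholders, the import path is assumed from the module layout, and "ionq.simulator" is assumed to be provisioned in the workspace.

# Hypothetical end-to-end usage of AzureQuantumService (workspace values are placeholders).
import cirq
from azure.quantum import Workspace
from azure.quantum.cirq import AzureQuantumService  # import path assumed

workspace = Workspace(
    resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Quantum/workspaces/<ws>",
    location="eastus",
)
service = AzureQuantumService(workspace=workspace, default_target="ionq.simulator")

q0, q1 = cirq.LineQubit.range(2)
bell = cirq.Circuit(
    cirq.H(q0),
    cirq.CNOT(q0, q1),
    cirq.measure(q0, q1, key="m"),
)
# Runs on the default target since no target name is passed.
result = service.run(program=bell, repetitions=100)
print(result.histogram(key="m"))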
##
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
##
from typing import TYPE_CHECKING, Dict, List, Union
from azure.quantum import __version__
from azure.quantum.qiskit.job import AzureQuantumJob
from azure.quantum.target.ionq import IonQ
from abc import abstractmethod
from qiskit import QuantumCircuit
from .backend import (
AzureBackend,
AzureQirBackend,
_get_shots_or_deprecated_count_input_param
)
from qiskit.providers.models import BackendConfiguration
from qiskit.providers import Options, Provider
from qiskit_ionq.helpers import (
ionq_basis_gates,
GATESET_MAP,
qiskit_circ_to_ionq_circ,
)
if TYPE_CHECKING:
from azure.quantum.qiskit import AzureQuantumProvider
import logging
logger = logging.getLogger(__name__)
__all__ = [
"IonQBackend",
"IonQQPUBackend",
"IonQSimulatorBackend",
"IonQAriaBackend",
"IonQForteBackend",
"IonQQirBackend",
"IonQSimulatorQirBackend",
"IonQSimulatorNativeBackend",
"IonQQPUQirBackend",
"IonQQPUNativeBackend",
"IonQAriaQirBackend",
"IonQForteQirBackend",
"IonQAriaNativeBackend",
"IonQForteNativeBackend",
]
_IONQ_SHOTS_INPUT_PARAM_NAME = "shots"
_DEFAULT_SHOTS_COUNT = 500
class IonQQirBackendBase(AzureQirBackend):
"""Base class for interfacing with an IonQ QIR backend"""
_SHOTS_PARAM_NAME = _IONQ_SHOTS_INPUT_PARAM_NAME
@abstractmethod
def __init__(
self, configuration: BackendConfiguration, provider: Provider = None, **fields
):
super().__init__(configuration, provider, **fields)
@classmethod
def _default_options(cls) -> Options:
return Options(
**{
cls._SHOTS_PARAM_NAME: _DEFAULT_SHOTS_COUNT,
},
targetCapability="BasicExecution",
)
def _azure_config(self) -> Dict[str, str]:
config = super()._azure_config()
config.update(
{
"provider_id": "ionq",
}
)
return config
def run(
self,
run_input: Union[QuantumCircuit, List[QuantumCircuit]] = [],
shots: int = None,
**options,
) -> AzureQuantumJob:
# In earlier versions, backends for all providers accepted the 'count' option,
# but now we accept it only for compatibility reasons and do not recommend using it.
count = options.pop("count", None)
final_shots = _get_shots_or_deprecated_count_input_param(
param_name=self.__class__._SHOTS_PARAM_NAME,
shots=shots,
count=count,
)
return super().run(run_input, shots=final_shots, **options)
class IonQSimulatorQirBackend(IonQQirBackendBase):
backend_names = ("ionq.simulator",)
def __init__(self, name: str, provider: "AzureQuantumProvider", **kwargs):
"""Base class for interfacing with an IonQ QIR Simulator backend"""
default_config = BackendConfiguration.from_dict(
{
"backend_name": name,
"backend_version": __version__,
"simulator": True,
"local": False,
"coupling_map": None,
"description": "IonQ simulator on Azure Quantum",
"basis_gates": ionq_basis_gates,
"memory": False,
"n_qubits": 29,
"conditional": False,
"max_shots": None,
"max_experiments": 1,
"open_pulse": False,
"gates": [{"name": "TODO", "parameters": [], "qasm_def": "TODO"}],
"azure": self._azure_config(),
}
)
logger.info("Initializing IonQSimulatorQirBackend")
configuration: BackendConfiguration = kwargs.pop(
"configuration", default_config
)
super().__init__(configuration=configuration, provider=provider, **kwargs)
class IonQQPUQirBackend(IonQQirBackendBase):
backend_names = ("ionq.qpu",)
def __init__(self, name: str, provider: "AzureQuantumProvider", **kwargs):
"""Base class for interfacing with an IonQ QPU backend"""
default_config = BackendConfiguration.from_dict(
{
"backend_name": name,
"backend_version": __version__,
"simulator": False,
"local": False,
"coupling_map": None,
"description": "IonQ QPU on Azure Quantum",
"basis_gates": ionq_basis_gates,
"memory": False,
"n_qubits": 11,
"conditional": False,
"max_shots": 10000,
"max_experiments": 1,
"open_pulse": False,
"gates": [{"name": "TODO", "parameters": [], "qasm_def": "TODO"}],
"azure": self._azure_config(),
}
)
logger.info("Initializing IonQQPUQirBackend")
configuration: BackendConfiguration = kwargs.pop(
"configuration", default_config
)
super().__init__(configuration=configuration, provider=provider, **kwargs)
class IonQAriaQirBackend(IonQQirBackendBase):
backend_names = ("ionq.qpu.aria-1", "ionq.qpu.aria-2")
def __init__(self, name: str, provider: "AzureQuantumProvider", **kwargs):
"""Base class for interfacing with an IonQ Aria QPU backend"""
default_config = BackendConfiguration.from_dict(
{
"backend_name": name,
"backend_version": __version__,
"simulator": False,
"local": False,
"coupling_map": None,
"description": "IonQ Aria QPU on Azure Quantum",
"basis_gates": ionq_basis_gates,
"memory": False,
"n_qubits": 23,
"conditional": False,
"max_shots": 10000,
"max_experiments": 1,
"open_pulse": False,
"gates": [{"name": "TODO", "parameters": [], "qasm_def": "TODO"}],
"azure": self._azure_config(),
}
)
logger.info("Initializing IonQAriaQirBackend")
configuration: BackendConfiguration = kwargs.pop(
"configuration", default_config
)
super().__init__(configuration=configuration, provider=provider, **kwargs)
class IonQForteQirBackend(IonQQirBackendBase):
backend_names = ("ionq.qpu.forte-1",)
def __init__(self, name: str, provider: "AzureQuantumProvider", **kwargs):
"""Base class for interfacing with an IonQ Forte QPU backend"""
default_config = BackendConfiguration.from_dict(
{
"backend_name": name,
"backend_version": __version__,
"simulator": False,
"local": False,
"coupling_map": None,
"description": "IonQ Forte QPU on Azure Quantum",
"basis_gates": ionq_basis_gates,
"memory": False,
"n_qubits": 35,
"conditional": False,
"max_shots": 10000,
"max_experiments": 1,
"open_pulse": False,
"gates": [{"name": "TODO", "parameters": [], "qasm_def": "TODO"}],
"azure": self._azure_config(),
}
)
logger.info("Initializing IonQForteQirBackend")
configuration: BackendConfiguration = kwargs.pop(
"configuration", default_config
)
super().__init__(configuration=configuration, provider=provider, **kwargs)
class IonQBackend(AzureBackend):
"""Base class for interfacing with an IonQ backend in Azure Quantum"""
backend_name = None
_SHOTS_PARAM_NAME = _IONQ_SHOTS_INPUT_PARAM_NAME
@abstractmethod
def __init__(
self, configuration: BackendConfiguration, provider: Provider = None, **fields
):
super().__init__(configuration, provider, **fields)
def run(
self,
run_input=None,
shots: int = None,
**options,
) -> AzureQuantumJob:
# In earlier versions, backends for all providers accepted the 'count' option,
# but now we accept it only for compatibility reasons and do not recommend using it.
count = options.pop("count", None)
final_shots = _get_shots_or_deprecated_count_input_param(
param_name=self.__class__._SHOTS_PARAM_NAME,
shots=shots,
count=count,
)
return super().run(run_input, shots=final_shots, **options)
@classmethod
def _default_options(cls):
return Options(
**{
cls._SHOTS_PARAM_NAME: _DEFAULT_SHOTS_COUNT,
},
)
def _azure_config(self) -> Dict[str, str]:
return {
"blob_name": "inputData",
"content_type": "application/json",
"provider_id": "ionq",
"input_data_format": "ionq.circuit.v1",
"output_data_format": "ionq.quantum-results.v1",
"is_default": True,
}
def _prepare_job_metadata(self, circuit, **kwargs):
_, _, meas_map = qiskit_circ_to_ionq_circ(circuit, gateset=self.gateset())
metadata = super()._prepare_job_metadata(circuit, **kwargs)
metadata["meas_map"] = meas_map
return metadata
def _translate_input(self, circuit):
"""Translates the input values to the format expected by the AzureBackend."""
ionq_circ, _, _ = qiskit_circ_to_ionq_circ(circuit, gateset=self.gateset())
input_data = {
"gateset": self.gateset(),
"qubits": circuit.num_qubits,
"circuit": ionq_circ,
}
return IonQ._encode_input_data(input_data)
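# For reference, the "ionq.circuit.v1" payload encoded above has roughly this
# shape for a 2-qubit Bell circuit (illustrative sketch, not an exact dump):
#   {
#       "gateset": "qis",
#       "qubits": 2,
#       "circuit": [
#           {"gate": "h", "target": 0},
#           {"gate": "cnot", "control": 0, "target": 1},
#       ],
#   }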
def gateset(self):
return self.configuration().gateset
def estimate_cost(self, circuit, shots):
"""Estimate the cost for the given circuit."""
ionq_circ, _, _ = qiskit_circ_to_ionq_circ(circuit, gateset=self.gateset())
input_data = {
"qubits": circuit.num_qubits,
"circuit": ionq_circ,
}
workspace = self.provider().get_workspace()
target = workspace.get_targets(self.name())
return target.estimate_cost(input_data, shots=shots)
class IonQSimulatorBackend(IonQBackend):
backend_names = ("ionq.simulator",)
def __init__(self, name: str, provider: "AzureQuantumProvider", **kwargs):
"""Base class for interfacing with an IonQ Simulator backend"""
gateset = kwargs.pop("gateset", "qis")
default_config = BackendConfiguration.from_dict(
{
"backend_name": name,
"backend_version": __version__,
"simulator": True,
"local": False,
"coupling_map": None,
"description": "IonQ simulator on Azure Quantum",
"basis_gates": GATESET_MAP[gateset],
"memory": False,
"n_qubits": 29,
"conditional": False,
"max_shots": None,
"max_experiments": 1,
"open_pulse": False,
"gates": [{"name": "TODO", "parameters": [], "qasm_def": "TODO"}],
"azure": self._azure_config(),
"gateset": gateset,
}
)
logger.info("Initializing IonQSimulatorBackend")
configuration: BackendConfiguration = kwargs.pop(
"configuration", default_config
)
super().__init__(configuration=configuration, provider=provider, **kwargs)
class IonQSimulatorNativeBackend(IonQSimulatorBackend):
def __init__(self, name: str, provider: "AzureQuantumProvider", **kwargs):
if "gateset" not in kwargs:
kwargs["gateset"] = "native"
super().__init__(name, provider, **kwargs)
def _azure_config(self) -> Dict[str, str]:
config = super()._azure_config()
config.update(
{
"is_default": False,
}
)
return config
class IonQQPUBackend(IonQBackend):
backend_names = ("ionq.qpu",)
def __init__(self, name: str, provider: "AzureQuantumProvider", **kwargs):
"""Base class for interfacing with an IonQ QPU backend"""
gateset = kwargs.pop("gateset", "qis")
default_config = BackendConfiguration.from_dict(
{
"backend_name": name,
"backend_version": __version__,
"simulator": False,
"local": False,
"coupling_map": None,
"description": "IonQ QPU on Azure Quantum",
"basis_gates": GATESET_MAP[gateset],
"memory": False,
"n_qubits": 11,
"conditional": False,
"max_shots": 10000,
"max_experiments": 1,
"open_pulse": False,
"gates": [{"name": "TODO", "parameters": [], "qasm_def": "TODO"}],
"azure": self._azure_config(),
"gateset": gateset,
}
)
logger.info("Initializing IonQQPUBackend")
configuration: BackendConfiguration = kwargs.pop(
"configuration", default_config
)
super().__init__(configuration=configuration, provider=provider, **kwargs)
class IonQQPUNativeBackend(IonQQPUBackend):
def __init__(self, name: str, provider: "AzureQuantumProvider", **kwargs):
if "gateset" not in kwargs:
kwargs["gateset"] = "native"
super().__init__(name, provider, **kwargs)
def _azure_config(self) -> Dict[str, str]:
config = super()._azure_config()
config.update(
{
"is_default": False,
}
)
return config
class IonQAriaBackend(IonQBackend):
backend_names = ("ionq.qpu.aria-1", "ionq.qpu.aria-2")
def __init__(self, name: str, provider: "AzureQuantumProvider", **kwargs):
"""Base class for interfacing with an IonQ Aria QPU backend"""
gateset = kwargs.pop("gateset", "qis")
default_config = BackendConfiguration.from_dict(
{
"backend_name": name,
"backend_version": __version__,
"simulator": False,
"local": False,
"coupling_map": None,
"description": "IonQ Aria QPU on Azure Quantum",
"basis_gates": GATESET_MAP[gateset],
"memory": False,
"n_qubits": 23,
"conditional": False,
"max_shots": 10000,
"max_experiments": 1,
"open_pulse": False,
"gates": [{"name": "TODO", "parameters": [], "qasm_def": "TODO"}],
"azure": self._azure_config(),
"gateset": gateset,
}
)
logger.info("Initializing IonQAriaQPUBackend")
configuration: BackendConfiguration = kwargs.pop(
"configuration", default_config
)
super().__init__(configuration=configuration, provider=provider, **kwargs)
class IonQForteBackend(IonQBackend):
backend_names = ("ionq.qpu.forte-1",)
def __init__(self, name: str, provider: "AzureQuantumProvider", **kwargs):
"""Base class for interfacing with an IonQ Forte QPU backend"""
gateset = kwargs.pop("gateset", "qis")
default_config = BackendConfiguration.from_dict(
{
"backend_name": name,
"backend_version": __version__,
"simulator": False,
"local": False,
"coupling_map": None,
"description": "IonQ Forte QPU on Azure Quantum",
"basis_gates": GATESET_MAP[gateset],
"memory": False,
"n_qubits": 35,
"conditional": False,
"max_shots": 10000,
"max_experiments": 1,
"open_pulse": False,
"gates": [{"name": "TODO", "parameters": [], "qasm_def": "TODO"}],
"azure": self._azure_config(),
"gateset": gateset,
}
)
logger.info("Initializing IonQForteBackend")
configuration: BackendConfiguration = kwargs.pop(
"configuration", default_config
)
super().__init__(configuration=configuration, provider=provider, **kwargs)
class IonQAriaNativeBackend(IonQAriaBackend):
def __init__(self, name: str, provider: "AzureQuantumProvider", **kwargs):
if "gateset" not in kwargs:
kwargs["gateset"] = "native"
super().__init__(name, provider, **kwargs)
def _azure_config(self) -> Dict[str, str]:
config = super()._azure_config()
config.update(
{
"is_default": False,
}
)
return config
class IonQForteNativeBackend(IonQForteBackend):
def __init__(self, name: str, provider: "AzureQuantumProvider", **kwargs):
if "gateset" not in kwargs:
kwargs["gateset"] = "native"
super().__init__(name, provider, **kwargs)
def _azure_config(self) -> Dict[str, str]:
config = super()._azure_config()
config.update(
{
"is_default": False,
}
)
return config
|
azure-quantum-python/azure-quantum/azure/quantum/qiskit/backends/ionq.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/azure/quantum/qiskit/backends/ionq.py",
"repo_id": "azure-quantum-python",
"token_count": 8665
}
| 369 |
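A hypothetical usage sketch for the IonQ backends above through the Qiskit provider follows; the workspace identifiers are placeholders, and the provider constructor arguments are assumed to be forwarded to Workspace.

# Hypothetical usage of the IonQ backends above (workspace values are placeholders).
from qiskit import QuantumCircuit
from azure.quantum.qiskit import AzureQuantumProvider

provider = AzureQuantumProvider(
    resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Quantum/workspaces/<ws>",
    location="eastus",
)
# The provider resolves the name to one of the backend classes defined above.
backend = provider.get_backend("ionq.simulator")

circuit = QuantumCircuit(2, 2)
circuit.h(0)
circuit.cx(0, 1)
circuit.measure([0, 1], [0, 1])

# 'shots' is the supported option; 'count' is accepted only for backward compatibility.
job = backend.run(circuit, shots=100)
print(job.result().get_counts())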
import warnings
from azure.quantum.job.base_job import ContentType
from azure.quantum.job.job import Job
from azure.quantum.target.target import Target
from azure.quantum.workspace import Workspace
from azure.quantum.target.params import InputParams
from typing import Any, Dict, Type, Union
from .job import MicrosoftElementsDftJob
class MicrosoftElementsDft(Target):
"""
Microsoft Elements DFT target from the microsoft-elements provider.
"""
target_names = [
"microsoft.dft"
]
def __init__(
self,
workspace: "Workspace",
name: str = "microsoft.dft",
**kwargs
):
"""
Initializes a new DFT target.
:param workspace: Associated workspace
:type workspace: Workspace
:param name: Target name
"""
# There is only a single target name for this target
assert name == self.target_names[0]
# make sure the provider_id argument is not passed twice
kwargs.pop("provider_id", None)
super().__init__(
workspace=workspace,
name=name,
input_data_format="microsoft.xyz.v1",
output_data_format="microsoft.dft-results.v1",
provider_id="microsoft-elements",
content_type=ContentType.text_plain,
**kwargs
)
def submit(self,
input_data: Any,
name: str = "azure-quantum-dft-job",
shots: int = None,
input_params: Union[Dict[str, Any], InputParams, None] = None,
**kwargs) -> MicrosoftElementsDftJob:
"""
Submit DFT job to Azure Quantum Services.
:param input_data: Input data
:type input_data: Any
:param name: Job name
:type name: str
:param shots: Number of shots. Ignored for DFT jobs. Defaults to None
:type shots: int
:param input_params: Input parameters
:type input_params: Dict[str, Any]
:return: Azure Quantum job
:rtype: Job
"""
if shots is not None:
warnings.warn("The 'shots' parameter is ignored in Microsoft Elements Dft job.")
return super().submit(
input_data=input_data,
name=name,
shots=shots,
input_params=input_params,
**kwargs
)
@classmethod
def _get_job_class(cls) -> Type[Job]:
return MicrosoftElementsDftJob
|
azure-quantum-python/azure-quantum/azure/quantum/target/microsoft/elements/dft/target.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/azure/quantum/target/microsoft/elements/dft/target.py",
"repo_id": "azure-quantum-python",
"token_count": 1112
}
| 370 |
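A hypothetical submission sketch for the microsoft.dft target above follows; the workspace values are placeholders and the input_params schema shown is an assumption, not a documented contract.

# Hypothetical DFT submission sketch (workspace values are placeholders).
from azure.quantum import Workspace

workspace = Workspace(
    resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Quantum/workspaces/<ws>",
    location="eastus",
)
target = workspace.get_targets("microsoft.dft")

# Input is an XYZ-format molecule string ("microsoft.xyz.v1"), e.g. molecular hydrogen.
h2_xyz = """2

H 0.0 0.0 0.0
H 0.0 0.0 0.74
"""
job = target.submit(
    input_data=h2_xyz,
    name="h2-spe",
    input_params={"tasks": [{"taskType": "spe"}]},  # parameter schema assumed
)
job.wait_until_completed()
print(job.get_results())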
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
<#
.SYNOPSIS
(Re)Generate the underlying Azure Quantum Python data-plane client based on the latest published Swagger.
.DESCRIPTION
(Re)Generate the underlying Azure Quantum Python data-plane client based on the latest published Swagger.
.PARAMETER SwaggerRepoUrl
The URL of the git repo that contains the Swagger and AutoRest ReadMe.md configurations (defaults to "https://github.com/Azure/azure-rest-api-specs")
.PARAMETER SwaggerRepoBranch
The name of the swagger repo branch (defaults to "main")
.PARAMETER SwaggerTagVersion
The Swagger version to be used (defaults to "", which will use the default tag from the main ReadMe.md)
.EXAMPLE
./eng/Generate-DataPlane-Client.ps1
# Regenerate the data-plane client using the latest published Swagger from the official repo
.EXAMPLE
./eng/Generate-DataPlane-Client.ps1 -SwaggerRepoBranch "feature/quantum/update-clients"
# Regenerate the data-plane client using the Swagger from the official repo, but from a feature branch
.EXAMPLE
./eng/Generate-DataPlane-Client.ps1 -SwaggerTagVersion "package-2019-11-04-preview"
# Regenerate the data-plane client using an older version of the Swagger
#>
[CmdletBinding()]
Param (
[string] $SwaggerRepoUrl = "https://github.com/Azure/azure-rest-api-specs",
[string] $SwaggerRepoBranch = "main",
[string] $SwaggerTagVersion
)
$PackageVersion = $env:PYTHON_VERSION
if ([string]::IsNullOrEmpty($PackageVersion)) {
$VersionFilePath = Join-Path $PSScriptRoot "../azure/quantum/version.py"
if (Test-Path $VersionFilePath) {
$VersionFileContent = Get-Content -Path $VersionFilePath
$PackageVersion = [regex]::Match($VersionFileContent, '__version__\s*=\s*"(?<version>[^"]+)"').Groups["version"]?.Value
}
}
if ([string]::IsNullOrEmpty($PackageVersion)) {
$PackageVersion = "0.0.0.1"
}
$OutputFolder = Join-Path $PSScriptRoot "../azure/quantum/_client"
Write-Verbose "Output folder: $OutputFolder"
Write-Verbose "Deleting previous output folder contents"
if (Test-Path $OutputFolder) {
Remove-Item $OutputFolder -Recurse | Write-Verbose
}
$AutoRestConfig = $SwaggerRepoUrl.StartsWith("https://") `
? "$SwaggerRepoUrl/blob/$SwaggerRepoBranch/specification/quantum/data-plane/readme.md" `
: "$SwaggerRepoUrl/specification/quantum/data-plane/readme.md"
Write-Verbose "Installing latest AutoRest client"
npm install -g autorest@latest | Write-Verbose
autorest --reset | Write-Verbose
if ([string]::IsNullOrEmpty($SwaggerTagVersion))
{
Write-Verbose "Generating the client based on:`nConfig: $AutoRestConfig"
autorest $AutoRestConfig `
--verbose `
--python `
--python-mode=pythonSdk `
--output-folder=$OutputFolder `
--package-version=$PackageVersion `
| Write-Verbose
}
else
{
Write-Verbose "Generating the client based on:`nConfig: $AutoRestConfig`nTag: $SwaggerTagVersion"
autorest $AutoRestConfig `
--verbose `
--python `
--python-mode=pythonSdk `
--tag=$SwaggerTagVersion `
--output-folder=$OutputFolder `
--package-version=$PackageVersion `
| Write-Verbose
}
|
azure-quantum-python/azure-quantum/eng/Generate-DataPlane-Client.ps1/0
|
{
"file_path": "azure-quantum-python/azure-quantum/eng/Generate-DataPlane-Client.ps1",
"repo_id": "azure-quantum-python",
"token_count": 1209
}
| 371 |
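The version lookup in the script above can be mirrored in Python for local testing; this is a sketch using the same regular expression, with the file path assumed relative to the repository root.

# Python mirror of the PowerShell version lookup above (path assumed).
import re
from pathlib import Path

text = Path("azure-quantum-python/azure-quantum/azure/quantum/version.py").read_text()
match = re.search(r'__version__\s*=\s*"(?P<version>[^"]+)"', text)
package_version = match.group("version") if match else "0.0.0.1"  # same fallback as the script
print(package_version)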
interactions:
- request:
body: client_id=PLACEHOLDER&grant_type=client_credentials&client_assertion=PLACEHOLDER&client_info=1&client_assertion_type=PLACEHOLDER&scope=https%3A%2F%2Fquantum.microsoft.com%2F.default
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '181'
Content-Type:
- application/x-www-form-urlencoded
User-Agent:
- azsdk-python-identity/1.16.0 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-client-current-telemetry:
- 4|730,2|
x-client-os:
- win32
x-client-sku:
- MSAL.Python
x-client-ver:
- 1.28.0
method: POST
uri: https://login.microsoftonline.com/00000000-0000-0000-0000-000000000000/oauth2/v2.0/token
response:
body:
string: '{"token_type": "Bearer", "expires_in": 1746121229, "ext_expires_in":
1746121229, "refresh_in": 31536000, "access_token": "PLACEHOLDER"}'
headers:
content-length:
- '135'
content-type:
- application/json; charset=utf-8
status:
code: 200
message: OK
- request:
body: 'b''{"containerName": "job-00000000-0000-0000-0000-000000000001"}'''
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '64'
Content-Type:
- application/json
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: POST
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/storage/sasUri?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"sasUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl"}'
headers:
connection:
- keep-alive
content-length:
- '174'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 17:40:30 GMT
x-ms-version:
- '2023-11-03'
method: GET
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?restype=container&sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: "\uFEFF<?xml version=\"1.0\" encoding=\"utf-8\"?><Error><Code>ContainerNotFound</Code><Message>The
specified container does not exist.\nRequestId:e9854f61-601e-0068-73ee-9b4a50000000\nTime:2024-05-01T17:40:32.9516867Z</Message></Error>"
headers:
content-length:
- '223'
content-type:
- application/xml
x-ms-version:
- '2023-11-03'
status:
code: 404
message: The specified container does not exist.
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '0'
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 17:40:32 GMT
x-ms-version:
- '2023-11-03'
method: PUT
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?restype=container&sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: ''
headers:
content-length:
- '0'
x-ms-version:
- '2023-11-03'
status:
code: 201
message: Created
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 17:40:32 GMT
x-ms-version:
- '2023-11-03'
method: GET
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?restype=container&sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: ''
headers:
content-length:
- '0'
x-ms-lease-state:
- available
x-ms-lease-status:
- unlocked
x-ms-version:
- '2023-11-03'
status:
code: 200
message: OK
- request:
body: 'b''{"sequence_builder": "{\\"version\\": \\"1\\", \\"name\\": \\"pulser-exported\\",
\\"register\\": [{\\"name\\": \\"q0\\", \\"x\\": -10.0, \\"y\\": 0.0}, {\\"name\\":
\\"q1\\", \\"x\\": -5.0, \\"y\\": -8.660254}, {\\"name\\": \\"q2\\", \\"x\\":
-5.0, \\"y\\": 0.0}, {\\"name\\": \\"q3\\", \\"x\\": 0.0, \\"y\\": 0.0}, {\\"name\\":
\\"q4\\", \\"x\\": 5.0, \\"y\\": -8.660254}, {\\"name\\": \\"q5\\", \\"x\\":
7.5, \\"y\\": 4.330127}], \\"channels\\": {\\"ch_global\\": \\"rydberg_global\\"},
\\"variables\\": {}, \\"operations\\": [{\\"op\\": \\"pulse\\", \\"channel\\":
\\"ch_global\\", \\"protocol\\": \\"min-delay\\", \\"amplitude\\": {\\"kind\\":
\\"constant\\", \\"duration\\": 124, \\"value\\": 12.566370614359172}, \\"detuning\\":
{\\"kind\\": \\"constant\\", \\"duration\\": 124, \\"value\\": 25.132741228718345},
\\"phase\\": 0.0, \\"post_phase_shift\\": 0.0}, {\\"op\\": \\"pulse\\", \\"channel\\":
\\"ch_global\\", \\"protocol\\": \\"min-delay\\", \\"amplitude\\": {\\"kind\\":
\\"constant\\", \\"duration\\": 400, \\"value\\": 0.0}, \\"detuning\\": {\\"kind\\":
\\"constant\\", \\"duration\\": 400, \\"value\\": -25.132741228718345}, \\"phase\\":
0.0, \\"post_phase_shift\\": 0.0}, {\\"op\\": \\"pulse\\", \\"channel\\": \\"ch_global\\",
\\"protocol\\": \\"min-delay\\", \\"amplitude\\": {\\"kind\\": \\"constant\\",
\\"duration\\": 100, \\"value\\": 12.566370614359172}, \\"detuning\\": {\\"kind\\":
\\"constant\\", \\"duration\\": 100, \\"value\\": 25.132741228718345}, \\"phase\\":
0.0, \\"post_phase_shift\\": 0.0}, {\\"op\\": \\"pulse\\", \\"channel\\": \\"ch_global\\",
\\"protocol\\": \\"min-delay\\", \\"amplitude\\": {\\"kind\\": \\"constant\\",
\\"duration\\": 400, \\"value\\": 0.0}, \\"detuning\\": {\\"kind\\": \\"constant\\",
\\"duration\\": 400, \\"value\\": -25.132741228718345}, \\"phase\\": 0.0, \\"post_phase_shift\\":
0.0}, {\\"op\\": \\"pulse\\", \\"channel\\": \\"ch_global\\", \\"protocol\\":
\\"min-delay\\", \\"amplitude\\": {\\"kind\\": \\"constant\\", \\"duration\\":
100, \\"value\\": 12.566370614359172}, \\"detuning\\": {\\"kind\\": \\"constant\\",
\\"duration\\": 100, \\"value\\": 25.132741228718345}, \\"phase\\": 0.0, \\"post_phase_shift\\":
0.0}], \\"measurement\\": null, \\"device\\": {\\"version\\": \\"1\\", \\"channels\\":
[{\\"id\\": \\"rydberg_global\\", \\"basis\\": \\"ground-rydberg\\", \\"addressing\\":
\\"Global\\", \\"max_abs_detuning\\": 31.41592653589793, \\"max_amp\\": 12.566370614359172,
\\"min_retarget_interval\\": null, \\"fixed_retarget_t\\": null, \\"max_targets\\":
null, \\"clock_period\\": 4, \\"min_duration\\": 16, \\"max_duration\\": 100000000,
\\"mod_bandwidth\\": 2, \\"eom_config\\": {\\"limiting_beam\\": \\"RED\\", \\"max_limiting_amp\\":
251.32741228718345, \\"intermediate_detuning\\": 4398.22971502571, \\"controlled_beams\\":
[\\"BLUE\\", \\"RED\\"], \\"mod_bandwidth\\": 11}}], \\"name\\": \\"Fresnel\\",
\\"dimensions\\": 2, \\"rydberg_level\\": 60, \\"min_atom_distance\\": 5, \\"max_atom_num\\":
20, \\"max_radial_distance\\": 35, \\"interaction_coeff_xy\\": null, \\"supports_slm_mask\\":
false, \\"max_layout_filling\\": 0.5, \\"reusable_channels\\": false, \\"pre_calibrated_layouts\\":
[], \\"is_virtual\\": false}, \\"layout\\": {\\"coordinates\\": [[-12.5, 4.330127],
[-10.0, 0.0], [-7.5, -4.330127], [-7.5, 4.330127], [-5.0, -8.660254], [-5.0,
0.0], [-5.0, 8.660254], [-2.5, -4.330127], [-2.5, 4.330127], [0.0, -8.660254],
[0.0, 0.0], [0.0, 8.660254], [2.5, -4.330127], [2.5, 4.330127], [5.0, -8.660254],
[5.0, 0.0], [5.0, 8.660254], [7.5, -4.330127], [7.5, 4.330127], [10.0, 0.0]],
\\"slug\\": \\"TriangularLatticeLayout(20, 5.0\\\\u00b5m)\\"}}"}'''
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '3645'
Content-Type:
- application/octet-stream
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-blob-type:
- BlockBlob
x-ms-date:
- Wed, 01 May 2024 17:40:33 GMT
x-ms-version:
- '2023-11-03'
method: PUT
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl
response:
body:
string: ''
headers:
content-length:
- '0'
x-ms-version:
- '2023-11-03'
status:
code: 201
message: Created
- request:
body: 'b''{"id": "00000000-0000-0000-0000-000000000001", "name": "qdk-python-test",
"providerId": "pasqal", "target": "pasqal.sim.emu-tn", "itemType": "Job", "containerUri":
"https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "outputDataFormat":
"pasqal.pulser-results.v1"}'''
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '546'
Content-Type:
- application/json
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: PUT
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net:443/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&srt=co&ss=b&sp=racwl",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
null, "errorData": null, "isCancelling": false, "tags": [], "name": "qdk-python-test",
"id": "00000000-0000-0000-0000-000000000001", "providerId": "pasqal", "target":
"pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1067'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"qdk-python-test", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"pasqal", "target": "pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1314'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=2
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"qdk-python-test", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"pasqal", "target": "pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1314'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=3
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"qdk-python-test", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"pasqal", "target": "pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1314'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=4
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"qdk-python-test", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"pasqal", "target": "pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1314'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=5
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"qdk-python-test", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"pasqal", "target": "pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1314'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=6
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Waiting", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": null, "cancellationTime": null, "quantumComputingData":
{"count": 1}, "errorData": null, "isCancelling": false, "tags": [], "name":
"qdk-python-test", "id": "00000000-0000-0000-0000-000000000001", "providerId":
"pasqal", "target": "pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1314'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=7
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Executing", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:40:39.870065+00:00", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "qdk-python-test", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "pasqal", "target": "pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1346'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=8
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Executing", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:40:39.870065+00:00", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "qdk-python-test", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "pasqal", "target": "pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1346'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=9
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Executing", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:40:39.870065+00:00", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "qdk-python-test", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "pasqal", "target": "pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1346'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=10
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Executing", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/outputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:40:39.870065+00:00", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "qdk-python-test", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "pasqal", "target": "pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": null, "costEstimate": null, "itemType": "Job"}'
headers:
connection:
- keep-alive
content-length:
- '1346'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=11
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Succeeded", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/rawOutputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:40:39.870065+00:00", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "qdk-python-test", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "pasqal", "target": "pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": "2024-05-01T17:40:51.165632+00:00", "costEstimate": {"currencyCode":
"USD", "events": [{"dimensionId": "compute-time-emu", "dimensionName": "Compute
Time on HPC-based Emulators", "measureUnit": "per hour", "amountBilled": 0.0031,
"amountConsumed": 0.0031, "unitPrice": 0.0}, {"dimensionId": "compute-time-qpu",
"dimensionName": "Compute Time on QPU", "measureUnit": "per hour", "amountBilled":
0.0, "amountConsumed": 0.0, "unitPrice": 0.0}], "estimatedTotal": 0.0}, "itemType":
"Job"}'
headers:
connection:
- keep-alive
content-length:
- '1787'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=12
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Succeeded", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/rawOutputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:40:39.870065+00:00", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "qdk-python-test", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "pasqal", "target": "pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": "2024-05-01T17:40:51.165632+00:00", "costEstimate": {"currencyCode":
"USD", "events": [{"dimensionId": "compute-time-emu", "dimensionName": "Compute
Time on HPC-based Emulators", "measureUnit": "per hour", "amountBilled": 0.0031,
"amountConsumed": 0.0031, "unitPrice": 0.0}, {"dimensionId": "compute-time-qpu",
"dimensionName": "Compute Time on QPU", "measureUnit": "per hour", "amountBilled":
0.0, "amountConsumed": 0.0, "unitPrice": 0.0}], "estimatedTotal": 0.0}, "itemType":
"Job"}'
headers:
connection:
- keep-alive
content-length:
- '1787'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/jobs/00000000-0000-0000-0000-000000000001?api-version=2022-09-12-preview&test-sequence-id=13
response:
body:
string: '{"containerUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001?sv=PLACEHOLDER&sr=c&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=rcwl",
"inputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/inputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.input.json",
"inputDataFormat": "pasqal.pulser.v1", "inputParams": {}, "metadata": null,
"sessionId": null, "status": "Succeeded", "jobType": "QuantumComputing", "outputDataFormat":
"pasqal.pulser-results.v1", "outputDataUri": "https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/rawOutputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json",
"beginExecutionTime": "2024-05-01T17:40:39.870065+00:00", "cancellationTime":
null, "quantumComputingData": {"count": 1}, "errorData": null, "isCancelling":
false, "tags": [], "name": "qdk-python-test", "id": "00000000-0000-0000-0000-000000000001",
"providerId": "pasqal", "target": "pasqal.sim.emu-tn", "creationTime": "2024-05-01T17:40:35.1173625+00:00",
"endExecutionTime": "2024-05-01T17:40:51.165632+00:00", "costEstimate": {"currencyCode":
"USD", "events": [{"dimensionId": "compute-time-emu", "dimensionName": "Compute
Time on HPC-based Emulators", "measureUnit": "per hour", "amountBilled": 0.0031,
"amountConsumed": 0.0031, "unitPrice": 0.0}, {"dimensionId": "compute-time-qpu",
"dimensionName": "Compute Time on QPU", "measureUnit": "per hour", "amountBilled":
0.0, "amountConsumed": 0.0, "unitPrice": 0.0}], "estimatedTotal": 0.0}, "itemType":
"Job"}'
headers:
connection:
- keep-alive
content-length:
- '1787'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/xml
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- azsdk-python-storage-blob/12.19.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-ms-date:
- Wed, 01 May 2024 17:41:00 GMT
x-ms-range:
- bytes=0-33554431
x-ms-version:
- '2023-11-03'
method: GET
uri: https://mystorage.blob.core.windows.net/job-00000000-0000-0000-0000-000000000001/rawOutputData?sv=PLACEHOLDER&sr=b&sig=PLACEHOLDER&se=2050-01-01T00%3A00%3A00Z&sp=r&rscd=attachment%3B%20filename%3Dqdk-python-test-00000000-0000-0000-0000-000000000001.output.json
response:
body:
string: '{"001011": 13, "110001": 6, "000000": 6, "100011": 6, "001000": 5,
"010001": 5, "010011": 5, "100010": 5, "001101": 4, "100001": 4, "100000":
4, "001010": 4, "010000": 3, "010100": 3, "101100": 3, "110010": 2, "100100":
2, "110000": 2, "000100": 2, "111011": 2, "001111": 2, "011100": 2, "011010":
1, "001001": 1, "001100": 1, "111100": 1, "000011": 1, "101000": 1, "000001":
1, "110100": 1, "000010": 1, "001110": 1}'
headers:
accept-ranges:
- bytes
content-length:
- '417'
content-range:
- bytes 0-416/417
content-type:
- application/json
x-ms-blob-content-md5:
- Jqkh0ksZM7W8jgtMNkrnfA==
x-ms-blob-type:
- BlockBlob
x-ms-creation-time:
- Wed, 01 May 2024 17:40:35 GMT
x-ms-lease-state:
- available
x-ms-lease-status:
- unlocked
x-ms-server-encrypted:
- 'true'
x-ms-version:
- '2023-11-03'
status:
code: 206
message: Partial Content
version: 1
|
azure-quantum-python/azure-quantum/tests/unit/recordings/test_job_submit_pasqal_default_input_params.yaml/0
|
{
"file_path": "azure-quantum-python/azure-quantum/tests/unit/recordings/test_job_submit_pasqal_default_input_params.yaml",
"repo_id": "azure-quantum-python",
"token_count": 20223
}
| 372 |
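The recording above captures a complete Pasqal job lifecycle against the `pasqal.sim.emu-tn` target: requesting a SAS URI, creating the job container (after an initial 404), uploading the serialized Pulser sequence as `inputData`, submitting the job, polling its status from `Waiting` through `Executing` to `Succeeded`, and finally downloading the bitstring histogram from `rawOutputData`. The following is a minimal client-side sketch of that flow, assuming a configured `Workspace`; the placeholder identifiers and the `pulser_json` payload are illustrative, not the exact test inputs.

```python
from azure.quantum import Workspace

# Placeholder workspace configuration matching the recording's URIs.
workspace = Workspace(
    subscription_id="00000000-0000-0000-0000-000000000000",
    resource_group="myresourcegroup",
    name="myworkspace",
    location="eastus",
)

# Look up the Pasqal emulator target seen in the recording.
target = workspace.get_targets("pasqal.sim.emu-tn")

# Stand-in for the serialized Pulser sequence that the recording uploads
# as the "pasqal.pulser.v1" input blob (e.g. sequence.to_abstract_repr()).
pulser_json = "..."

job = target.submit(input_data=pulser_json, name="qdk-python-test")

# Mirrors the repeated GET polls in the recording: Waiting -> Executing
# -> Succeeded, after which the output blob holds a bitstring histogram
# such as {"001011": 13, "110001": 6, ...}.
job.wait_until_completed()
print(job.get_results())
```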
interactions:
- request:
body: client_id=PLACEHOLDER&grant_type=client_credentials&client_assertion=PLACEHOLDER&client_info=1&client_assertion_type=PLACEHOLDER&scope=https%3A%2F%2Fquantum.microsoft.com%2F.default
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '181'
Content-Type:
- application/x-www-form-urlencoded
User-Agent:
- azsdk-python-identity/1.16.0 Python/3.9.19 (Windows-10-10.0.22631-SP0)
x-client-current-telemetry:
- 4|730,2|
x-client-os:
- win32
x-client-sku:
- MSAL.Python
x-client-ver:
- 1.28.0
method: POST
uri: https://login.microsoftonline.com/00000000-0000-0000-0000-000000000000/oauth2/v2.0/token
response:
body:
string: '{"token_type": "Bearer", "expires_in": 1746122811, "ext_expires_in":
1746122811, "refresh_in": 31536000, "access_token": "PLACEHOLDER"}'
headers:
content-length:
- '135'
content-type:
- application/json; charset=utf-8
status:
code: 200
message: OK
- request:
body: null
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- testapp azsdk-python-quantum/0.0.1 Python/3.9.19 (Windows-10-10.0.22631-SP0)
method: GET
uri: https://eastus.quantum.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myresourcegroup/providers/Microsoft.Quantum/workspaces/myworkspace/quotas?api-version=2022-09-12-preview&test-sequence-id=1
response:
body:
string: '{"value": [{"dimension": "concurrent_dft_jobs", "scope": "Subscription",
"providerId": "microsoft-elements", "utilization": 0.0, "holds": 0.0, "limit":
5.0, "period": "None"}, {"dimension": "qgs", "scope": "Subscription", "providerId":
"ionq", "utilization": 0.0, "holds": 0.0, "limit": 16666667.0, "period": "Infinite"},
{"dimension": "concurrent_resource_estimator_jobs", "scope": "Workspace",
"providerId": "microsoft-qc", "utilization": 0.0, "holds": 0.0, "limit": 10.0,
"period": "None"}, {"dimension": "qpu_hours", "scope": "Subscription", "providerId":
"pasqal", "utilization": 0.0, "holds": 0.0, "limit": 0.167, "period": "Infinite"},
{"dimension": "emulator_hours", "scope": "Subscription", "providerId": "pasqal",
"utilization": 1.1056251225, "holds": 0.0, "limit": 20.0, "period": "Infinite"},
{"dimension": "provider-credit", "scope": "Subscription", "providerId": "rigetti",
"utilization": 3.0, "holds": 0.0, "limit": 25000.0, "period": "Infinite"}],
"nextLink": null}'
headers:
connection:
- keep-alive
content-length:
- '981'
content-type:
- application/json; charset=utf-8
transfer-encoding:
- chunked
status:
code: 200
message: OK
version: 1
|
azure-quantum-python/azure-quantum/tests/unit/recordings/test_workspace_job_quotas.yaml/0
|
{
"file_path": "azure-quantum-python/azure-quantum/tests/unit/recordings/test_workspace_job_quotas.yaml",
"repo_id": "azure-quantum-python",
"token_count": 1376
}
| 373 |
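The quotas recording corresponds to `Workspace.get_quotas()`, which the SDK exposes as a list of dictionaries with snake_case keys (compare `test_workspace_job_quotas` in the test file later in this dump). A minimal sketch, assuming a configured `Workspace` with placeholder identifiers:

```python
from azure.quantum import Workspace

workspace = Workspace(
    subscription_id="00000000-0000-0000-0000-000000000000",
    resource_group="myresourcegroup",
    name="myworkspace",
    location="eastus",
)

for quota in workspace.get_quotas():
    # Keys mirror the REST payload above: dimension, scope, provider_id,
    # utilization, holds, limit, period.
    print(
        f'{quota["provider_id"]}/{quota["dimension"]}: '
        f'{quota["utilization"]}/{quota["limit"]} ({quota["period"]})'
    )
```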
##
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
##
import os
from unittest import mock
import pytest
from common import (
QuantumTestBase,
SUBSCRIPTION_ID,
RESOURCE_GROUP,
WORKSPACE,
LOCATION,
STORAGE,
API_KEY,
)
from azure.quantum import Workspace
from azure.quantum._constants import (
EnvironmentVariables,
ConnectionConstants,
)
from azure.core.credentials import AzureKeyCredential
from azure.core.pipeline.policies import AzureKeyCredentialPolicy
from azure.identity import EnvironmentCredential
SIMPLE_RESOURCE_ID = ConnectionConstants.VALID_RESOURCE_ID(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
workspace_name=WORKSPACE,
)
SIMPLE_CONNECTION_STRING = ConnectionConstants.VALID_CONNECTION_STRING(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
workspace_name=WORKSPACE,
api_key=API_KEY,
quantum_endpoint=ConnectionConstants.GET_QUANTUM_PRODUCTION_ENDPOINT(LOCATION)
)
class TestWorkspace(QuantumTestBase):
def test_create_workspace_instance_valid(self):
ws = Workspace(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
name=WORKSPACE,
location=LOCATION,
)
self.assertEqual(ws.subscription_id, SUBSCRIPTION_ID)
self.assertEqual(ws.resource_group, RESOURCE_GROUP)
self.assertEqual(ws.name, WORKSPACE)
self.assertEqual(ws.location, LOCATION)
ws = Workspace(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
name=WORKSPACE,
location=LOCATION,
storage=STORAGE,
)
self.assertEqual(ws.storage, STORAGE)
ws = Workspace(
resource_id=SIMPLE_RESOURCE_ID,
location=LOCATION,
)
self.assertEqual(ws.subscription_id, SUBSCRIPTION_ID)
self.assertEqual(ws.resource_group, RESOURCE_GROUP)
self.assertEqual(ws.name, WORKSPACE)
self.assertEqual(ws.location, LOCATION)
ws = Workspace(
resource_id=SIMPLE_RESOURCE_ID,
storage=STORAGE,
location=LOCATION,
)
self.assertEqual(ws.storage, STORAGE)
def test_create_workspace_locations(self):
# User-provided location name should be normalized
location = "East US"
ws = Workspace(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
name=WORKSPACE,
location=location,
)
self.assertEqual(ws.location, "eastus")
def test_env_connection_string(self):
with mock.patch.dict(os.environ):
self.clear_env_vars(os.environ)
os.environ[EnvironmentVariables.CONNECTION_STRING] = SIMPLE_CONNECTION_STRING
workspace = Workspace()
self.assertEqual(workspace.location, LOCATION)
self.assertEqual(workspace.subscription_id, SUBSCRIPTION_ID)
self.assertEqual(workspace.name, WORKSPACE)
self.assertEqual(workspace.resource_group, RESOURCE_GROUP)
self.assertIsInstance(workspace.credential, AzureKeyCredential)
self.assertEqual(workspace.credential.key, API_KEY)
# pylint: disable=protected-access
self.assertIsInstance(
workspace._client._config.authentication_policy,
AzureKeyCredentialPolicy)
auth_policy = workspace._client._config.authentication_policy
self.assertEqual(auth_policy._name, ConnectionConstants.QUANTUM_API_KEY_HEADER)
self.assertEqual(id(auth_policy._credential),
id(workspace.credential))
def test_workspace_from_connection_string(self):
with mock.patch.dict(
os.environ,
clear=True
):
workspace = Workspace.from_connection_string(SIMPLE_CONNECTION_STRING)
self.assertEqual(workspace.location, LOCATION)
self.assertIsInstance(workspace.credential, AzureKeyCredential)
self.assertEqual(workspace.credential.key, API_KEY)
# pylint: disable=protected-access
self.assertIsInstance(
workspace._client._config.authentication_policy,
AzureKeyCredentialPolicy)
auth_policy = workspace._client._config.authentication_policy
self.assertEqual(auth_policy._name, ConnectionConstants.QUANTUM_API_KEY_HEADER)
self.assertEqual(id(auth_policy._credential),
id(workspace.credential))
# assert that the connection string environment variable
# does not overwrite values that were set
# via the other environment variables
with mock.patch.dict(os.environ):
self.clear_env_vars(os.environ)
wrong_subscription_id = "00000000-2BAD-2BAD-2BAD-000000000000"
wrong_resource_group = "wrongrg"
wrong_workspace = "wrong-workspace"
wrong_location = "wrong-location"
# make sure the values above are really different from the default values
self.assertNotEqual(wrong_subscription_id, SUBSCRIPTION_ID)
self.assertNotEqual(wrong_resource_group, RESOURCE_GROUP)
self.assertNotEqual(wrong_workspace, WORKSPACE)
self.assertNotEqual(wrong_location, LOCATION)
wrong_connection_string = ConnectionConstants.VALID_CONNECTION_STRING(
subscription_id=wrong_subscription_id,
resource_group=wrong_resource_group,
workspace_name=wrong_workspace,
api_key=API_KEY,
quantum_endpoint=ConnectionConstants.GET_QUANTUM_PRODUCTION_ENDPOINT(wrong_location)
)
os.environ[EnvironmentVariables.CONNECTION_STRING] = wrong_connection_string
os.environ[EnvironmentVariables.LOCATION] = LOCATION
os.environ[EnvironmentVariables.SUBSCRIPTION_ID] = SUBSCRIPTION_ID
os.environ[EnvironmentVariables.RESOURCE_GROUP] = RESOURCE_GROUP
os.environ[EnvironmentVariables.WORKSPACE_NAME] = WORKSPACE
workspace = Workspace()
self.assertEqual(workspace.location, LOCATION)
self.assertEqual(workspace.subscription_id, SUBSCRIPTION_ID)
self.assertEqual(workspace.resource_group, RESOURCE_GROUP)
self.assertEqual(workspace.name, WORKSPACE)
# since no credential was passed, we will use the api-key
# credential from the connection string
self.assertIsInstance(workspace.credential, AzureKeyCredential)
# if we pass a credential, then it should be used
workspace = Workspace(credential=EnvironmentCredential())
self.assertIsInstance(workspace.credential, EnvironmentCredential)
# the connection string passed as a parameter should override the
# connection string from the env var
self.clear_env_vars(os.environ)
os.environ[EnvironmentVariables.CONNECTION_STRING] = wrong_connection_string
connection_string = ConnectionConstants.VALID_CONNECTION_STRING(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
workspace_name=WORKSPACE,
api_key=API_KEY,
quantum_endpoint=ConnectionConstants.GET_QUANTUM_PRODUCTION_ENDPOINT(LOCATION)
)
workspace = Workspace.from_connection_string(connection_string=connection_string)
self.assertEqual(workspace.location, LOCATION)
self.assertEqual(workspace.subscription_id, SUBSCRIPTION_ID)
self.assertEqual(workspace.resource_group, RESOURCE_GROUP)
self.assertEqual(workspace.name, WORKSPACE)
# the connection string in the env var should not be parsed if we
# don't really need it
self.clear_env_vars(os.environ)
os.environ[EnvironmentVariables.CONNECTION_STRING] = "bad-connection-string"
connection_string = ConnectionConstants.VALID_CONNECTION_STRING(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
workspace_name=WORKSPACE,
api_key=API_KEY,
quantum_endpoint=ConnectionConstants.GET_QUANTUM_PRODUCTION_ENDPOINT(LOCATION)
)
workspace = Workspace.from_connection_string(connection_string=connection_string)
self.assertEqual(workspace.location, LOCATION)
self.assertEqual(workspace.subscription_id, SUBSCRIPTION_ID)
self.assertEqual(workspace.resource_group, RESOURCE_GROUP)
self.assertEqual(workspace.name, WORKSPACE)
def test_create_workspace_instance_invalid(self):
def assert_value_error(exception):
self.assertIn("Azure Quantum workspace not fully specified.",
exception.args[0])
with mock.patch.dict(os.environ):
self.clear_env_vars(os.environ)
# missing location
with self.assertRaises(ValueError) as context:
Workspace(
location=None,
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
name=WORKSPACE,
)
assert_value_error(context.exception)
# missing location
with self.assertRaises(ValueError) as context:
Workspace(resource_id=SIMPLE_RESOURCE_ID)
assert_value_error(context.exception)
# missing subscription id
with self.assertRaises(ValueError) as context:
Workspace(
location=LOCATION,
subscription_id=None,
resource_group=RESOURCE_GROUP,
name=WORKSPACE
)
assert_value_error(context.exception)
# missing resource group
with self.assertRaises(ValueError) as context:
Workspace(
location=LOCATION,
subscription_id=SUBSCRIPTION_ID,
resource_group=None,
name=WORKSPACE
)
assert_value_error(context.exception)
# missing workspace name
with self.assertRaises(ValueError) as context:
Workspace(
location=LOCATION,
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
name=None
)
assert_value_error(context.exception)
# missing everything
with self.assertRaises(ValueError) as context:
Workspace()
assert_value_error(context.exception)
# invalid resource id
with self.assertRaises(ValueError) as context:
Workspace(
location=LOCATION,
resource_id="invalid/resource/id")
self.assertIn("Invalid resource id",
context.exception.args[0])
@pytest.mark.ionq
@pytest.mark.live_test
def test_workspace_get_targets_ionq(self):
ws = self.create_workspace()
targets = ws.get_targets()
self.assertNotIn(None, targets)
test_targets = set([
'ionq.simulator'
])
self.assertTrue(test_targets.issubset(set([t.name for t in targets])))
@pytest.mark.quantinuum
@pytest.mark.live_test
def test_workspace_get_targets_quantinuum(self):
ws = self.create_workspace()
targets = ws.get_targets()
self.assertNotIn(None, targets)
test_targets = set([
'quantinuum.sim.h1-1sc',
'quantinuum.sim.h1-1e',
'quantinuum.qpu.h1-1',
'quantinuum.sim.h2-1sc',
'quantinuum.sim.h2-1e',
'quantinuum.qpu.h2-1'
])
self.assertTrue(test_targets.issubset(set([t.name for t in targets])))
@pytest.mark.ionq
@pytest.mark.live_test
def test_workspace_get_target_ionq(self):
ws = self.create_workspace()
target = ws.get_targets("ionq.qpu")
self.assertIsNotNone(target.average_queue_time)
self.assertIsNotNone(target.current_availability)
self.assertEqual(target.name, "ionq.qpu")
target.refresh()
self.assertIsNotNone(target.average_queue_time)
self.assertIsNotNone(target.current_availability)
# target lookup is case insensitive
target1 = ws.get_targets("IonQ.QPU")
self.assertEqual(target.name, target1.name)
with pytest.raises(ValueError):
target.name = "foo"
target.refresh()
@pytest.mark.ionq
@pytest.mark.live_test
def test_workspace_get_targets_result_type(self):
ws = self.create_workspace()
targets = ws.get_targets()
assert isinstance(targets, list)
        # For now, looking up a single target by name returns the instance
        # itself rather than a list; this is expected to change in the next
        # major release.
target = ws.get_targets(name="ionq.qpu")
assert not isinstance(target, list)
@pytest.mark.microsoft_qc
@pytest.mark.live_test
def test_workspace_get_target_microsoft_qc(self):
from azure.quantum.target.microsoft import MicrosoftEstimator
ws = self.create_workspace()
target = ws.get_targets("microsoft.estimator")
self.assertEqual(type(target), MicrosoftEstimator)
@pytest.mark.live_test
def test_workspace_job_quotas(self):
ws = self.create_workspace()
quotas = ws.get_quotas()
self.assertGreater(len(quotas), 0)
self.assertIn("dimension", quotas[0])
self.assertIn("scope", quotas[0])
self.assertIn("provider_id", quotas[0])
self.assertIn("utilization", quotas[0])
self.assertIn("holds", quotas[0])
self.assertIn("limit", quotas[0])
self.assertIn("period", quotas[0])
@pytest.mark.live_test
def test_workspace_list_jobs(self):
ws = self.create_workspace()
jobs = ws.list_jobs()
self.assertIsInstance(jobs, list)
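    # The resolved user agent combines the EnvironmentVariables.USER_AGENT_APPID
    # environment variable (when set) with the user_agent constructor argument;
    # append_user_agent() attaches further "-"-joined suffixes, and passing None
    # resets the appended portion. The cases below cover each combination.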
def test_workspace_user_agent_appid(self):
app_id = "MyEnvVarAppId"
user_agent = "MyUserAgent"
with mock.patch.dict(os.environ):
self.clear_env_vars(os.environ)
# no UserAgent parameter and no EnvVar AppId
os.environ[EnvironmentVariables.USER_AGENT_APPID] = ""
ws = Workspace(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
name=WORKSPACE,
location=LOCATION
)
self.assertIsNone(ws.user_agent)
# no UserAgent parameter and with EnvVar AppId
os.environ[EnvironmentVariables.USER_AGENT_APPID] = app_id
ws = Workspace(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
name=WORKSPACE,
location=LOCATION
)
self.assertEqual(ws.user_agent, app_id)
# with UserAgent parameter and no EnvVar AppId
os.environ[EnvironmentVariables.USER_AGENT_APPID] = ""
ws = Workspace(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
name=WORKSPACE,
location=LOCATION,
user_agent=user_agent
)
self.assertEqual(ws.user_agent, user_agent)
# with UserAgent parameter and EnvVar AppId
os.environ[EnvironmentVariables.USER_AGENT_APPID] = app_id
ws = Workspace(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
name=WORKSPACE,
location=LOCATION,
user_agent=user_agent
)
self.assertEqual(ws.user_agent,
f"{app_id} {user_agent}")
# Append with UserAgent parameter and with EnvVar AppId
os.environ[EnvironmentVariables.USER_AGENT_APPID] = app_id
ws = Workspace(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
name=WORKSPACE,
location=LOCATION,
user_agent=user_agent
)
ws.append_user_agent("featurex")
self.assertEqual(ws.user_agent,
f"{app_id} {user_agent}-featurex")
ws.append_user_agent(None)
self.assertEqual(ws.user_agent, app_id)
# Append with no UserAgent parameter and no EnvVar AppId
os.environ[EnvironmentVariables.USER_AGENT_APPID] = ""
ws = Workspace(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP,
name=WORKSPACE,
location=LOCATION
)
ws.append_user_agent("featurex")
self.assertEqual(ws.user_agent, "featurex")
|
azure-quantum-python/azure-quantum/tests/unit/test_workspace.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/tests/unit/test_workspace.py",
"repo_id": "azure-quantum-python",
"token_count": 8308
}
| 374 |
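The test file above exercises three equivalent ways to configure a `Workspace`; here is a condensed sketch for application code, with placeholder identifiers (the resource ID follows the ARM form built by `ConnectionConstants.VALID_RESOURCE_ID`):

```python
from azure.quantum import Workspace

# 1. Explicit parameters.
ws = Workspace(
    subscription_id="00000000-0000-0000-0000-000000000000",
    resource_group="myresourcegroup",
    name="myworkspace",
    location="eastus",
)

# 2. An ARM resource ID plus an explicit location.
ws = Workspace(
    resource_id=(
        "/subscriptions/00000000-0000-0000-0000-000000000000"
        "/resourceGroups/myresourcegroup"
        "/providers/Microsoft.Quantum/workspaces/myworkspace"
    ),
    location="eastus",
)

# 3. A connection string, which carries an API key and therefore yields
# an AzureKeyCredential unless another credential is passed explicitly.
ws = Workspace.from_connection_string("<connection string>")
```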
---
page_type: sample
author: msoeken
description: Azure Quantum Resource Estimation
ms.author: [email protected]
ms.date:
languages:
- python
- qsharp
products:
- azure-quantum
---
# Azure Quantum Resource Estimation
This folder contains sample notebooks for interacting with the Azure Quantum Resource Estimator.
## Manifest
- [estimation-qiskit.ipynb](https://github.com/microsoft/azure-quantum-python/blob/main/samples/resource-estimator/estimation-qiskit.ipynb): Estimates with Qiskit input
- [estimation-qir.ipynb](https://github.com/microsoft/azure-quantum-python/blob/main/samples/resource-estimator/estimation-qir.ipynb): Estimates with tools producing QIR
- [estimation-chemistry.ipynb](https://github.com/microsoft/azure-quantum-python/blob/main/samples/resource-estimator/estimation-chemistry.ipynb): Resource estimation for a double-factorized chemistry application
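For orientation alongside the notebooks, here is a minimal sketch of driving the estimator target directly from Python. It assumes an existing workspace and a QIR payload `qir_bitcode`; the accepted input formats and the result schema are covered in the notebooks themselves.

```python
from azure.quantum import Workspace

workspace = Workspace(resource_id="<resource id>", location="<location>")

# The Resource Estimator is exposed as an ordinary workspace target.
estimator = workspace.get_targets("microsoft.estimator")

qir_bitcode = b"..."  # placeholder for QIR produced by Qiskit, Q#, or other tools
job = estimator.submit(input_data=qir_bitcode, name="estimation-sample")
job.wait_until_completed()
print(job.get_results())
```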
|
azure-quantum-python/samples/resource-estimator/README.md/0
|
{
"file_path": "azure-quantum-python/samples/resource-estimator/README.md",
"repo_id": "azure-quantum-python",
"token_count": 280
}
| 375 |
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Line chart tests Verify Line Chart: LineChart 1`] = `
<div>
<div
style={
{
"alignItems": "center",
"justifyContent": "center",
}
}
>
<svg
height={1000}
id="linechart"
width={1000}
/>
</div>
</div>
`;
|
azure-quantum-python/visualization/react-lib/src/components/d3-visualization-components/__tests__/__snapshots__/LineChart.test.tsx.snap/0
|
{
"file_path": "azure-quantum-python/visualization/react-lib/src/components/d3-visualization-components/__tests__/__snapshots__/LineChart.test.tsx.snap",
"repo_id": "azure-quantum-python",
"token_count": 161
}
| 376 |
{
"compilerOptions": {
"esModuleInterop": true,
"jsx": "react",
"module": "esnext",
"moduleResolution": "node",
"lib": [
"dom",
"es2016"
],
"strict": true,
"sourceMap": true,
"target": "es5",
},
"exclude": [
"node_modules"
]
}
|
azure-quantum-python/visualization/react-lib/tsconfig.json/0
|
{
"file_path": "azure-quantum-python/visualization/react-lib/tsconfig.json",
"repo_id": "azure-quantum-python",
"token_count": 142
}
| 377 |
all:
+$(MAKE) -C python
+$(MAKE) -C js
+$(MAKE) -C docs html
deps:
+$(MAKE) -C python deps
+$(MAKE) -C js deps
+$(MAKE) -C docs deps
check:
+$(MAKE) -C python check
+$(MAKE) -C js check
+$(MAKE) -C docs doctest
clean:
+$(MAKE) -C python clean
+$(MAKE) -C js clean
+$(MAKE) -C docs clean
.PHONY: all deps check clean
|
bistring/Makefile/0
|
{
"file_path": "bistring/Makefile",
"repo_id": "bistring",
"token_count": 173
}
| 378 |
BistrBuilder
============
.. testsetup:: *
from bistring import BistrBuilder
.. autoclass:: bistring.BistrBuilder
|
bistring/docs/Python/BistrBuilder.rst/0
|
{
"file_path": "bistring/docs/Python/BistrBuilder.rst",
"repo_id": "bistring",
"token_count": 42
}
| 379 |
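The `BistrBuilder` page above relies entirely on autodoc output; for orientation, a minimal usage sketch follows. The `skip`/`replace`/`build` calls follow the published bistring builder API, while the concrete string and offsets are illustrative.

```python
from bistring import BistrBuilder

builder = BistrBuilder("Hello, WORLD!")
builder.skip(7)              # copy "Hello, " through unchanged
builder.replace(5, "world")  # rewrite the next 5 chars ("WORLD")
builder.skip(1)              # copy the trailing "!"
s = builder.build()          # a bistr linking original and modified forms

print(s.modified)            # Hello, world!
print(s[7:12].original)      # WORLD; slices map back to the source text
```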
import babel from "@rollup/plugin-babel";
import commonjs from "@rollup/plugin-commonjs";
import typescript from "@rollup/plugin-typescript";
import fs from "fs";
import pkg from "./package.json" assert { type: "json" };
export default [
{
input: "src/index.ts",
output: [
{
file: pkg.main,
format: "cjs",
exports: "named",
sourcemap: true,
},
{
file: pkg.module,
format: "esm",
exports: "named",
sourcemap: true,
},
],
external: [
...Object.keys(pkg.dependencies || {}),
],
plugins: [
typescript({
tsconfig: "./tsconfig.json",
}),
babel({
exclude: "node_modules/**",
extensions: [".js", ".ts"],
babelHelpers: "bundled",
presets: [
[
"@babel/preset-env",
{
corejs: 3,
targets: {
node: "current",
},
useBuiltIns: "usage",
shippedProposals: true,
},
],
],
}),
commonjs(),
],
},
{
input: "src/index.ts",
output: {
file: pkg.browser,
format: "umd",
exports: "named",
name: "bistring",
sourcemap: true,
},
external: [
...Object.keys(pkg.dependencies || {}),
"regenerator-runtime/runtime",
],
plugins: [
typescript({
tsconfig: "./tsconfig.json",
}),
babel({
exclude: "node_modules/**",
extensions: [".js", ".ts"],
babelHelpers: "bundled",
presets: [
[
"@babel/preset-env",
{
corejs: 3,
targets: {
browsers: "> 2%, not dead",
},
useBuiltIns: "usage",
shippedProposals: true,
},
],
],
}),
commonjs(),
],
},
];
|
bistring/js/rollup.config.mjs/0
|
{
"file_path": "bistring/js/rollup.config.mjs",
"repo_id": "bistring",
"token_count": 1712
}
| 380 |
all:
pipenv run mypy -p bistring --html-report=build/mypy
deps:
pipenv sync --dev
check: all
pipenv run pytest
clean:
pipenv run python setup.py clean --all
.PHONY: all deps check clean
|
bistring/python/Makefile/0
|
{
"file_path": "bistring/python/Makefile",
"repo_id": "bistring",
"token_count": 79
}
| 381 |
#!/usr/bin/env python3
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.
from pathlib import Path
from setuptools import setup
def readme():
with open(Path(__file__).parent/'README.rst') as f:
return f.read()
setup(
name='bistring',
version='0.5.0',
description='Bidirectionally transformed strings',
long_description=readme(),
long_description_content_type='text/x-rst',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.10',
'Topic :: Text Processing :: General',
'Typing :: Typed',
],
keywords='bistring string non-destructive',
url='https://github.com/microsoft/bistring',
author='Microsoft Research Montreal',
author_email='[email protected]',
license='MIT',
packages=[
'bistring',
],
package_data={
'bistring': [
'py.typed',
],
},
zip_safe=False,
test_suite='tests',
python_requires='>=3.10',
setup_requires=[
'pytest-runner',
],
install_requires=[
'pyicu',
],
extras_require={
'dev': [
'exceptiongroup',
'lxml',
'mypy',
'pytest',
'regex',
'tomli',
],
},
tests_require=[
'bistring[dev]',
],
)
|
bistring/python/setup.py/0
|
{
"file_path": "bistring/python/setup.py",
"repo_id": "bistring",
"token_count": 651
}
| 382 |
# 
This repository contains code for the Python version of the [Microsoft Bot Framework SDK](https://github.com/Microsoft/botframework-sdk), which is part of the Microsoft Bot Framework - a comprehensive framework for building enterprise-grade conversational AI experiences.
This SDK enables developers to model conversation and build sophisticated bot applications using Python. SDKs for [JavaScript](https://github.com/Microsoft/botbuilder-js) and [.NET](https://github.com/Microsoft/botbuilder-dotnet) are also available.
To get started building bots using the SDK, see the [Azure Bot Service Documentation](https://docs.microsoft.com/en-us/azure/bot-service/?view=azure-bot-service-4.0).
For more information jump to a section below.
* [Build status](#build-status)
* [Packages](#packages)
* [Getting started](#getting-started)
* [Getting support and providing feedback](#getting-support-and-providing-feedback)
* [Contributing and our code of conduct](#contributing-and-our-code-of-conduct)
* [Reporting security issues](#reporting-security-issues)
## Build Status
| Branch | Description | Build Status | Coverage Status | Code Style |
|----|---------------|--------------|-----------------|--|
| Main | 4.16.0 Builds | [](https://fuselabs.visualstudio.com/SDK_v4/_build/latest?definitionId=771&branchName=main) | [](https://coveralls.io/github/microsoft/botbuilder-python?branch=HEAD) | [](https://github.com/psf/black) |
## Packages
| Build | Released Package |
|----|---------------|
| botbuilder-ai | [](https://pypi.org/project/botbuilder-ai/) |
| botbuilder-applicationinsights | [](https://pypi.org/project/botbuilder-applicationinsights/) |
| botbuilder-azure | [](https://pypi.org/project/botbuilder-azure/) |
| botbuilder-core | [](https://pypi.org/project/botbuilder-core/) |
| botbuilder-dialogs | [](https://pypi.org/project/botbuilder-dialogs/) |
| botbuilder-schema | [](https://pypi.org/project/botbuilder-schema/) |
| botframework-connector | [](https://pypi.org/project/botframework-connector/) |
## Getting Started
To get started building bots using the SDK, see the [Azure Bot Service Documentation](https://docs.microsoft.com/en-us/azure/bot-service/?view=azure-bot-service-4.0).
The [Bot Framework Samples](https://github.com/microsoft/botbuilder-samples) repository includes a rich set of samples.
If you want to debug an issue, would like to [contribute](#contributing-code), or understand how the Bot Builder SDK works, instructions for building and testing the SDK are below.
### Prerequisites
- [Git](https://git-scm.com/downloads)
- [Python 3.8.17](https://www.python.org/downloads/)
Python "Virtual Environments" allow Python packages to be installed in an isolated location for a particular application, rather than being installed globally, as such it is common practice to use them. Click [here](https://packaging.python.org/tutorials/installing-packages/#creating-virtual-environments) to learn more about creating _and activating_ Virtual Environments in Python.
### Clone
Clone a copy of the repo:
```bash
git clone https://github.com/Microsoft/botbuilder-python.git
```
Change to the SDK's directory:
```bash
cd botbuilder-python
```
### Using the SDK locally
To use a local copy of the SDK, you can install these packages in editable mode with pip's `-e` option.
```bash
pip install -e ./libraries/botbuilder-schema
pip install -e ./libraries/botframework-connector
pip install -e ./libraries/botframework-streaming
pip install -e ./libraries/botbuilder-core
pip install -e ./libraries/botbuilder-ai
pip install -e ./libraries/botbuilder-applicationinsights
pip install -e ./libraries/botbuilder-dialogs
pip install -e ./libraries/botbuilder-azure
pip install -e ./libraries/botbuilder-integration-applicationinsights-aiohttp
pip install -e ./libraries/botbuilder-adapters-slack
pip install -e ./libraries/botbuilder-integration-aiohttp
pip install -e ./libraries/botbuilder-testing
```
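One way to sanity-check that the editable installs took effect (the printed path will vary per machine) is to ask Python where a package resolves from:
```bash
python -c "import botbuilder.core; print(botbuilder.core.__file__)"
```
If the path points into your local `./libraries` checkout, the editable install is active.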
### Running unit tests
First, execute the following commands from the root level of the repo:
```bash
pip install -r ./libraries/botframework-connector/tests/requirements.txt
pip install -r ./libraries/botbuilder-core/tests/requirements.txt
pip install -r ./libraries/botbuilder-ai/tests/requirements.txt
```
Then run pytest by typing it into your CLI:
```bash
pytest
```
This is the expected output:
```bash
============================= test session starts =============================
platform win32 -- Python 3.8.2, pytest-3.4.0, py-1.5.2, pluggy-0.6.0
rootdir: C:\projects\botbuilder-python, inifile:
plugins: cov-2.5.1
...
```
## Getting support and providing feedback
Below are the various channels that are available to you for obtaining support and providing feedback. Please pay careful attention to which channel should be used for which type of content. For example, general "how do I..." questions should be asked on Stack Overflow, Twitter or Gitter, with GitHub issues being reserved for feature requests and bug reports.
### Github issues
[Github issues](https://github.com/Microsoft/botbuilder-python/issues) should be used for bugs and feature requests.
### Stack overflow
[Stack Overflow](https://stackoverflow.com/questions/tagged/botframework) is a great place for getting high-quality answers. Our support team, as well as many of our community members, are already on Stack Overflow providing answers to 'how-to' questions.
### Azure Support
If your issue relates to [Azure Bot Service](https://azure.microsoft.com/en-gb/services/bot-service/), you can take advantage of the available [Azure support options](https://azure.microsoft.com/en-us/support/options/).
### Twitter
We use the [@msbotframework](https://twitter.com/msbotframework) account on Twitter for announcements, and members of the development team watch for tweets mentioning [@msbotframework](https://twitter.com/msbotframework).
### Gitter Chat Room
The [Gitter Channel](https://gitter.im/Microsoft/BotBuilder) provides a place where the Community can get together and collaborate.
## Contributing and our code of conduct
We welcome contributions and suggestions. Please see our [contributing guidelines](./contributing.md) for more information.
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact
[[email protected]](mailto:[email protected]) with any additional questions or comments.
### Contributing Code
In order to create pull requests, submitted code must pass `pylint` and `black` checks. Run both tools on every file you've changed, as in the example below.
For more information and installation instructions, see:
* [black](https://pypi.org/project/black/)
* [pylint](https://pylint.org/)
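For example, assuming both tools are installed and `path/to/changed_file.py` stands in for a file you modified:
```bash
pip install black pylint
black path/to/changed_file.py
pylint path/to/changed_file.py
```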
## Reporting Security Issues
Security issues and bugs should be reported privately, via email, to the Microsoft Security Response Center (MSRC)
at [[email protected]](mailto:[email protected]). You should receive a response within 24 hours. If for some
reason you do not, please follow up via email to ensure we received your original message. Further information,
including the [MSRC PGP](https://technet.microsoft.com/en-us/security/dn606155) key, can be found in the
[Security TechCenter](https://technet.microsoft.com/en-us/security/default).
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the [MIT](./LICENSE) License.
|
botbuilder-python/README.md/0
|
{
"file_path": "botbuilder-python/README.md",
"repo_id": "botbuilder-python",
"token_count": 2406
}
| 383 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from .booking_dialog import BookingDialog
from .cancel_and_help_dialog import CancelAndHelpDialog
from .date_resolver_dialog import DateResolverDialog
from .main_dialog import MainDialog
__all__ = ["BookingDialog", "CancelAndHelpDialog", "DateResolverDialog", "MainDialog"]
|
botbuilder-python/generators/app/templates/core/{{cookiecutter.bot_name}}/dialogs/__init__.py/0
|
{
"file_path": "botbuilder-python/generators/app/templates/core/{{cookiecutter.bot_name}}/dialogs/__init__.py",
"repo_id": "botbuilder-python",
"token_count": 104
}
| 384 |
#!/usr/bin/env python3
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import os
class DefaultConfig:
""" Bot Configuration """
PORT = 3978
APP_ID = os.environ.get("MicrosoftAppId", "")
APP_PASSWORD = os.environ.get("MicrosoftAppPassword", "")
APP_TYPE = os.environ.get("MicrosoftAppType", "MultiTenant")
APP_TENANTID = os.environ.get("MicrosoftAppTenantId", "")
|
botbuilder-python/generators/app/templates/echo/{{cookiecutter.bot_name}}/config.py/0
|
{
"file_path": "botbuilder-python/generators/app/templates/echo/{{cookiecutter.bot_name}}/config.py",
"repo_id": "botbuilder-python",
"token_count": 152
}
| 385 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from .about import __title__, __version__
__all__ = ["__title__", "__version__"]
|
botbuilder-python/libraries/botbuilder-ai/botbuilder/ai/__init__.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-ai/botbuilder/ai/__init__.py",
"repo_id": "botbuilder-python",
"token_count": 50
}
| 386 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from .qnamaker_dialog import QnAMakerDialog
from .qnamaker_dialog_options import QnAMakerDialogOptions
__all__ = [
"QnAMakerDialogOptions",
"QnAMakerDialog",
]
|
botbuilder-python/libraries/botbuilder-ai/botbuilder/ai/qna/dialogs/__init__.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-ai/botbuilder/ai/qna/dialogs/__init__.py",
"repo_id": "botbuilder-python",
"token_count": 116
}
| 387 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from msrest.serialization import Model
class TrainRequestBody(Model):
"""Class the models the request body that is sent as feedback to the Train API."""
_attribute_map = {
"feedback_records": {"key": "feedbackRecords", "type": "[FeedbackRecord]"}
}
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.feedback_records = kwargs.get("feedback_records", None)
|
botbuilder-python/libraries/botbuilder-ai/botbuilder/ai/qna/models/train_request_body.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-ai/botbuilder/ai/qna/models/train_request_body.py",
"repo_id": "botbuilder-python",
"token_count": 173
}
| 388 |
{
"entities": {
"$instance": {
"Part": [
{
"endIndex": 5,
"modelType": "Regex Entity Extractor",
"recognitionSources": [
"externalEntities"
],
"startIndex": 0,
"text": "42ski",
"type": "Part"
},
{
"endIndex": 26,
"modelType": "Regex Entity Extractor",
"recognitionSources": [
"model"
],
"startIndex": 21,
"text": "kb423",
"type": "Part"
}
]
},
"Part": [
"42ski",
"kb423"
]
},
"intents": {
"Cancel": {
"score": 0.0127721056
},
"Delivery": {
"score": 0.004578639
},
"EntityTests": {
"score": 0.008811761
},
"Greeting": {
"score": 0.00256775436
},
"Help": {
"score": 0.00214677141
},
"None": {
"score": 0.27875194
},
"Roles": {
"score": 0.0273685548
},
"search": {
"score": 0.0084077
},
"SpecifyName": {
"score": 0.0148377549
},
"Travel": {
"score": 0.0039825947
},
"Weather_GetForecast": {
"score": 0.009611839
}
},
"sentiment": {
"label": "neutral",
"score": 0.5
},
"text": "42ski is a part like kb423",
"v3": {
"options": {
"externalEntities": [
{
"entityLength": 5,
"entityName": "Part",
"startIndex": 0
}
],
"includeAllIntents": true,
"includeAPIResults": true,
"includeInstanceData": true,
"log": true,
"preferExternalEntities": true,
"slot": "production"
},
"response": {
"prediction": {
"entities": {
"$instance": {
"Part": [
{
"length": 5,
"modelType": "Regex Entity Extractor",
"modelTypeId": 8,
"recognitionSources": [
"externalEntities"
],
"startIndex": 0,
"text": "42ski",
"type": "Part"
},
{
"length": 5,
"modelType": "Regex Entity Extractor",
"modelTypeId": 8,
"recognitionSources": [
"model"
],
"startIndex": 21,
"text": "kb423",
"type": "Part"
}
]
},
"Part": [
"42ski",
"kb423"
]
},
"intents": {
"Cancel": {
"score": 0.0127721056
},
"Delivery": {
"score": 0.004578639
},
"EntityTests": {
"score": 0.008811761
},
"Greeting": {
"score": 0.00256775436
},
"Help": {
"score": 0.00214677141
},
"None": {
"score": 0.27875194
},
"Roles": {
"score": 0.0273685548
},
"search": {
"score": 0.0084077
},
"SpecifyName": {
"score": 0.0148377549
},
"Travel": {
"score": 0.0039825947
},
"Weather.GetForecast": {
"score": 0.009611839
}
},
"normalizedQuery": "42ski is a part like kb423",
"sentiment": {
"label": "neutral",
"score": 0.5
},
"topIntent": "None"
},
"query": "42ski is a part like kb423"
}
}
}
|
botbuilder-python/libraries/botbuilder-ai/tests/luis/test_data/ExternalEntitiesAndRegex_v3.json/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-ai/tests/luis/test_data/ExternalEntitiesAndRegex_v3.json",
"repo_id": "botbuilder-python",
"token_count": 2542
}
| 389 |
{
"query": "my name is Emad",
"topScoringIntent": {
"intent": "SpecifyName",
"score": 0.8785189
},
"intents": [
{
"intent": "SpecifyName",
"score": 0.8785189
}
],
"entities": [
{
"entity": "emad",
"type": "Name",
"startIndex": 11,
"endIndex": 14,
"score": 0.8446753
}
]
}
|
botbuilder-python/libraries/botbuilder-ai/tests/luis/test_data/SingleIntent_SimplyEntity.json/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-ai/tests/luis/test_data/SingleIntent_SimplyEntity.json",
"repo_id": "botbuilder-python",
"token_count": 263
}
| 390 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""Common utilities for Django middleware."""
import collections
from applicationinsights import TelemetryClient
from applicationinsights.channel import (
AsynchronousQueue,
AsynchronousSender,
NullSender,
SynchronousQueue,
TelemetryChannel,
)
from ..processor.telemetry_processor import TelemetryProcessor
from .django_telemetry_processor import DjangoTelemetryProcessor
ApplicationInsightsSettings = collections.namedtuple(
"ApplicationInsightsSettings",
[
"ikey",
"channel_settings",
"use_view_name",
"record_view_arguments",
"log_exceptions",
],
)
ApplicationInsightsChannelSettings = collections.namedtuple(
"ApplicationInsightsChannelSettings", ["send_interval", "send_time", "endpoint"]
)
def load_settings():
from django.conf import settings # pylint: disable=import-outside-toplevel
if hasattr(settings, "APPLICATION_INSIGHTS"):
config = settings.APPLICATION_INSIGHTS
elif hasattr(settings, "APPLICATIONINSIGHTS"):
config = settings.APPLICATIONINSIGHTS
else:
config = {}
if not isinstance(config, dict):
config = {}
return ApplicationInsightsSettings(
ikey=config.get("ikey"),
use_view_name=config.get("use_view_name", False),
record_view_arguments=config.get("record_view_arguments", False),
log_exceptions=config.get("log_exceptions", True),
channel_settings=ApplicationInsightsChannelSettings(
endpoint=config.get("endpoint"),
send_interval=config.get("send_interval"),
send_time=config.get("send_time"),
),
)
saved_clients = {} # pylint: disable=invalid-name
saved_channels = {} # pylint: disable=invalid-name
def get_telemetry_client_with_processor(
key: str, channel: TelemetryChannel, telemetry_processor: TelemetryProcessor = None
) -> TelemetryClient:
"""Gets a telemetry client instance with a telemetry processor.
:param key: instrumentation key
:type key: str
:param channel: Telemetry channel
:type channel: TelemetryChannel
:param telemetry_processor: use an existing telemetry processor from caller.
:type telemetry_processor: TelemetryProcessor
:return: a telemetry client with telemetry processor.
:rtype: TelemetryClient
"""
client = TelemetryClient(key, channel)
processor = (
telemetry_processor
if telemetry_processor is not None
else DjangoTelemetryProcessor()
)
client.add_telemetry_processor(processor)
return client
def create_client(aisettings=None, telemetry_processor: TelemetryProcessor = None):
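    # Clients and channels are memoized per settings tuple, so repeated calls
    # with the same configuration reuse the same telemetry pipeline.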
global saved_clients, saved_channels # pylint: disable=invalid-name, global-statement
if aisettings is None:
aisettings = load_settings()
if aisettings in saved_clients:
return saved_clients[aisettings]
channel_settings = aisettings.channel_settings
if channel_settings in saved_channels:
channel = saved_channels[channel_settings]
else:
sender = AsynchronousSender(service_endpoint_uri=channel_settings.endpoint)
if channel_settings.send_time is not None:
sender.send_time = channel_settings.send_time
if channel_settings.send_interval is not None:
sender.send_interval = channel_settings.send_interval
queue = AsynchronousQueue(sender)
channel = TelemetryChannel(None, queue)
saved_channels[channel_settings] = channel
ikey = aisettings.ikey
if ikey is None:
return dummy_client("No ikey specified", telemetry_processor)
client = get_telemetry_client_with_processor(
aisettings.ikey, channel, telemetry_processor
)
saved_clients[aisettings] = client
return client
def dummy_client(
reason: str, telemetry_processor: TelemetryProcessor = None
): # pylint: disable=unused-argument
"""Creates a dummy channel so even if we're not logging telemetry, we can still send
along the real object to things that depend on it to exist"""
sender = NullSender()
queue = SynchronousQueue(sender)
channel = TelemetryChannel(None, queue)
client = get_telemetry_client_with_processor(
"00000000-0000-0000-0000-000000000000", channel, telemetry_processor
)
return client
|
botbuilder-python/libraries/botbuilder-applicationinsights/botbuilder/applicationinsights/django/common.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-applicationinsights/botbuilder/applicationinsights/django/common.py",
"repo_id": "botbuilder-python",
"token_count": 1563
}
| 391 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from .about import __version__
from .azure_queue_storage import AzureQueueStorage
from .cosmosdb_storage import CosmosDbStorage, CosmosDbConfig, CosmosDbKeyEscape
from .cosmosdb_partitioned_storage import (
CosmosDbPartitionedStorage,
CosmosDbPartitionedConfig,
)
from .blob_storage import BlobStorage, BlobStorageSettings
__all__ = [
"AzureQueueStorage",
"BlobStorage",
"BlobStorageSettings",
"CosmosDbStorage",
"CosmosDbConfig",
"CosmosDbKeyEscape",
"CosmosDbPartitionedStorage",
"CosmosDbPartitionedConfig",
"__version__",
]
|
botbuilder-python/libraries/botbuilder-azure/botbuilder/azure/__init__.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-azure/botbuilder/azure/__init__.py",
"repo_id": "botbuilder-python",
"token_count": 253
}
| 392 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from http import HTTPStatus
from typing import List, Union
from botbuilder.schema import (
Activity,
ActivityTypes,
AdaptiveCardInvokeResponse,
AdaptiveCardInvokeValue,
ChannelAccount,
InvokeResponse,
MessageReaction,
SignInConstants,
)
from .bot import Bot
from .serializer_helper import serializer_helper
from .bot_framework_adapter import BotFrameworkAdapter
from .turn_context import TurnContext
class ActivityHandler(Bot):
"""
Handles activities and should be subclassed.
.. remarks::
Derive from this class to handle particular activity types.
    You can add pre- and post-processing of activities by calling the base class
    from the derived class.
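    Example: a minimal illustrative subclass (the ``EchoBot`` name is hypothetical)::

        class EchoBot(ActivityHandler):
            async def on_message_activity(self, turn_context: TurnContext):
                await turn_context.send_activity(turn_context.activity.text)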
"""
async def on_turn(
self, turn_context: TurnContext
): # pylint: disable=arguments-differ
"""
Called by the adapter (for example, :class:`BotFrameworkAdapter`) at runtime
in order to process an inbound :class:`botbuilder.schema.Activity`.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
.. remarks::
It calls other methods in this class based on the type of the activity to
process, which allows a derived class to provide type-specific logic in a controlled way.
In a derived class, override this method to add logic that applies to all activity types.
Also
        - Add logic to apply before the type-specific logic by placing it before the call to the base class :meth:`on_turn()` method.
        - Add logic to apply after the type-specific logic by placing it after the call to the base class :meth:`on_turn()` method.
"""
if turn_context is None:
raise TypeError("ActivityHandler.on_turn(): turn_context cannot be None.")
if hasattr(turn_context, "activity") and turn_context.activity is None:
raise TypeError(
"ActivityHandler.on_turn(): turn_context must have a non-None activity."
)
if (
hasattr(turn_context.activity, "type")
and turn_context.activity.type is None
):
raise TypeError(
"ActivityHandler.on_turn(): turn_context activity must have a non-None type."
)
if turn_context.activity.type == ActivityTypes.message:
await self.on_message_activity(turn_context)
elif turn_context.activity.type == ActivityTypes.conversation_update:
await self.on_conversation_update_activity(turn_context)
elif turn_context.activity.type == ActivityTypes.message_reaction:
await self.on_message_reaction_activity(turn_context)
elif turn_context.activity.type == ActivityTypes.event:
await self.on_event_activity(turn_context)
elif turn_context.activity.type == ActivityTypes.invoke:
invoke_response = await self.on_invoke_activity(turn_context)
# If OnInvokeActivityAsync has already sent an InvokeResponse, do not send another one.
if invoke_response and not turn_context.turn_state.get(
BotFrameworkAdapter._INVOKE_RESPONSE_KEY # pylint: disable=protected-access
):
await turn_context.send_activity(
Activity(value=invoke_response, type=ActivityTypes.invoke_response)
)
elif turn_context.activity.type == ActivityTypes.end_of_conversation:
await self.on_end_of_conversation_activity(turn_context)
elif turn_context.activity.type == ActivityTypes.typing:
await self.on_typing_activity(turn_context)
elif turn_context.activity.type == ActivityTypes.installation_update:
await self.on_installation_update(turn_context)
else:
await self.on_unrecognized_activity_type(turn_context)
async def on_message_activity( # pylint: disable=unused-argument
self, turn_context: TurnContext
):
"""
Override this method in a derived class to provide logic specific to activities,
such as the conversational logic.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
"""
return
async def on_conversation_update_activity(self, turn_context: TurnContext):
"""
Invoked when a conversation update activity is received from the channel when the base behavior of
:meth:`on_turn()` is used.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
.. remarks::
When the :meth:`on_turn()` method receives a conversation update activity, it calls this
method.
Also
- If the conversation update activity indicates that members other than the bot joined the conversation,
it calls the :meth:`on_members_added_activity()` method.
- If the conversation update activity indicates that members other than the bot left the conversation,
it calls the :meth:`on_members_removed_activity()` method.
- In a derived class, override this method to add logic that applies to all conversation update activities.
            Add logic to apply before the member-added or member-removed logic by placing it before the call to this base class method.
"""
if (
turn_context.activity.members_added is not None
and turn_context.activity.members_added
):
return await self.on_members_added_activity(
turn_context.activity.members_added, turn_context
)
if (
turn_context.activity.members_removed is not None
and turn_context.activity.members_removed
):
return await self.on_members_removed_activity(
turn_context.activity.members_removed, turn_context
)
return
async def on_members_added_activity(
self, members_added: List[ChannelAccount], turn_context: TurnContext
): # pylint: disable=unused-argument
"""
Override this method in a derived class to provide logic for when members other than the bot join
the conversation. You can add your bot's welcome logic.
:param members_added: A list of all the members added to the conversation, as described by the
conversation update activity
:type members_added: :class:`typing.List`
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
.. remarks::
When the :meth:`on_conversation_update_activity()` method receives a conversation
update activity that indicates
one or more users other than the bot are joining the conversation, it calls this method.
"""
return
async def on_members_removed_activity(
self, members_removed: List[ChannelAccount], turn_context: TurnContext
): # pylint: disable=unused-argument
"""
Override this method in a derived class to provide logic for when members other than the bot leave
the conversation. You can add your bot's good-bye logic.
        :param members_removed: A list of all the members removed from the conversation, as described by the
        conversation update activity
        :type members_removed: :class:`typing.List`
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
.. remarks::
When the :meth:`on_conversation_update_activity()` method receives a conversation
update activity that indicates one or more users other than the bot are leaving the conversation,
it calls this method.
"""
return
async def on_message_reaction_activity(self, turn_context: TurnContext):
"""
Invoked when an event activity is received from the connector when the base behavior of
:meth:`on_turn()` is used.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
.. remarks::
Message reactions correspond to the user adding a 'like' or 'sad' etc. (often an emoji) to a previously
sent activity.
Message reactions are only supported by a few channels. The activity that the message reaction corresponds
to is indicated in the reply to Id property. The value of this property is the activity id of a previously
sent activity given back to the bot as the response from a send call.
When the :meth:`on_turn()` method receives a message reaction activity, it calls this
method.
- If the message reaction indicates that reactions were added to a message, it calls
:meth:`on_reaction_added()`.
- If the message reaction indicates that reactions were removed from a message, it calls
:meth:`on_reaction_removed()`.
In a derived class, override this method to add logic that applies to all message reaction activities.
            Add logic to apply before the reactions-added or reactions-removed logic by placing it before the call
            to this base class method.
            Add logic to apply after the reactions-added or reactions-removed logic by placing it after the call to
            this base class method.
"""
if turn_context.activity.reactions_added is not None:
await self.on_reactions_added(
turn_context.activity.reactions_added, turn_context
)
if turn_context.activity.reactions_removed is not None:
await self.on_reactions_removed(
turn_context.activity.reactions_removed, turn_context
)
async def on_reactions_added( # pylint: disable=unused-argument
self, message_reactions: List[MessageReaction], turn_context: TurnContext
):
"""
Override this method in a derived class to provide logic for when reactions to a previous activity
are added to the conversation.
:param message_reactions: The list of reactions added
:type message_reactions: :class:`typing.List`
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
.. remarks::
Message reactions correspond to the user adding a 'like' or 'sad' etc. (often an emoji)
to a previously sent message on the conversation.
Message reactions are supported by only a few channels.
The activity that the message is in reaction to is identified by the activity's reply to ID property.
The value of this property is the activity ID of a previously sent activity. When the bot sends an activity,
the channel assigns an ID to it, which is available in the resource response Id of the result.
"""
return
async def on_reactions_removed( # pylint: disable=unused-argument
self, message_reactions: List[MessageReaction], turn_context: TurnContext
):
"""
Override this method in a derived class to provide logic for when reactions to a previous activity
are removed from the conversation.
:param message_reactions: The list of reactions removed
:type message_reactions: :class:`typing.List`
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
.. remarks::
Message reactions correspond to the user adding a 'like' or 'sad' etc. (often an emoji)
to a previously sent message on the conversation. Message reactions are supported by only a few channels.
The activity that the message is in reaction to is identified by the activity's reply to Id property.
The value of this property is the activity ID of a previously sent activity. When the bot sends an activity,
the channel assigns an ID to it, which is available in the resource response Id of the result.
"""
return
async def on_event_activity(self, turn_context: TurnContext):
"""
Invoked when an event activity is received from the connector when the base behavior of
:meth:`on_turn()` is used.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
.. remarks::
When the :meth:`on_turn()` method receives an event activity, it calls this method.
If the activity name is `tokens/response`, it calls :meth:`on_token_response_event()`;
otherwise, it calls :meth:`on_event()`.
In a derived class, override this method to add logic that applies to all event activities.
Add logic to apply before the specific event-handling logic before the call to this base class method.
Add logic to apply after the specific event-handling logic after the call to this base class method.
Event activities communicate programmatic information from a client or channel to a bot.
The meaning of an event activity is defined by the event activity name property, which is meaningful within
the scope of a channel.
"""
if turn_context.activity.name == SignInConstants.token_response_event_name:
return await self.on_token_response_event(turn_context)
return await self.on_event(turn_context)
async def on_token_response_event( # pylint: disable=unused-argument
self, turn_context: TurnContext
):
"""
Invoked when a `tokens/response` event is received when the base behavior of
:meth:`on_event_activity()` is used.
If using an `oauth_prompt`, override this method to forward this activity to the current dialog.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
.. remarks::
When the :meth:`on_event()` method receives an event with an activity name of
`tokens/response`, it calls this method. If your bot uses an `oauth_prompt`, forward the incoming
activity to the current dialog.
"""
return
async def on_event( # pylint: disable=unused-argument
self, turn_context: TurnContext
):
"""
Invoked when an event other than `tokens/response` is received when the base behavior of
:meth:`on_event_activity()` is used.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
.. remarks::
            When the :meth:`on_event_activity()` method receives an event with an
activity name other than `tokens/response`, it calls this method.
This method could optionally be overridden if the bot is meant to handle miscellaneous events.
"""
return
async def on_end_of_conversation_activity( # pylint: disable=unused-argument
self, turn_context: TurnContext
):
"""
Invoked when a conversation end activity is received from the channel.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
"""
return
async def on_typing_activity( # pylint: disable=unused-argument
self, turn_context: TurnContext
):
"""
Override this in a derived class to provide logic specific to
ActivityTypes.typing activities, such as the conversational logic.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
"""
return
async def on_installation_update( # pylint: disable=unused-argument
self, turn_context: TurnContext
):
"""
Override this in a derived class to provide logic specific to
ActivityTypes.InstallationUpdate activities.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
"""
if turn_context.activity.action in ("add", "add-upgrade"):
return await self.on_installation_update_add(turn_context)
if turn_context.activity.action in ("remove", "remove-upgrade"):
return await self.on_installation_update_remove(turn_context)
return
async def on_installation_update_add( # pylint: disable=unused-argument
self, turn_context: TurnContext
):
"""
Override this in a derived class to provide logic specific to
ActivityTypes.InstallationUpdate activities with 'action' set to 'add'.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
"""
return
async def on_installation_update_remove( # pylint: disable=unused-argument
self, turn_context: TurnContext
):
"""
Override this in a derived class to provide logic specific to
ActivityTypes.InstallationUpdate activities with 'action' set to 'remove'.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
"""
return
async def on_unrecognized_activity_type( # pylint: disable=unused-argument
self, turn_context: TurnContext
):
"""
Invoked when an activity other than a message, conversation update, or event is received when the base
behavior of :meth:`on_turn()` is used.
If overridden, this method could potentially respond to any of the other activity types.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
.. remarks::
When the :meth:`on_turn()` method receives an activity that is not a message,
conversation update, message reaction, or event activity, it calls this method.
"""
return
async def on_invoke_activity( # pylint: disable=unused-argument
self, turn_context: TurnContext
) -> Union[InvokeResponse, None]:
"""
        Invoked when an invoke activity is received from the connector when the base behavior of :meth:`on_turn()` is used.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
"""
try:
if (
turn_context.activity.name
== SignInConstants.verify_state_operation_name
or turn_context.activity.name
== SignInConstants.token_exchange_operation_name
):
await self.on_sign_in_invoke(turn_context)
return self._create_invoke_response()
if turn_context.activity.name == "adaptiveCard/action":
invoke_value = self._get_adaptive_card_invoke_value(
turn_context.activity
)
return self._create_invoke_response(
await self.on_adaptive_card_invoke(turn_context, invoke_value)
)
raise _InvokeResponseException(HTTPStatus.NOT_IMPLEMENTED)
except _InvokeResponseException as invoke_exception:
return invoke_exception.create_invoke_response()
async def on_sign_in_invoke( # pylint: disable=unused-argument
self, turn_context: TurnContext
):
"""
Invoked when a signin/verifyState or signin/tokenExchange event is received when the base behavior of
        :meth:`on_invoke_activity()` is used.
        If using an OAuthPrompt, override this method to forward this Activity to the current dialog.
By default, this method does nothing.
:param turn_context: The context object for this turn
:type turn_context: :class:`botbuilder.core.TurnContext`
:returns: A task that represents the work queued to execute
"""
raise _InvokeResponseException(HTTPStatus.NOT_IMPLEMENTED)
async def on_adaptive_card_invoke(
self, turn_context: TurnContext, invoke_value: AdaptiveCardInvokeValue
) -> AdaptiveCardInvokeResponse:
"""
Invoked when the bot is sent an Adaptive Card Action Execute.
        When the on_invoke_activity method receives an Invoke with an Activity.name of `adaptiveCard/action`, it
calls this method.
:param turn_context: A context object for this turn.
:type turn_context: :class:`botbuilder.core.TurnContext`
        :param invoke_value: The incoming activity's value, deserialized as an AdaptiveCardInvokeValue.
        :type invoke_value: :class:`botframework.schema.models.AdaptiveCardInvokeValue`
        :return: The AdaptiveCardInvokeResponse object
"""
raise _InvokeResponseException(HTTPStatus.NOT_IMPLEMENTED)
@staticmethod
def _create_invoke_response(body: object = None) -> InvokeResponse:
return InvokeResponse(status=int(HTTPStatus.OK), body=serializer_helper(body))
def _get_adaptive_card_invoke_value(self, activity: Activity):
if activity.value is None:
response = self._create_adaptive_card_invoke_error_response(
HTTPStatus.BAD_REQUEST, "BadRequest", "Missing value property"
)
raise _InvokeResponseException(HTTPStatus.BAD_REQUEST, response)
invoke_value = None
try:
invoke_value = AdaptiveCardInvokeValue(**activity.value)
        except Exception:
response = self._create_adaptive_card_invoke_error_response(
HTTPStatus.BAD_REQUEST,
"BadRequest",
"Value property is not properly formed",
)
raise _InvokeResponseException(HTTPStatus.BAD_REQUEST, response)
if invoke_value.action is None:
response = self._create_adaptive_card_invoke_error_response(
HTTPStatus.BAD_REQUEST, "BadRequest", "Missing action property"
)
raise _InvokeResponseException(HTTPStatus.BAD_REQUEST, response)
if invoke_value.action.get("type") != "Action.Execute":
response = self._create_adaptive_card_invoke_error_response(
HTTPStatus.BAD_REQUEST,
"NotSupported",
f"The action '{invoke_value.action.get('type')}' is not supported.",
)
raise _InvokeResponseException(HTTPStatus.BAD_REQUEST, response)
return invoke_value
def _create_adaptive_card_invoke_error_response(
self, status_code: HTTPStatus, code: str, message: str
):
return AdaptiveCardInvokeResponse(
status_code=status_code,
type="application/vnd.microsoft.error",
value=Exception(code, message),
)
class _InvokeResponseException(Exception):
def __init__(self, status_code: HTTPStatus, body: object = None):
super(_InvokeResponseException, self).__init__()
self._status_code = status_code
self._body = body
def create_invoke_response(self) -> InvokeResponse:
return InvokeResponse(status=int(self._status_code), body=self._body)
|
botbuilder-python/libraries/botbuilder-core/botbuilder/core/activity_handler.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-core/botbuilder/core/activity_handler.py",
"repo_id": "botbuilder-python",
"token_count": 9306
}
| 393 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import Dict, Iterable, Type
class ComponentRegistration:
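    # Process-wide registry keyed by concrete class: adding another instance of
    # the same ComponentRegistration subclass replaces the earlier registration.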
@staticmethod
def get_components() -> Iterable["ComponentRegistration"]:
return _components.values()
@staticmethod
def add(component_registration: "ComponentRegistration"):
_components[component_registration.__class__] = component_registration
_components: Dict[Type, ComponentRegistration] = {}
|
botbuilder-python/libraries/botbuilder-core/botbuilder/core/component_registration.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-core/botbuilder/core/component_registration.py",
"repo_id": "botbuilder-python",
"token_count": 145
}
| 394 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""The memory transcript store stores transcripts in volatile memory."""
import datetime
from itertools import dropwhile, islice
from typing import List, Dict
from botbuilder.schema import Activity
from .transcript_logger import PagedResult, TranscriptInfo, TranscriptStore
# pylint: disable=line-too-long
class MemoryTranscriptStore(TranscriptStore):
"""This provider is most useful for simulating production storage when running locally against the
emulator or as part of a unit test.
"""
    channels: Dict[str, Dict[str, List[Activity]]] = {}  # channel_id -> conversation_id -> activities
async def log_activity(self, activity: Activity) -> None:
if not activity:
raise TypeError("activity cannot be None for log_activity()")
        # Get (or create) the channel map, then the conversation transcript.
        channel = self.channels.setdefault(activity.channel_id, {})
        transcript = channel.setdefault(activity.conversation.id, [])
        transcript.append(activity)
async def get_transcript_activities(
self,
channel_id: str,
conversation_id: str,
continuation_token: str = None,
        start_date: datetime.datetime = datetime.datetime.min,
) -> "PagedResult[Activity]":
if not channel_id:
raise TypeError("Missing channel_id")
if not conversation_id:
raise TypeError("Missing conversation_id")
paged_result = PagedResult()
if channel_id in self.channels:
channel = self.channels[channel_id]
if conversation_id in channel:
transcript = channel[conversation_id]
                if continuation_token:
                    ordered = [
                        x
                        for x in sorted(
                            transcript,
                            key=lambda x: x.timestamp or datetime.datetime.min,
                            reverse=False,
                        )
                        if (x.timestamp or datetime.datetime.min) >= start_date
                    ]
                    # Skip past the continuation-token entry, then take the next page.
                    paged_result.items = list(
                        islice(
                            dropwhile(lambda x: x.id != continuation_token, ordered),
                            1,
                            21,
                        )
                    )
                    if len(paged_result.items) == 20:
paged_result.continuation_token = paged_result.items[-1].id
else:
paged_result.items = [
x
for x in sorted(
transcript,
key=lambda x: x.timestamp or datetime.datetime.min,
reverse=False,
)
if (x.timestamp or datetime.datetime.min) >= start_date
][:20]
                    if len(paged_result.items) == 20:
paged_result.continuation_token = paged_result.items[-1].id
return paged_result
async def delete_transcript(self, channel_id: str, conversation_id: str) -> None:
if not channel_id:
raise TypeError("channel_id should not be None")
if not conversation_id:
raise TypeError("conversation_id should not be None")
if channel_id in self.channels:
if conversation_id in self.channels[channel_id]:
del self.channels[channel_id][conversation_id]
async def list_transcripts(
self, channel_id: str, continuation_token: str = None
) -> "PagedResult[TranscriptInfo]":
if not channel_id:
raise TypeError("Missing channel_id")
paged_result = PagedResult()
        if channel_id in self.channels:
            channel: Dict[str, List[Activity]] = self.channels[channel_id]
            # Build one TranscriptInfo per conversation, newest first.
            ordered = sorted(
                [
                    TranscriptInfo(
                        channel_id,
                        activities[0].timestamp if activities else None,
                        conversation_id,
                    )
                    for conversation_id, activities in channel.items()
                ],
                key=lambda x: x.created or datetime.datetime.min,
                reverse=True,
            )
            if continuation_token:
                # Skip past the continuation-token entry, then take the next page.
                paged_result.items = list(
                    islice(
                        dropwhile(lambda x: x.id != continuation_token, ordered),
                        1,
                        21,
                    )
                )
            else:
                paged_result.items = ordered[:20]
            if len(paged_result.items) == 20:
                paged_result.continuation_token = paged_result.items[-1].id
return paged_result
|
botbuilder-python/libraries/botbuilder-core/botbuilder/core/memory_transcript_store.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-core/botbuilder/core/memory_transcript_store.py",
"repo_id": "botbuilder-python",
"token_count": 3151
}
| 395 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import asyncio
from typing import Awaitable, Callable
from botbuilder.schema import Activity, ActivityTypes
from botframework.connector.auth import ClaimsIdentity, SkillValidation
from .bot_adapter import BotAdapter
from .middleware_set import Middleware
from .turn_context import TurnContext
class Timer:
clear_timer = False
def set_timeout(self, func, span):
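        # Schedule `func` to run after `span` seconds without blocking the caller;
        # the callback is skipped if set_clear_timer() is called first.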
async def some_fn(): # pylint: disable=function-redefined
await asyncio.sleep(span)
if not self.clear_timer:
await func()
asyncio.ensure_future(some_fn())
def set_clear_timer(self):
self.clear_timer = True
class ShowTypingMiddleware(Middleware):
"""
When added, this middleware will send typing activities back to the user when a Message activity
is received to let them know that the bot has received the message and is working on the response.
You can specify a delay before the first typing activity is sent and then a frequency, which
determines how often another typing activity is sent. Typing activities will continue to be sent
until your bot sends another message back to the user.
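    Example (illustrative; assumes ``adapter`` is an already configured adapter)::

        adapter.use(ShowTypingMiddleware(delay=0.5, period=2.0))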
"""
def __init__(self, delay: float = 0.5, period: float = 2.0):
"""
Initializes the middleware.
:param delay: Delay in seconds for the first typing indicator to be sent.
:param period: Delay in seconds for subsequent typing indicators.
"""
if delay < 0:
raise ValueError("Delay must be greater than or equal to zero")
if period <= 0:
raise ValueError("Repeat period must be greater than zero")
self._delay = delay
self._period = period
async def on_turn(
self, context: TurnContext, logic: Callable[[TurnContext], Awaitable]
):
timer = Timer()
def start_interval(context: TurnContext, delay, period):
async def aux():
typing_activity = Activity(
type=ActivityTypes.typing,
relates_to=context.activity.relates_to,
)
conversation_reference = TurnContext.get_conversation_reference(
context.activity
)
typing_activity = TurnContext.apply_conversation_reference(
typing_activity, conversation_reference
)
asyncio.ensure_future(
context.adapter.send_activities(context, [typing_activity])
)
# restart the timer, with the 'period' value for the delay
timer.set_timeout(aux, period)
# first time through we use the 'delay' value for the timer.
timer.set_timeout(aux, delay)
def stop_interval():
timer.set_clear_timer()
# Start a timer to periodically send the typing activity
# (bots running as skills should not send typing activity)
if (
context.activity.type == ActivityTypes.message
and not ShowTypingMiddleware._is_skill_bot(context)
):
start_interval(context, self._delay, self._period)
# call the bot logic
result = await logic()
stop_interval()
return result
@staticmethod
def _is_skill_bot(context: TurnContext) -> bool:
claims_identity = context.turn_state.get(BotAdapter.BOT_IDENTITY_KEY)
return isinstance(
claims_identity, ClaimsIdentity
) and SkillValidation.is_skill_claim(claims_identity.claims)
|
botbuilder-python/libraries/botbuilder-core/botbuilder/core/show_typing_middleware.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-core/botbuilder/core/show_typing_middleware.py",
"repo_id": "botbuilder-python",
"token_count": 1451
}
| 396 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from abc import ABC
from typing import Awaitable, Callable
from botbuilder.core import TurnContext, InvokeResponse
from botbuilder.schema import Activity
class StreamingActivityProcessor(ABC):
"""
Process streaming activities.
"""
async def process_streaming_activity(
self,
activity: Activity,
bot_callback_handler: Callable[[TurnContext], Awaitable],
) -> InvokeResponse:
raise NotImplementedError()
|
botbuilder-python/libraries/botbuilder-core/botbuilder/core/streaming/streaming_activity_processor.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-core/botbuilder/core/streaming/streaming_activity_processor.py",
"repo_id": "botbuilder-python",
"token_count": 173
}
| 397 |
import aiounittest
from botbuilder.core import AutoSaveStateMiddleware, BotState, TurnContext
from botbuilder.core.adapters import TestAdapter
from botbuilder.schema import Activity
async def aux_func():
return
class BotStateMock(BotState):
def __init__(self, state): # pylint: disable=super-init-not-called
self.state = state
self.assert_force = False
self.read_called = False
self.write_called = False
async def load(self, turn_context: TurnContext, force: bool = False) -> None:
assert turn_context is not None, "BotStateMock.load() not passed context."
if self.assert_force:
assert force, "BotStateMock.load(): force not set."
self.read_called = True
async def save_changes(
self, turn_context: TurnContext, force: bool = False
) -> None:
assert (
turn_context is not None
), "BotStateMock.save_changes() not passed context."
if self.assert_force:
assert force, "BotStateMock.save_changes(): force not set."
self.write_called = True
def get_storage_key(
self, turn_context: TurnContext # pylint: disable=unused-argument
) -> str:
return ""
class TestAutoSaveMiddleware(aiounittest.AsyncTestCase):
async def test_should_add_and_call_load_all_on_single_plugin(self):
adapter = TestAdapter()
context = TurnContext(adapter, Activity())
foo_state = BotStateMock({"foo": "bar"})
bot_state_set = AutoSaveStateMiddleware().add(foo_state)
await bot_state_set.bot_state_set.load_all(context)
async def test_should_add_and_call_load_all_on_multiple_plugins(self):
adapter = TestAdapter()
context = TurnContext(adapter, Activity())
foo_state = BotStateMock({"foo": "bar"})
bar_state = BotStateMock({"bar": "foo"})
bot_state_set = AutoSaveStateMiddleware([foo_state, bar_state])
await bot_state_set.bot_state_set.load_all(context)
async def test_should_add_and_call_save_all_changes_on_a_single_plugin(self):
adapter = TestAdapter()
context = TurnContext(adapter, Activity())
foo_state = BotStateMock({"foo": "bar"})
bot_state_set = AutoSaveStateMiddleware().add(foo_state)
await bot_state_set.bot_state_set.save_all_changes(context)
assert foo_state.write_called, "write not called for plugin."
async def test_should_add_and_call_save_all_changes_on_multiple_plugins(self):
adapter = TestAdapter()
context = TurnContext(adapter, Activity())
foo_state = BotStateMock({"foo": "bar"})
bar_state = BotStateMock({"bar": "foo"})
autosave_middleware = AutoSaveStateMiddleware([foo_state, bar_state])
await autosave_middleware.bot_state_set.save_all_changes(context)
assert (
foo_state.write_called or bar_state.write_called
), "write not called for either plugin."
assert foo_state.write_called, "write not called for 'foo_state' plugin."
assert bar_state.write_called, "write not called for 'bar_state' plugin."
async def test_should_pass_force_flag_through_in_load_all_call(self):
adapter = TestAdapter()
context = TurnContext(adapter, Activity())
foo_state = BotStateMock({"foo": "bar"})
foo_state.assert_force = True
autosave_middleware = AutoSaveStateMiddleware().add(foo_state)
await autosave_middleware.bot_state_set.load_all(context, True)
async def test_should_pass_force_flag_through_in_save_all_changes_call(self):
adapter = TestAdapter()
context = TurnContext(adapter, Activity())
foo_state = BotStateMock({"foo": "bar"})
foo_state.assert_force = True
autosave_middleware = AutoSaveStateMiddleware().add(foo_state)
await autosave_middleware.bot_state_set.save_all_changes(context, True)
async def test_should_work_as_a_middleware_plugin(self):
adapter = TestAdapter()
context = TurnContext(adapter, Activity())
foo_state = BotStateMock({"foo": "bar"})
autosave_middleware = AutoSaveStateMiddleware().add(foo_state)
await autosave_middleware.on_turn(context, aux_func)
assert foo_state.write_called, "save_all_changes() not called."
async def test_should_support_plugins_passed_to_constructor(self):
adapter = TestAdapter()
context = TurnContext(adapter, Activity())
foo_state = BotStateMock({"foo": "bar"})
        autosave_middleware = AutoSaveStateMiddleware([foo_state])
await autosave_middleware.on_turn(context, aux_func)
assert foo_state.write_called, "save_all_changes() not called."
async def test_should_not_add_any_bot_state_on_construction_if_none_are_passed_in(
self,
):
middleware = AutoSaveStateMiddleware()
assert (
not middleware.bot_state_set.bot_states
), "should not have added any BotState."
|
botbuilder-python/libraries/botbuilder-core/tests/test_auto_save_middleware.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-core/tests/test_auto_save_middleware.py",
"repo_id": "botbuilder-python",
"token_count": 1983
}
| 398 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import aiounittest
from botframework.connector.auth import MicrosoftAppCredentials
from botbuilder.core import TurnContext
from botbuilder.core.adapters import TestAdapter
from botbuilder.schema import Activity, ConversationReference, ChannelAccount
RECEIVED_MESSAGE = Activity(type="message", text="received")
UPDATED_ACTIVITY = Activity(type="message", text="update")
DELETED_ACTIVITY_REFERENCE = ConversationReference(activity_id="1234")
class TestTestAdapter(aiounittest.AsyncTestCase):
    async def test_should_call_bot_logic_when_receive_activity_is_called(self):
async def logic(context: TurnContext):
assert context
assert context.activity
assert context.activity.type == "message"
assert context.activity.text == "test"
assert context.activity.id
assert context.activity.from_property
assert context.activity.recipient
assert context.activity.conversation
assert context.activity.channel_id
assert context.activity.service_url
adapter = TestAdapter(logic)
await adapter.receive_activity("test")
async def test_should_support_receive_activity_with_activity(self):
async def logic(context: TurnContext):
assert context.activity.type == "message"
assert context.activity.text == "test"
adapter = TestAdapter(logic)
await adapter.receive_activity(Activity(type="message", text="test"))
async def test_should_set_activity_type_when_receive_activity_receives_activity_without_type(
self,
):
async def logic(context: TurnContext):
assert context.activity.type == "message"
assert context.activity.text == "test"
adapter = TestAdapter(logic)
await adapter.receive_activity(Activity(text="test"))
async def test_should_support_custom_activity_id_in_receive_activity(self):
async def logic(context: TurnContext):
assert context.activity.id == "myId"
assert context.activity.type == "message"
assert context.activity.text == "test"
adapter = TestAdapter(logic)
await adapter.receive_activity(Activity(type="message", text="test", id="myId"))
async def test_should_call_bot_logic_when_send_is_called(self):
async def logic(context: TurnContext):
assert context.activity.text == "test"
adapter = TestAdapter(logic)
await adapter.send("test")
async def test_should_send_and_receive_when_test_is_called(self):
async def logic(context: TurnContext):
await context.send_activity(RECEIVED_MESSAGE)
adapter = TestAdapter(logic)
await adapter.test("test", "received")
async def test_should_send_and_throw_assertion_error_when_test_is_called(self):
async def logic(context: TurnContext):
await context.send_activity(RECEIVED_MESSAGE)
adapter = TestAdapter(logic)
try:
await adapter.test("test", "foobar")
except AssertionError:
pass
else:
raise AssertionError("Assertion error should have been raised")
async def test_tests_should_call_test_for_each_tuple(self):
counter = 0
async def logic(context: TurnContext):
nonlocal counter
counter += 1
await context.send_activity(Activity(type="message", text=str(counter)))
adapter = TestAdapter(logic)
await adapter.tests(("test", "1"), ("test", "2"), ("test", "3"))
assert counter == 3
async def test_tests_should_call_test_for_each_list(self):
counter = 0
async def logic(context: TurnContext):
nonlocal counter
counter += 1
await context.send_activity(Activity(type="message", text=str(counter)))
adapter = TestAdapter(logic)
await adapter.tests(["test", "1"], ["test", "2"], ["test", "3"])
assert counter == 3
async def test_should_assert_reply_after_send(self):
async def logic(context: TurnContext):
await context.send_activity(RECEIVED_MESSAGE)
adapter = TestAdapter(logic)
test_flow = await adapter.send("test")
await test_flow.assert_reply("received")
async def test_should_support_context_update_activity_call(self):
async def logic(context: TurnContext):
await context.update_activity(UPDATED_ACTIVITY)
await context.send_activity(RECEIVED_MESSAGE)
adapter = TestAdapter(logic)
await adapter.test("test", "received")
assert len(adapter.updated_activities) == 1
assert adapter.updated_activities[0].text == UPDATED_ACTIVITY.text
async def test_should_support_context_delete_activity_call(self):
async def logic(context: TurnContext):
await context.delete_activity(DELETED_ACTIVITY_REFERENCE)
await context.send_activity(RECEIVED_MESSAGE)
adapter = TestAdapter(logic)
await adapter.test("test", "received")
assert len(adapter.deleted_activities) == 1
assert (
adapter.deleted_activities[0].activity_id
== DELETED_ACTIVITY_REFERENCE.activity_id
)
async def test_get_user_token_returns_null(self):
adapter = TestAdapter()
activity = Activity(
channel_id="directline", from_property=ChannelAccount(id="testuser")
)
turn_context = TurnContext(adapter, activity)
token_response = await adapter.get_user_token(turn_context, "myConnection")
assert not token_response
oauth_app_credentials = MicrosoftAppCredentials(None, None)
token_response = await adapter.get_user_token(
turn_context, "myConnection", oauth_app_credentials=oauth_app_credentials
)
assert not token_response
async def test_get_user_token_returns_null_with_code(self):
adapter = TestAdapter()
activity = Activity(
channel_id="directline", from_property=ChannelAccount(id="testuser")
)
turn_context = TurnContext(adapter, activity)
token_response = await adapter.get_user_token(
turn_context, "myConnection", "abc123"
)
assert not token_response
oauth_app_credentials = MicrosoftAppCredentials(None, None)
token_response = await adapter.get_user_token(
turn_context,
"myConnection",
"abc123",
oauth_app_credentials=oauth_app_credentials,
)
assert not token_response
async def test_get_user_token_returns_token(self):
adapter = TestAdapter()
connection_name = "myConnection"
channel_id = "directline"
user_id = "testUser"
token = "abc123"
activity = Activity(
channel_id=channel_id, from_property=ChannelAccount(id=user_id)
)
turn_context = TurnContext(adapter, activity)
adapter.add_user_token(connection_name, channel_id, user_id, token)
token_response = await adapter.get_user_token(turn_context, connection_name)
assert token_response
assert token == token_response.token
assert connection_name == token_response.connection_name
oauth_app_credentials = MicrosoftAppCredentials(None, None)
token_response = await adapter.get_user_token(
turn_context, connection_name, oauth_app_credentials=oauth_app_credentials
)
assert token_response
assert token == token_response.token
assert connection_name == token_response.connection_name
    async def test_get_user_token_returns_token_with_magic_code(self):
adapter = TestAdapter()
connection_name = "myConnection"
channel_id = "directline"
user_id = "testUser"
token = "abc123"
magic_code = "888999"
activity = Activity(
channel_id=channel_id, from_property=ChannelAccount(id=user_id)
)
turn_context = TurnContext(adapter, activity)
adapter.add_user_token(connection_name, channel_id, user_id, token, magic_code)
# First no magic_code
token_response = await adapter.get_user_token(turn_context, connection_name)
assert not token_response
# Can be retrieved with magic code
token_response = await adapter.get_user_token(
turn_context, connection_name, magic_code
)
assert token_response
assert token == token_response.token
assert connection_name == token_response.connection_name
# Then can be retrieved without magic code
token_response = await adapter.get_user_token(turn_context, connection_name)
assert token_response
assert token == token_response.token
assert connection_name == token_response.connection_name
# Then can be retrieved using customized AppCredentials
oauth_app_credentials = MicrosoftAppCredentials(None, None)
token_response = await adapter.get_user_token(
turn_context, connection_name, oauth_app_credentials=oauth_app_credentials
)
assert token_response
assert token == token_response.token
assert connection_name == token_response.connection_name
async def test_should_validate_no_reply_when_no_reply_expected(self):
async def logic(context: TurnContext):
await context.send_activity(RECEIVED_MESSAGE)
adapter = TestAdapter(logic)
test_flow = await adapter.test("test", "received")
await test_flow.assert_no_reply("should be no additional replies")
async def test_should_timeout_waiting_for_assert_no_reply_when_no_reply_expected(
self,
):
async def logic(context: TurnContext):
await context.send_activity(RECEIVED_MESSAGE)
adapter = TestAdapter(logic)
test_flow = await adapter.test("test", "received")
await test_flow.assert_no_reply("no reply received", 500)
async def test_should_throw_error_with_assert_no_reply_when_no_reply_expected_but_was_received(
self,
):
async def logic(context: TurnContext):
activities = [RECEIVED_MESSAGE, RECEIVED_MESSAGE]
await context.send_activities(activities)
adapter = TestAdapter(logic)
test_flow = await adapter.test("test", "received")
with self.assertRaises(Exception):
await test_flow.assert_no_reply("should be no additional replies")
|
botbuilder-python/libraries/botbuilder-core/tests/test_test_adapter.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-core/tests/test_test_adapter.py",
"repo_id": "botbuilder-python",
"token_count": 4281
}
| 399 |
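A minimal usage sketch for the TestAdapter exercised above, assuming only that botbuilder-core is installed; the echo_logic function and its reply text are illustrative, not part of the library.

import asyncio
from botbuilder.core import TurnContext
from botbuilder.core.adapters import TestAdapter

async def echo_logic(context: TurnContext):
    # Echo the user's text back so the test flow can assert on it.
    await context.send_activity(f"echo: {context.activity.text}")

async def main():
    adapter = TestAdapter(echo_logic)
    # send() delivers a user message; assert_reply() checks the bot's answer.
    test_flow = await adapter.send("hi")
    await test_flow.assert_reply("echo: hi")

asyncio.run(main())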
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from .find_values_options import FindValuesOptions
class FindChoicesOptions(FindValuesOptions):
"""Contains options to control how input is matched against a list of choices"""
def __init__(
self,
no_value: bool = None,
no_action: bool = None,
recognize_numbers: bool = True,
recognize_ordinals: bool = True,
**kwargs,
):
"""
Parameters:
-----------
        no_value: (Optional) If `True`, the choices `value` field will NOT be searched over. Defaults to `False`.
        no_action: (Optional) If `True`, the choices `action.title` field will NOT be searched over.
        Defaults to `False`.
        recognize_numbers: (Optional) Indicates whether the recognizer should check for Numbers using the
        NumberRecognizer's NumberModel.
        recognize_ordinals: (Optional) Indicates whether the recognizer should check for Ordinal Numbers using
        the NumberRecognizer's OrdinalModel.
"""
super().__init__(**kwargs)
self.no_value = no_value
self.no_action = no_action
self.recognize_numbers = recognize_numbers
self.recognize_ordinals = recognize_ordinals
|
botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/choices/find_choices_options.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/choices/find_choices_options.py",
"repo_id": "botbuilder-python",
"token_count": 474
}
| 400 |
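A short hedged example of constructing FindChoicesOptions; the flag values below are illustrative and show how ordinal recognition can be disabled independently of number recognition.

from botbuilder.dialogs.choices import FindChoicesOptions

options = FindChoicesOptions(
    no_value=False,           # still search each choice's `value` field
    recognize_numbers=True,   # "1" can match the first choice
    recognize_ordinals=False, # "first" will no longer match anything
)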
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import Dict
class DialogInstance:
"""
Tracking information for a dialog on the stack.
"""
def __init__(
self, id: str = None, state: Dict[str, object] = None
): # pylint: disable=invalid-name
"""
Gets or sets the ID of the dialog and gets or sets the instance's persisted state.
:var self.id: The ID of the dialog
:vartype self.id: str
:var self.state: The instance's persisted state.
:vartype self.state: :class:`typing.Dict[str, object]`
"""
self.id = id # pylint: disable=invalid-name
self.state = state or {}
def __str__(self):
"""
        Return a string representation of the dialog instance and its persisted state.
        :return: A string containing the dialog id and each state entry.
:rtype: str
"""
result = "\ndialog_instance_id: %s\n" % self.id
if self.state is not None:
for key, value in self.state.items():
result += " {} ({})\n".format(key, str(value))
return result
|
botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/dialog_instance.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/dialog_instance.py",
"repo_id": "botbuilder-python",
"token_count": 466
}
| 401 |
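A small sketch showing how DialogInstance renders its persisted state through __str__; the id and state entries are placeholders.

from botbuilder.dialogs import DialogInstance

instance = DialogInstance(id="waterfall", state={"step_index": 2})
print(instance)
# dialog_instance_id: waterfall
#   step_index (2)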
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from .alias_path_resolver import AliasPathResolver
from .at_at_path_resolver import AtAtPathResolver
from .at_path_resolver import AtPathResolver
from .dollar_path_resolver import DollarPathResolver
from .hash_path_resolver import HashPathResolver
from .percent_path_resolver import PercentPathResolver
__all__ = [
"AliasPathResolver",
"AtAtPathResolver",
"AtPathResolver",
"DollarPathResolver",
"HashPathResolver",
"PercentPathResolver",
]
|
botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/path_resolvers/__init__.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/path_resolvers/__init__.py",
"repo_id": "botbuilder-python",
"token_count": 181
}
| 402 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from botbuilder.dialogs.memory import scope_path
from .memory_scope import MemoryScope
class SettingsMemoryScope(MemoryScope):
def __init__(self):
super().__init__(scope_path.SETTINGS)
self._empty_settings = {}
self.include_in_snapshot = False
def get_memory(self, dialog_context: "DialogContext") -> object:
if not dialog_context:
raise TypeError(f"Expecting: DialogContext, but received None")
settings: dict = dialog_context.context.turn_state.get(
scope_path.SETTINGS, None
)
if not settings:
settings = self._empty_settings
return settings
def set_memory(self, dialog_context: "DialogContext", memory: object):
raise Exception(
f"{self.__class__.__name__}.set_memory not supported (read only)"
)
|
botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/settings_memory_scope.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/settings_memory_scope.py",
"repo_id": "botbuilder-python",
"token_count": 358
}
| 403 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from botframework.connector.auth import AppCredentials
class OAuthPromptSettings:
def __init__(
self,
connection_name: str,
title: str,
text: str = None,
timeout: int = None,
oauth_app_credentials: AppCredentials = None,
end_on_invalid_message: bool = False,
):
"""
Settings used to configure an `OAuthPrompt` instance.
Parameters:
connection_name (str): Name of the OAuth connection being used.
title (str): The title of the cards signin button.
text (str): (Optional) additional text included on the signin card.
timeout (int): (Optional) number of milliseconds the prompt will wait for the user to authenticate.
`OAuthPrompt` defaults value to `900,000` ms (15 minutes).
oauth_app_credentials (AppCredentials): (Optional) AppCredentials to use for OAuth. If None,
the Bots credentials are used.
end_on_invalid_message (bool): (Optional) value indicating whether the OAuthPrompt should end upon
receiving an invalid message. Generally the OAuthPrompt will ignore incoming messages from the
user during the auth flow, if they are not related to the auth flow. This flag enables ending the
OAuthPrompt rather than ignoring the user's message. Typically, this flag will be set to 'true',
but is 'false' by default for backwards compatibility.
"""
self.connection_name = connection_name
self.title = title
self.text = text
self.timeout = timeout
        self.oauth_app_credentials = oauth_app_credentials
self.end_on_invalid_message = end_on_invalid_message
|
botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/oauth_prompt_settings.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/oauth_prompt_settings.py",
"repo_id": "botbuilder-python",
"token_count": 696
}
| 404 |
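A hedged construction sketch for OAuthPromptSettings; "MyOAuthConnection" is a placeholder for an OAuth connection configured on the bot's Azure resource, not a real name.

from botbuilder.dialogs.prompts import OAuthPromptSettings

settings = OAuthPromptSettings(
    connection_name="MyOAuthConnection",  # placeholder connection name
    title="Sign In",
    text="Please sign in to continue.",
    timeout=300000,  # ms; OAuthPrompt defaults to 900,000 (15 minutes) if None
)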
[bdist_wheel]
universal=0
|
botbuilder-python/libraries/botbuilder-dialogs/setup.cfg/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/setup.cfg",
"repo_id": "botbuilder-python",
"token_count": 10
}
| 405 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import aiounittest
from botbuilder.dialogs import DialogSet, ComponentDialog, WaterfallDialog
from botbuilder.core import ConversationState, MemoryStorage, NullTelemetryClient
class MyBotTelemetryClient(NullTelemetryClient):
# pylint: disable=useless-return
def __init__(self):
super().__init__()
return
class DialogSetTests(aiounittest.AsyncTestCase):
def test_dialogset_constructor_valid(self):
convo_state = ConversationState(MemoryStorage())
dialog_state_property = convo_state.create_property("dialogstate")
dialog_set = DialogSet(dialog_state_property)
assert dialog_set is not None
def test_dialogset_constructor_null_property(self):
self.assertRaises(TypeError, lambda: DialogSet(None))
def test_dialogset_constructor_null_from_componentdialog(self):
ComponentDialog("MyId")
def test_dialogset_telemetryset(self):
convo_state = ConversationState(MemoryStorage())
dialog_state_property = convo_state.create_property("dialogstate")
dialog_set = DialogSet(dialog_state_property)
dialog_set.add(WaterfallDialog("A"))
dialog_set.add(WaterfallDialog("B"))
self.assertTrue(
isinstance(
dialog_set.find_dialog("A").telemetry_client, NullTelemetryClient
)
)
self.assertTrue(
isinstance(
dialog_set.find_dialog("B").telemetry_client, NullTelemetryClient
)
)
dialog_set.telemetry_client = MyBotTelemetryClient()
self.assertTrue(
isinstance(
dialog_set.find_dialog("A").telemetry_client, MyBotTelemetryClient
)
)
self.assertTrue(
isinstance(
dialog_set.find_dialog("B").telemetry_client, MyBotTelemetryClient
)
)
def test_dialogset_nulltelemetryset(self):
convo_state = ConversationState(MemoryStorage())
dialog_state_property = convo_state.create_property("dialogstate")
dialog_set = DialogSet(dialog_state_property)
dialog_set.add(WaterfallDialog("A"))
dialog_set.add(WaterfallDialog("B"))
dialog_set.telemetry_client = MyBotTelemetryClient()
dialog_set.telemetry_client = None
self.assertFalse(
isinstance(
dialog_set.find_dialog("A").telemetry_client, MyBotTelemetryClient
)
)
self.assertFalse(
isinstance(
dialog_set.find_dialog("B").telemetry_client, MyBotTelemetryClient
)
)
self.assertTrue(
isinstance(
dialog_set.find_dialog("A").telemetry_client, NullTelemetryClient
)
)
self.assertTrue(
isinstance(
dialog_set.find_dialog("B").telemetry_client, NullTelemetryClient
)
)
# pylint: disable=pointless-string-statement
"""
    This test will be enabled when telemetry tests are fixed for DialogSet telemetry
def test_dialogset_addtelemetryset(self):
convo_state = ConversationState(MemoryStorage())
dialog_state_property = convo_state.create_property("dialogstate")
dialog_set = DialogSet(dialog_state_property)
dialog_set.add(WaterfallDialog("A"))
dialog_set.add(WaterfallDialog("B"))
dialog_set.telemetry_client = MyBotTelemetryClient()
dialog_set.add(WaterfallDialog("C"))
self.assertTrue(isinstance(dialog_set.find_dialog("C").telemetry_client, MyBotTelemetryClient))
"""
|
botbuilder-python/libraries/botbuilder-dialogs/tests/test_dialog_set.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/tests/test_dialog_set.py",
"repo_id": "botbuilder-python",
"token_count": 1597
}
| 406 |
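A sketch mirroring the tests above: assigning a telemetry client to a DialogSet fans it out to every dialog the set contains. The dialog id "greeting" is illustrative.

from botbuilder.core import ConversationState, MemoryStorage, NullTelemetryClient
from botbuilder.dialogs import DialogSet, WaterfallDialog

convo_state = ConversationState(MemoryStorage())
dialogs = DialogSet(convo_state.create_property("dialogstate"))
dialogs.add(WaterfallDialog("greeting"))
# Setting the client on the set propagates it to "greeting" as well.
dialogs.telemetry_client = NullTelemetryClient()
assert isinstance(dialogs.find_dialog("greeting").telemetry_client, NullTelemetryClient)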
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import Awaitable, Callable, Optional
from aiohttp.web import (
Request,
Response,
json_response,
WebSocketResponse,
HTTPBadRequest,
HTTPMethodNotAllowed,
HTTPUnauthorized,
HTTPUnsupportedMediaType,
)
from botbuilder.core import (
Bot,
CloudAdapterBase,
InvokeResponse,
TurnContext,
)
from botbuilder.core.streaming import (
StreamingActivityProcessor,
StreamingHttpDriver,
StreamingRequestHandler,
)
from botbuilder.schema import Activity
from botbuilder.integration.aiohttp.streaming import AiohttpWebSocket
from botframework.connector import AsyncBfPipeline, BotFrameworkConnectorConfiguration
from botframework.connector.aio import ConnectorClient
from botframework.connector.auth import (
AuthenticateRequestResult,
BotFrameworkAuthentication,
BotFrameworkAuthenticationFactory,
ConnectorFactory,
MicrosoftAppCredentials,
)
from .bot_framework_http_adapter_integration_base import (
BotFrameworkHttpAdapterIntegrationBase,
)
class CloudAdapter(CloudAdapterBase, BotFrameworkHttpAdapterIntegrationBase):
def __init__(self, bot_framework_authentication: BotFrameworkAuthentication = None):
"""
Initializes a new instance of the CloudAdapter class.
:param bot_framework_authentication: Optional BotFrameworkAuthentication instance
"""
# pylint: disable=invalid-name
if not bot_framework_authentication:
bot_framework_authentication = BotFrameworkAuthenticationFactory.create()
self._AUTH_HEADER_NAME = "authorization"
self._CHANNEL_ID_HEADER_NAME = "channelid"
super().__init__(bot_framework_authentication)
async def process(
self, request: Request, bot: Bot, ws_response: WebSocketResponse = None
) -> Optional[Response]:
if not request:
raise TypeError("request can't be None")
# if ws_response is None:
# raise TypeError("ws_response can't be None")
if not bot:
raise TypeError("bot can't be None")
try:
# Only GET requests for web socket connects are allowed
if (
request.method == "GET"
and ws_response
and ws_response.can_prepare(request)
):
# All socket communication will be handled by the internal streaming-specific BotAdapter
await self._connect(bot, request, ws_response)
elif request.method == "POST":
# Deserialize the incoming Activity
if "application/json" in request.headers["Content-Type"]:
body = await request.json()
else:
raise HTTPUnsupportedMediaType()
activity: Activity = Activity().deserialize(body)
# A POST request must contain an Activity
if not activity.type:
raise HTTPBadRequest
# Grab the auth header from the inbound http request
                auth_header = request.headers.get("Authorization", "")
# Process the inbound activity with the bot
invoke_response = await self.process_activity(
auth_header, activity, bot.on_turn
)
# Write the response, serializing the InvokeResponse
if invoke_response:
return json_response(
data=invoke_response.body, status=invoke_response.status
)
return Response(status=201)
else:
raise HTTPMethodNotAllowed
except (HTTPUnauthorized, PermissionError) as _:
raise HTTPUnauthorized
async def _connect(
self, bot: Bot, request: Request, ws_response: WebSocketResponse
):
if ws_response is None:
raise TypeError("ws_response can't be None")
# Grab the auth header from the inbound http request
auth_header = request.headers.get(self._AUTH_HEADER_NAME)
# Grab the channelId which should be in the http headers
channel_id = request.headers.get(self._CHANNEL_ID_HEADER_NAME)
authentication_request_result = (
await self.bot_framework_authentication.authenticate_streaming_request(
auth_header, channel_id
)
)
# Transition the request to a WebSocket connection
await ws_response.prepare(request)
bf_web_socket = AiohttpWebSocket(ws_response)
streaming_activity_processor = _StreamingActivityProcessor(
authentication_request_result, self, bot, bf_web_socket
)
await streaming_activity_processor.listen()
class _StreamingActivityProcessor(StreamingActivityProcessor):
def __init__(
self,
authenticate_request_result: AuthenticateRequestResult,
adapter: CloudAdapter,
bot: Bot,
web_socket: AiohttpWebSocket = None,
) -> None:
self._authenticate_request_result = authenticate_request_result
self._adapter = adapter
# Internal reuse of the existing StreamingRequestHandler class
self._request_handler = StreamingRequestHandler(bot, self, web_socket)
# Fix up the connector factory so connector create from it will send over this connection
self._authenticate_request_result.connector_factory = (
_StreamingConnectorFactory(self._request_handler)
)
async def listen(self):
await self._request_handler.listen()
async def process_streaming_activity(
self,
activity: Activity,
bot_callback_handler: Callable[[TurnContext], Awaitable],
) -> InvokeResponse:
return await self._adapter.process_activity(
self._authenticate_request_result, activity, bot_callback_handler
)
class _StreamingConnectorFactory(ConnectorFactory):
def __init__(self, request_handler: StreamingRequestHandler) -> None:
self._request_handler = request_handler
self._service_url = None
async def create(
self, service_url: str, audience: str # pylint: disable=unused-argument
) -> ConnectorClient:
if not self._service_url:
self._service_url = service_url
elif service_url != self._service_url:
raise RuntimeError(
"This is a streaming scenario, all connectors from this factory must all be for the same url."
)
# TODO: investigate if Driver and pipeline should be moved here
streaming_driver = StreamingHttpDriver(self._request_handler)
config = BotFrameworkConnectorConfiguration(
MicrosoftAppCredentials.empty(),
service_url,
pipeline_type=AsyncBfPipeline,
driver=streaming_driver,
)
streaming_driver.config = config
connector_client = ConnectorClient(None, custom_configuration=config)
return connector_client
|
botbuilder-python/libraries/botbuilder-integration-aiohttp/botbuilder/integration/aiohttp/cloud_adapter.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-integration-aiohttp/botbuilder/integration/aiohttp/cloud_adapter.py",
"repo_id": "botbuilder-python",
"token_count": 2925
}
| 407 |
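An illustrative aiohttp wiring for the CloudAdapter above; MyBot is a hypothetical ActivityHandler, and the route and port are conventional defaults rather than requirements.

from aiohttp import web
from botbuilder.core import ActivityHandler, TurnContext
from botbuilder.integration.aiohttp import CloudAdapter

class MyBot(ActivityHandler):  # hypothetical bot implementation
    async def on_message_activity(self, turn_context: TurnContext):
        await turn_context.send_activity("Hello from CloudAdapter!")

adapter = CloudAdapter()  # falls back to BotFrameworkAuthenticationFactory defaults
bot = MyBot()

async def messages(request: web.Request) -> web.Response:
    # POSTed activities (and GET websocket upgrades) are delegated to the adapter.
    return await adapter.process(request, bot)

app = web.Application()
app.router.add_post("/api/messages", messages)

if __name__ == "__main__":
    web.run_app(app, host="localhost", port=3978)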
from threading import current_thread
from aiohttp.web import middleware
# Map of thread id => POST body text
_REQUEST_BODIES = {}
def retrieve_aiohttp_body():
"""
    Retrieve the POST body text from the temporary cache.
    The POST body is keyed by thread id and resides in the cache
    only for the lifetime of the request.
"""
result = _REQUEST_BODIES.pop(current_thread().ident, None)
return result
@middleware
async def bot_telemetry_middleware(request, handler):
"""Process the incoming Flask request."""
    if request.headers.get("Content-Type") == "application/json":
body = await request.json()
_REQUEST_BODIES[current_thread().ident] = body
response = await handler(request)
return response
|
botbuilder-python/libraries/botbuilder-integration-applicationinsights-aiohttp/botbuilder/integration/applicationinsights/aiohttp/aiohttp_telemetry_middleware.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-integration-applicationinsights-aiohttp/botbuilder/integration/applicationinsights/aiohttp/aiohttp_telemetry_middleware.py",
"repo_id": "botbuilder-python",
"token_count": 271
}
| 408 |
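A one-line registration sketch: installing the middleware on an aiohttp application so that retrieve_aiohttp_body() can later recover the JSON POST body cached for the current thread.

from aiohttp import web
from botbuilder.integration.applicationinsights.aiohttp import bot_telemetry_middleware

app = web.Application(middlewares=[bot_telemetry_middleware])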
"""
"wheel" copyright (c) 2012-2017 Daniel Holth <[email protected]> and
contributors.
The MIT License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
Create an Azure wheel (.whl) distribution (a wheel is a built archive format).
This file is a copy of the official bdist_wheel file from wheel 0.30.0a0, extended
at the bottom with some Microsoft extensions for the Azure SDK for Python.
"""
import csv
import hashlib
import os
import subprocess
import warnings
import shutil
import json
import sys
try:
import sysconfig
except ImportError: # pragma nocover
# Python < 2.7
import distutils.sysconfig as sysconfig
import pkg_resources
safe_name = pkg_resources.safe_name
safe_version = pkg_resources.safe_version
from shutil import rmtree
from email.generator import Generator
from distutils.core import Command
from distutils.sysconfig import get_python_version
from distutils import log as logger
from wheel.pep425tags import get_abbr_impl, get_impl_ver, get_abi_tag, get_platform
from wheel.util import native, open_for_csv
from wheel.archive import archive_wheelfile
from wheel.pkginfo import read_pkg_info, write_pkg_info
from wheel.metadata import pkginfo_to_dict
from wheel import pep425tags, metadata
from wheel import __version__ as wheel_version
def safer_name(name):
return safe_name(name).replace("-", "_")
def safer_version(version):
return safe_version(version).replace("-", "_")
class bdist_wheel(Command):
description = "create a wheel distribution"
user_options = [
("bdist-dir=", "b", "temporary directory for creating the distribution"),
(
"plat-name=",
"p",
"platform name to embed in generated filenames "
"(default: %s)" % get_platform(),
),
(
"keep-temp",
"k",
"keep the pseudo-installation tree around after "
+ "creating the distribution archive",
),
("dist-dir=", "d", "directory to put final built distributions in"),
("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
("relative", None, "build the archive using relative paths" "(default: false)"),
(
"owner=",
"u",
"Owner name used when creating a tar file" " [default: current user]",
),
(
"group=",
"g",
"Group name used when creating a tar file" " [default: current group]",
),
("universal", None, "make a universal wheel" " (default: false)"),
(
"python-tag=",
None,
"Python implementation compatibility tag"
" (default: py%s)" % get_impl_ver()[0],
),
]
boolean_options = ["keep-temp", "skip-build", "relative", "universal"]
def initialize_options(self):
self.bdist_dir = None
self.data_dir = None
self.plat_name = None
self.plat_tag = None
self.format = "zip"
self.keep_temp = False
self.dist_dir = None
self.distinfo_dir = None
self.egginfo_dir = None
self.root_is_pure = None
self.skip_build = None
self.relative = False
self.owner = None
self.group = None
self.universal = False
self.python_tag = "py" + get_impl_ver()[0]
self.plat_name_supplied = False
def finalize_options(self):
if self.bdist_dir is None:
bdist_base = self.get_finalized_command("bdist").bdist_base
self.bdist_dir = os.path.join(bdist_base, "wheel")
self.data_dir = self.wheel_dist_name + ".data"
self.plat_name_supplied = self.plat_name is not None
need_options = ("dist_dir", "plat_name", "skip_build")
self.set_undefined_options("bdist", *zip(need_options, need_options))
self.root_is_pure = not (
self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
)
# Support legacy [wheel] section for setting universal
wheel = self.distribution.get_option_dict("wheel")
if "universal" in wheel:
# please don't define this in your global configs
val = wheel["universal"][1].strip()
if val.lower() in ("1", "true", "yes"):
self.universal = True
@property
def wheel_dist_name(self):
"""Return distribution full name with - replaced with _"""
return "-".join(
(
safer_name(self.distribution.get_name()),
safer_version(self.distribution.get_version()),
)
)
def get_tag(self):
# bdist sets self.plat_name if unset, we should only use it for purepy
# wheels if the user supplied it.
if self.plat_name_supplied:
plat_name = self.plat_name
elif self.root_is_pure:
plat_name = "any"
else:
plat_name = self.plat_name or get_platform()
if (
plat_name in ("linux-x86_64", "linux_x86_64")
and sys.maxsize == 2147483647
):
plat_name = "linux_i686"
plat_name = plat_name.replace("-", "_").replace(".", "_")
if self.root_is_pure:
if self.universal:
impl = "py2.py3"
else:
impl = self.python_tag
tag = (impl, "none", plat_name)
else:
impl_name = get_abbr_impl()
impl_ver = get_impl_ver()
# PEP 3149
abi_tag = str(get_abi_tag()).lower()
tag = (impl_name + impl_ver, abi_tag, plat_name)
supported_tags = pep425tags.get_supported(
supplied_platform=plat_name if self.plat_name_supplied else None
)
# XXX switch to this alternate implementation for non-pure:
assert tag == supported_tags[0], "%s != %s" % (tag, supported_tags[0])
return tag
def get_archive_basename(self):
"""Return archive name without extension"""
impl_tag, abi_tag, plat_tag = self.get_tag()
archive_basename = "%s-%s-%s-%s" % (
self.wheel_dist_name,
impl_tag,
abi_tag,
plat_tag,
)
return archive_basename
def run(self):
build_scripts = self.reinitialize_command("build_scripts")
build_scripts.executable = "python"
if not self.skip_build:
self.run_command("build")
install = self.reinitialize_command("install", reinit_subcommands=True)
install.root = self.bdist_dir
install.compile = False
install.skip_build = self.skip_build
install.warn_dir = False
# A wheel without setuptools scripts is more cross-platform.
# Use the (undocumented) `no_ep` option to setuptools'
# install_scripts command to avoid creating entry point scripts.
install_scripts = self.reinitialize_command("install_scripts")
install_scripts.no_ep = True
# Use a custom scheme for the archive, because we have to decide
# at installation time which scheme to use.
for key in ("headers", "scripts", "data", "purelib", "platlib"):
setattr(install, "install_" + key, os.path.join(self.data_dir, key))
basedir_observed = ""
if os.name == "nt":
# win32 barfs if any of these are ''; could be '.'?
# (distutils.command.install:change_roots bug)
basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
self.install_libbase = self.install_lib = basedir_observed
setattr(
install,
"install_purelib" if self.root_is_pure else "install_platlib",
basedir_observed,
)
logger.info("installing to %s", self.bdist_dir)
self.run_command("install")
archive_basename = self.get_archive_basename()
pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
if not self.relative:
archive_root = self.bdist_dir
else:
archive_root = os.path.join(
self.bdist_dir, self._ensure_relative(install.install_base)
)
self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
self.distinfo_dir = os.path.join(
self.bdist_dir, "%s.dist-info" % self.wheel_dist_name
)
self.egg2dist(self.egginfo_dir, self.distinfo_dir)
self.write_wheelfile(self.distinfo_dir)
self.write_record(self.bdist_dir, self.distinfo_dir)
# Make the archive
if not os.path.exists(self.dist_dir):
os.makedirs(self.dist_dir)
wheel_name = archive_wheelfile(pseudoinstall_root, archive_root)
# Sign the archive
if "WHEEL_TOOL" in os.environ:
subprocess.call([os.environ["WHEEL_TOOL"], "sign", wheel_name])
# Add to 'Distribution.dist_files' so that the "upload" command works
getattr(self.distribution, "dist_files", []).append(
("bdist_wheel", get_python_version(), wheel_name)
)
if not self.keep_temp:
if self.dry_run:
logger.info("removing %s", self.bdist_dir)
else:
rmtree(self.bdist_dir)
def write_wheelfile(
self, wheelfile_base, generator="bdist_wheel (" + wheel_version + ")"
):
from email.message import Message
msg = Message()
msg["Wheel-Version"] = "1.0" # of the spec
msg["Generator"] = generator
msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
# Doesn't work for bdist_wininst
impl_tag, abi_tag, plat_tag = self.get_tag()
for impl in impl_tag.split("."):
for abi in abi_tag.split("."):
for plat in plat_tag.split("."):
msg["Tag"] = "-".join((impl, abi, plat))
wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
logger.info("creating %s", wheelfile_path)
with open(wheelfile_path, "w") as f:
Generator(f, maxheaderlen=0).flatten(msg)
def _ensure_relative(self, path):
# copied from dir_util, deleted
drive, path = os.path.splitdrive(path)
if path[0:1] == os.sep:
path = drive + path[1:]
return path
def _pkginfo_to_metadata(self, egg_info_path, pkginfo_path):
return metadata.pkginfo_to_metadata(egg_info_path, pkginfo_path)
def license_file(self):
"""Return license filename from a license-file key in setup.cfg, or None."""
metadata = self.distribution.get_option_dict("metadata")
if not "license_file" in metadata:
return None
return metadata["license_file"][1]
def setupcfg_requirements(self):
"""Generate requirements from setup.cfg as
('Requires-Dist', 'requirement; qualifier') tuples. From a metadata
section in setup.cfg:
[metadata]
provides-extra = extra1
extra2
requires-dist = requirement; qualifier
another; qualifier2
unqualified
Yields
('Provides-Extra', 'extra1'),
('Provides-Extra', 'extra2'),
('Requires-Dist', 'requirement; qualifier'),
('Requires-Dist', 'another; qualifier2'),
('Requires-Dist', 'unqualified')
"""
metadata = self.distribution.get_option_dict("metadata")
# our .ini parser folds - to _ in key names:
for key, title in (
("provides_extra", "Provides-Extra"),
("requires_dist", "Requires-Dist"),
):
            if key not in metadata:
continue
field = metadata[key]
for line in field[1].splitlines():
line = line.strip()
if not line:
continue
yield (title, line)
def add_requirements(self, metadata_path):
"""Add additional requirements from setup.cfg to file metadata_path"""
additional = list(self.setupcfg_requirements())
if not additional:
return
pkg_info = read_pkg_info(metadata_path)
if "Provides-Extra" in pkg_info or "Requires-Dist" in pkg_info:
warnings.warn("setup.cfg requirements overwrite values from setup.py")
del pkg_info["Provides-Extra"]
del pkg_info["Requires-Dist"]
for k, v in additional:
pkg_info[k] = v
write_pkg_info(metadata_path, pkg_info)
def egg2dist(self, egginfo_path, distinfo_path):
"""Convert an .egg-info directory into a .dist-info directory"""
def adios(p):
"""Appropriately delete directory, file or link."""
if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
shutil.rmtree(p)
elif os.path.exists(p):
os.unlink(p)
adios(distinfo_path)
if not os.path.exists(egginfo_path):
# There is no egg-info. This is probably because the egg-info
# file/directory is not named matching the distribution name used
# to name the archive file. Check for this case and report
# accordingly.
import glob
pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
possible = glob.glob(pat)
err = "Egg metadata expected at %s but not found" % (egginfo_path,)
if possible:
alt = os.path.basename(possible[0])
err += " (%s found - possible misnamed archive file?)" % (alt,)
raise ValueError(err)
if os.path.isfile(egginfo_path):
# .egg-info is a single file
pkginfo_path = egginfo_path
pkg_info = self._pkginfo_to_metadata(egginfo_path, egginfo_path)
os.mkdir(distinfo_path)
else:
# .egg-info is a directory
pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
pkg_info = self._pkginfo_to_metadata(egginfo_path, pkginfo_path)
# ignore common egg metadata that is useless to wheel
shutil.copytree(
egginfo_path,
distinfo_path,
ignore=lambda x, y: set(
("PKG-INFO", "requires.txt", "SOURCES.txt", "not-zip-safe")
),
)
# delete dependency_links if it is only whitespace
dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
with open(dependency_links_path, "r") as dependency_links_file:
dependency_links = dependency_links_file.read().strip()
if not dependency_links:
adios(dependency_links_path)
write_pkg_info(os.path.join(distinfo_path, "METADATA"), pkg_info)
# XXX deprecated. Still useful for current distribute/setuptools.
metadata_path = os.path.join(distinfo_path, "METADATA")
self.add_requirements(metadata_path)
# XXX intentionally a different path than the PEP.
metadata_json_path = os.path.join(distinfo_path, "metadata.json")
pymeta = pkginfo_to_dict(metadata_path, distribution=self.distribution)
if "description" in pymeta:
description_filename = "DESCRIPTION.rst"
description_text = pymeta.pop("description")
description_path = os.path.join(distinfo_path, description_filename)
with open(description_path, "wb") as description_file:
description_file.write(description_text.encode("utf-8"))
pymeta["extensions"]["python.details"]["document_names"][
"description"
] = description_filename
# XXX heuristically copy any LICENSE/LICENSE.txt?
license = self.license_file()
if license:
license_filename = "LICENSE.txt"
shutil.copy(license, os.path.join(self.distinfo_dir, license_filename))
pymeta["extensions"]["python.details"]["document_names"][
"license"
] = license_filename
with open(metadata_json_path, "w") as metadata_json:
json.dump(pymeta, metadata_json, sort_keys=True)
adios(egginfo_path)
def write_record(self, bdist_dir, distinfo_dir):
from wheel.util import urlsafe_b64encode
record_path = os.path.join(distinfo_dir, "RECORD")
record_relpath = os.path.relpath(record_path, bdist_dir)
def walk():
for dir, dirs, files in os.walk(bdist_dir):
dirs.sort()
for f in sorted(files):
yield os.path.join(dir, f)
def skip(path):
"""Wheel hashes every possible file."""
return path == record_relpath
with open_for_csv(record_path, "w+") as record_file:
writer = csv.writer(record_file)
for path in walk():
relpath = os.path.relpath(path, bdist_dir)
if skip(relpath):
hash = ""
size = ""
else:
with open(path, "rb") as f:
data = f.read()
digest = hashlib.sha256(data).digest()
hash = "sha256=" + native(urlsafe_b64encode(digest))
size = len(data)
                rel_posix_path = os.path.relpath(path, bdist_dir).replace(os.path.sep, "/")
                writer.writerow((rel_posix_path, hash, size))
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from distutils import log as logger
import os.path
# from wheel.bdist_wheel import bdist_wheel
class azure_bdist_wheel(bdist_wheel):
description = "Create an Azure wheel distribution"
user_options = bdist_wheel.user_options + [
("azure-namespace-package=", None, "Name of the deepest nspkg used")
]
def initialize_options(self):
bdist_wheel.initialize_options(self)
self.azure_namespace_package = None
def finalize_options(self):
bdist_wheel.finalize_options(self)
if self.azure_namespace_package and not self.azure_namespace_package.endswith(
"-nspkg"
):
raise ValueError("azure_namespace_package must finish by -nspkg")
    def run(self):
        if not self.distribution.install_requires:
            self.distribution.install_requires = []
        if self.azure_namespace_package:  # guard against appending "None>=2.0.0"
            self.distribution.install_requires.append(
                "{}>=2.0.0".format(self.azure_namespace_package)
            )
        bdist_wheel.run(self)
def write_record(self, bdist_dir, distinfo_dir):
if self.azure_namespace_package:
# Split and remove last part, assuming it's "nspkg"
subparts = self.azure_namespace_package.split("-")[0:-1]
folder_with_init = [
os.path.join(*subparts[0 : i + 1]) for i in range(len(subparts))
]
for azure_sub_package in folder_with_init:
init_file = os.path.join(bdist_dir, azure_sub_package, "__init__.py")
if os.path.isfile(init_file):
logger.info(
"manually remove {} while building the wheel".format(init_file)
)
os.remove(init_file)
else:
raise ValueError(
"Unable to find {}. Are you sure of your namespace package?".format(
init_file
)
)
bdist_wheel.write_record(self, bdist_dir, distinfo_dir)
cmdclass = {"bdist_wheel": azure_bdist_wheel}
|
botbuilder-python/libraries/botframework-connector/azure_bdist_wheel.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/azure_bdist_wheel.py",
"repo_id": "botbuilder-python",
"token_count": 9342
}
| 409 |
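A hedged sketch of how this vendored command class is typically wired into a setup.py placed next to azure_bdist_wheel.py; the package name and version below are placeholders.

from setuptools import setup
from azure_bdist_wheel import cmdclass

setup(
    name="my-azure-package",  # placeholder
    version="0.0.0",          # placeholder
    cmdclass=cmdclass,        # routes `python setup.py bdist_wheel` here
)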
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from logging import Logger
from botframework.connector.aio import ConnectorClient
from ..about import __version__
from ..bot_framework_sdk_client_async import BotFrameworkConnectorConfiguration
from .connector_factory import ConnectorFactory
from .service_client_credentials_factory import ServiceClientCredentialsFactory
USER_AGENT = f"Microsoft-BotFramework/3.1 (BotBuilder Python/{__version__})"
class _ConnectorFactoryImpl(ConnectorFactory):
def __init__(
self,
app_id: str,
to_channel_from_bot_oauth_scope: str,
login_endpoint: str,
validate_authority: bool,
credential_factory: ServiceClientCredentialsFactory,
connector_client_configuration: BotFrameworkConnectorConfiguration = None,
logger: Logger = None,
) -> None:
self._app_id = app_id
self._to_channel_from_bot_oauth_scope = to_channel_from_bot_oauth_scope
self._login_endpoint = login_endpoint
self._validate_authority = validate_authority
self._credential_factory = credential_factory
self._connector_client_configuration = connector_client_configuration
self._logger = logger
async def create(self, service_url: str, audience: str = None) -> ConnectorClient:
        # Use the credentials factory to create credentials specific to this particular cloud environment.
credentials = await self._credential_factory.create_credentials(
self._app_id,
audience or self._to_channel_from_bot_oauth_scope,
self._login_endpoint,
self._validate_authority,
)
# A new connector client for making calls against this serviceUrl using credentials derived
# from the current appId and the specified audience.
if self._connector_client_configuration:
client = ConnectorClient(
credentials,
base_url=service_url,
custom_configuration=self._connector_client_configuration,
)
else:
client = ConnectorClient(credentials, base_url=service_url)
client.config.add_user_agent(USER_AGENT)
return client
|
botbuilder-python/libraries/botframework-connector/botframework/connector/auth/_connector_factory_impl.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/botframework/connector/auth/_connector_factory_impl.py",
"repo_id": "botbuilder-python",
"token_count": 857
}
| 410 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
class CredentialProvider:
"""CredentialProvider.
This class allows Bots to provide their own implementation
of what is, and what is not, a valid appId and password.
This is useful in the case of multi-tenant bots, where the bot
may need to call out to a service to determine if a particular
appid/password pair is valid.
"""
async def is_valid_appid(self, app_id: str) -> bool:
"""Validate AppId.
This method is async to enable custom implementations
        that may need to call out to services to validate the appId / password pair.
:param app_id: bot appid
:return: true if it is a valid AppId
"""
raise NotImplementedError()
async def get_app_password(self, app_id: str) -> str:
"""Get the app password for a given bot appId, if it is not a valid appId, return Null
This method is async to enable custom implementations
        that may need to call out to services to validate the appId / password pair.
:param app_id: bot appid
:return: password or null for invalid appid
"""
raise NotImplementedError()
async def is_authentication_disabled(self) -> bool:
"""Checks if bot authentication is disabled.
Return true if bot authentication is disabled.
This method is async to enable custom implementations
        that may need to call out to services to validate the appId / password pair.
:return: true if bot authentication is disabled.
"""
raise NotImplementedError()
class SimpleCredentialProvider(CredentialProvider):
def __init__(self, app_id: str, password: str):
self.app_id = app_id
self.password = password
async def is_valid_appid(self, app_id: str) -> bool:
return self.app_id == app_id
async def get_app_password(self, app_id: str) -> str:
return self.password if self.app_id == app_id else None
async def is_authentication_disabled(self) -> bool:
return not self.app_id
class _DelegatingCredentialProvider(CredentialProvider):
    def __init__(self, credentials_factory: "ServiceClientCredentialsFactory"):
self._credentials_factory = credentials_factory
async def is_valid_appid(self, app_id: str) -> bool:
return await self._credentials_factory.is_valid_app_id(app_id)
async def get_app_password(self, app_id: str) -> str:
raise NotImplementedError()
async def is_authentication_disabled(self) -> bool:
return await self._credentials_factory.is_authentication_disabled()
|
botbuilder-python/libraries/botframework-connector/botframework/connector/auth/credential_provider.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/botframework/connector/auth/credential_provider.py",
"repo_id": "botbuilder-python",
"token_count": 944
}
| 411 |
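A minimal sketch of SimpleCredentialProvider; the app id and password are placeholders, not real credentials.

import asyncio
from botframework.connector.auth import SimpleCredentialProvider

async def main():
    provider = SimpleCredentialProvider("my-app-id", "my-app-password")
    assert await provider.is_valid_appid("my-app-id")
    assert await provider.get_app_password("other-app") is None
    # Authentication is considered disabled only when no app id is configured.
    assert not await provider.is_authentication_disabled()

asyncio.run(main())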
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import Optional, Type
from msrest.async_client import SDKClientAsync
from msrest.universal_http.async_abc import AsyncHTTPSender as AsyncHttpDriver
from msrest.pipeline import AsyncPipeline
from msrest.pipeline.aiohttp import AsyncHTTPSender
from ._configuration import ConnectorClientConfiguration
class BotFrameworkConnectorConfiguration(ConnectorClientConfiguration):
def __init__(
self,
credentials,
base_url: str,
*,
pipeline_type: Optional[Type[AsyncPipeline]] = None,
sender: Optional[AsyncHTTPSender] = None,
driver: Optional[AsyncHttpDriver] = None
):
super().__init__(credentials, base_url)
# The overwrite hierarchy should be well documented
self.sender = sender
self.driver = driver
self.custom_pipeline = pipeline_type(self) if pipeline_type else None
class BotFrameworkSDKClientAsync(SDKClientAsync):
def __init__(self, config: BotFrameworkConnectorConfiguration) -> None:
super().__init__(config)
self._client.config.pipeline = (
config.custom_pipeline or self._client.config.pipeline
)
|
botbuilder-python/libraries/botframework-connector/botframework/connector/bot_framework_sdk_client_async.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/botframework/connector/bot_framework_sdk_client_async.py",
"repo_id": "botbuilder-python",
"token_count": 447
}
| 412 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrest.exceptions import HttpOperationError
from ... import models
class TeamsOperations(object):
"""TeamsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def get_teams_channels(
self, team_id, custom_headers=None, raw=False, **operation_config
):
"""Fetches channel list for a given team.
Fetch the channel list.
:param team_id: Team Id
:type team_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ConversationList or ClientRawResponse if raw=true
:rtype: ~botframework.connector.teams.models.ConversationList or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
"""
# Construct URL
url = self.get_teams_channels.metadata["url"]
path_format_arguments = {
"teamId": self._serialize.url("team_id", team_id, "str")
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise HttpOperationError(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("ConversationList", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_teams_channels.metadata = {"url": "/v3/teams/{teamId}/conversations"}
def get_team_details(
self, team_id, custom_headers=None, raw=False, **operation_config
):
"""Fetches details related to a team.
Fetch details for a team.
:param team_id: Team Id
:type team_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: TeamDetails or ClientRawResponse if raw=true
:rtype: ~botframework.connector.teams.models.TeamDetails or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
"""
# Construct URL
url = self.get_team_details.metadata["url"]
path_format_arguments = {
"teamId": self._serialize.url("team_id", team_id, "str")
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise HttpOperationError(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("TeamDetails", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_team_details.metadata = {"url": "/v3/teams/{teamId}"}
def fetch_participant(
self,
meeting_id: str,
participant_id: str,
tenant_id: str,
custom_headers=None,
raw=False,
**operation_config
):
"""Fetches Teams meeting participant details.
:param meeting_id: Teams meeting id
:type meeting_id: str
:param participant_id: Teams meeting participant id
:type participant_id: str
:param tenant_id: Teams meeting tenant id
:type tenant_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
        :return: TeamsMeetingParticipant or ClientRawResponse if raw=true
        :rtype: ~botframework.connector.teams.models.TeamsMeetingParticipant or
         ~msrest.pipeline.ClientRawResponse
:raises:
:class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
"""
# Construct URL
url = self.fetch_participant.metadata["url"]
path_format_arguments = {
"meetingId": self._serialize.url("meeting_id", meeting_id, "str"),
"participantId": self._serialize.url(
"participant_id", participant_id, "str"
),
"tenantId": self._serialize.url("tenant_id", tenant_id, "str"),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise HttpOperationError(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("TeamsMeetingParticipant", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
fetch_participant.metadata = {
"url": "/v1/meetings/{meetingId}/participants/{participantId}?tenantId={tenantId}"
}
def fetch_meeting(
self, meeting_id: str, custom_headers=None, raw=False, **operation_config
):
"""Fetch meeting information.
:param meeting_id: Meeting Id, encoded as a BASE64 string.
:type meeting_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: MeetingInfo or ClientRawResponse if raw=true
:rtype: ~botframework.connector.teams.models.MeetingInfo or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
"""
# Construct URL
        url = self.fetch_meeting.metadata["url"]
path_format_arguments = {
"meetingId": self._serialize.url("meeting_id", meeting_id, "str")
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise HttpOperationError(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("MeetingInfo", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
fetch_participant.metadata = {"url": "/v1/meetings/{meetingId}"}
|
botbuilder-python/libraries/botframework-connector/botframework/connector/teams/operations/teams_operations.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/botframework/connector/teams/operations/teams_operations.py",
"repo_id": "botbuilder-python",
"token_count": 3773
}
| 413 |
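A hypothetical call sketch for the operations above; it assumes an already-authenticated Teams connector client whose `teams` attribute exposes TeamsOperations, and all ids are placeholders.

# details = connector_client.teams.get_team_details("19:team-id@thread.skype")
# channels = connector_client.teams.get_teams_channels("19:team-id@thread.skype")
# participant = connector_client.teams.fetch_participant(
#     "meeting-id", "participant-id", "tenant-id"
# )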
interactions:
- request:
body: '{"bot": {"id": "B21UTEF8S:T03CWQ0QB"}, "members": [{"id": "B21UTEF8S:T03CWQ0QB"}],
"activity": {"type": "message", "channelId": "slack", "from": {"id": "B21UTEF8S:T03CWQ0QB"},
"recipient": {"id": "B21UTEF8S:T03CWQ0QB"}, "textFormat": "markdown", "attachmentLayout":
"list", "text": "Hi there!"}}'
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Length: ['295']
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.6.2 (Windows-10-10.0.16299-SP0) requests/2.18.1 msrest/0.4.23
azure-botframework-connector/3.0]
method: POST
uri: https://slack.botframework.com/v3/conversations
response:
body: {string: "{\r\n \"error\": {\r\n \"code\": \"BadArgument\",\r\n \"\
message\": \"Bots cannot IM other bots\"\r\n }\r\n}"}
headers:
cache-control: [no-cache]
content-length: ['95']
content-type: [application/json; charset=utf-8]
date: ['Fri, 29 Dec 2017 15:25:44 GMT']
expires: ['-1']
pragma: [no-cache]
request-context: ['appId=cid-v1:6814484e-c0d5-40ea-9dba-74ff29ca4f62']
server: [Microsoft-IIS/10.0]
strict-transport-security: [max-age=31536000]
x-powered-by: [ASP.NET]
status: {code: 400, message: Bad Request}
version: 1
|
botbuilder-python/libraries/botframework-connector/tests/recordings/test_conversations_create_conversation_with_bot_as_only_member_fails.yaml/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/tests/recordings/test_conversations_create_conversation_with_bot_as_only_member_fails.yaml",
"repo_id": "botbuilder-python",
"token_count": 651
}
| 414 |
interactions:
- request:
body: '{"type": "message", "channelId": "slack", "from": {"id": "B21UTEF8S:T03CWQ0QB"},
"recipient": {"id": "U19KH8EHJ:T03CWQ0QB"}, "textFormat": "markdown", "attachmentLayout":
"list", "text": "Updating activity..."}'
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Length: ['210']
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.6.2 (Windows-10-10.0.16299-SP0) requests/2.18.1 msrest/0.4.23
azure-botframework-connector/3.0]
method: POST
uri: https://slack.botframework.com/v3/conversations/B21UTEF8S%3AT03CWQ0QB%3AD2369CT7C/activities
response:
body: {string: "{\r\n \"id\": \"1514572506.000066\"\r\n}"}
headers:
cache-control: [no-cache]
content-length: ['33']
content-type: [application/json; charset=utf-8]
date: ['Fri, 29 Dec 2017 18:35:06 GMT']
expires: ['-1']
pragma: [no-cache]
request-context: ['appId=cid-v1:6814484e-c0d5-40ea-9dba-74ff29ca4f62']
server: [Microsoft-IIS/10.0]
strict-transport-security: [max-age=31536000]
vary: [Accept-Encoding]
x-powered-by: [ASP.NET]
status: {code: 200, message: OK}
- request:
body: '{"type": "message", "channelId": "slack", "from": {"id": "B21UTEF8S:T03CWQ0QB"},
"recipient": {"id": "U19KH8EHJ:T03CWQ0QB"}, "textFormat": "markdown", "attachmentLayout":
"list", "text": "Activity updated."}'
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Length: ['207']
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.6.2 (Windows-10-10.0.16299-SP0) requests/2.18.1 msrest/0.4.23
azure-botframework-connector/3.0]
method: PUT
uri: https://slack.botframework.com/v3/conversations/INVALID_ID/activities/1514572506.000066
response:
body: {string: "{\r\n \"error\": {\r\n \"code\": \"ServiceError\",\r\n \
\ \"message\": \"Invalid ConversationId: INVALID_ID\"\r\n }\r\n}"}
headers:
cache-control: [no-cache]
content-length: ['105']
content-type: [application/json; charset=utf-8]
date: ['Fri, 29 Dec 2017 18:35:07 GMT']
expires: ['-1']
pragma: [no-cache]
request-context: ['appId=cid-v1:6814484e-c0d5-40ea-9dba-74ff29ca4f62']
server: [Microsoft-IIS/10.0]
strict-transport-security: [max-age=31536000]
x-powered-by: [ASP.NET]
status: {code: 400, message: Bad Request}
version: 1
|
botbuilder-python/libraries/botframework-connector/tests/recordings/test_conversations_update_activity_invalid_conversation_id_fails.yaml/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/tests/recordings/test_conversations_update_activity_invalid_conversation_id_fails.yaml",
"repo_id": "botbuilder-python",
"token_count": 1213
}
| 415 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

from asyncio import Lock, Semaphore
from typing import List

from botframework.streaming.payloads.assemblers import PayloadStreamAssembler


class PayloadStream:
    def __init__(self, assembler: PayloadStreamAssembler):
        self._assembler = assembler
        self._buffer_queue: List[List[int]] = []
        self._lock = Lock()
        self._data_available = Semaphore(0)
        self._producer_length = 0  # total length
        self._consumer_position = 0  # read position
        self._active: List[int] = []
        self._active_offset = 0
        self._end = False

    def __len__(self):
        return self._producer_length

    def give_buffer(self, buffer: List[int]):
        self._buffer_queue.append(buffer)
        self._producer_length += len(buffer)
        self._data_available.release()

    def done_producing(self):
        # An empty buffer signals the consumer that production is finished.
        self.give_buffer([])

    def write(self, buffer: List[int], offset: int, count: int):
        buffer_copy = buffer[offset : offset + count]
        self.give_buffer(buffer_copy)

    async def read(self, buffer: List[int], offset: int, count: int):
        if self._end:
            return 0
        if not self._active:
            # Wait until the producer has released at least one buffer,
            # then take the oldest buffer off the queue.
            await self._data_available.acquire()
            async with self._lock:
                self._active = self._buffer_queue.pop(0)
        available_count = min(len(self._active) - self._active_offset, count)
        for index in range(available_count):
            buffer[offset + index] = self._active[self._active_offset]
            self._active_offset += 1
        self._consumer_position += available_count
        if self._active_offset >= len(self._active):
            self._active = []
            self._active_offset = 0
        if (
            self._assembler
            and self._consumer_position >= self._assembler.content_length
        ):
            self._end = True
        return available_count

    async def read_until_end(self):
        result = [None] * self._assembler.content_length
        current_size = 0
        while not self._end:
            count = await self.read(
                result, current_size, self._assembler.content_length
            )
            current_size += count
        return result
|
botbuilder-python/libraries/botframework-streaming/botframework/streaming/payload_stream.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-streaming/botframework/streaming/payload_stream.py",
"repo_id": "botbuilder-python",
"token_count": 972
}
| 416 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

from uuid import UUID
from typing import List

from botframework.streaming.payload_transport import PayloadSender
from botframework.streaming.payloads.models import PayloadTypes, RequestPayload

from .payload_disassembler import PayloadDisassembler


class RequestDisassembler(PayloadDisassembler):
    def __init__(
        self,
        sender: PayloadSender,
        identifier: UUID,
        request: "streaming.StreamingRequest",
    ):
        super().__init__(sender, identifier)
        self.request = request

    @property
    def type(self) -> str:
        return PayloadTypes.REQUEST

    async def get_stream(self) -> List[int]:
        payload = RequestPayload(verb=self.request.verb, path=self.request.path)
        if self.request.streams:
            payload.streams = [
                self.get_stream_description(content_stream)
                for content_stream in self.request.streams
            ]
        memory_stream: List[int] = []
        stream_length: List[int] = []
        # TODO: high probability stream length is not necessary
        self.serialize(payload, memory_stream, stream_length)
        return memory_stream
|
botbuilder-python/libraries/botframework-streaming/botframework/streaming/payloads/disassemblers/request_disassembler.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-streaming/botframework/streaming/payloads/disassemblers/request_disassembler.py",
"repo_id": "botbuilder-python",
"token_count": 478
}
| 417 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import asyncio
from uuid import UUID, uuid4

from botframework.streaming.payloads import (
    PayloadAssemblerManager,
    RequestManager,
    SendOperations,
    StreamManager,
)
from botframework.streaming.payloads.assemblers import PayloadStreamAssembler
from botframework.streaming.payload_transport import PayloadSender, PayloadReceiver

from .receive_request import ReceiveRequest
from .receive_response import ReceiveResponse
from .request_handler import RequestHandler
from .streaming_request import StreamingRequest


class ProtocolAdapter:
    def __init__(
        self,
        request_handler: RequestHandler,
        request_manager: RequestManager,
        payload_sender: PayloadSender,
        payload_receiver: PayloadReceiver,
        handler_context: object = None,
    ):
        self._request_handler = request_handler
        self._request_manager = request_manager
        self._payload_sender = payload_sender
        self._payload_receiver = payload_receiver
        self._handler_context = handler_context
        self._send_operations = SendOperations(self._payload_sender)
        # TODO: might be able to remove
        self._stream_manager = StreamManager(self._on_cancel_stream)
        self._assembler_manager = PayloadAssemblerManager(
            self._stream_manager, self._on_receive_request, self._on_receive_response
        )
        self._payload_receiver.subscribe(
            self._assembler_manager.get_payload_stream,
            self._assembler_manager.on_receive,
        )

    async def send_request(self, request: StreamingRequest) -> ReceiveResponse:
        if not request:
            raise TypeError(
                f"'request: {request.__class__.__name__}' argument can't be None"
            )
        request_id = uuid4()
        # Register for the response before sending, so a fast reply can't be
        # missed while the request is still being transmitted.
        response_task = self._request_manager.get_response(request_id)
        request_task = self._send_operations.send_request(request_id, request)
        [_, response] = await asyncio.gather(request_task, response_task)
        return response

    async def _on_receive_request(self, identifier: UUID, request: ReceiveRequest):
        # request is done, we can handle it
        if self._request_handler:
            response = await self._request_handler.process_request(
                request, None, self._handler_context
            )
            if response:
                await self._send_operations.send_response(identifier, response)

    async def _on_receive_response(self, identifier: UUID, response: ReceiveResponse):
        # we received the response to something, signal it
        await self._request_manager.signal_response(identifier, response)

    def _on_cancel_stream(self, content_stream_assembler: PayloadStreamAssembler):
        # TODO: on original C# code content_stream_assembler is typed as IAssembler
        asyncio.create_task(
            self._send_operations.send_cancel_stream(
                content_stream_assembler.identifier
            )
        )
|
botbuilder-python/libraries/botframework-streaming/botframework/streaming/protocol_adapter.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-streaming/botframework/streaming/protocol_adapter.py",
"repo_id": "botbuilder-python",
"token_count": 1186
}
| 418 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

from enum import IntEnum


class WebSocketMessageType(IntEnum):
    # WebSocket frame opcodes as defined by the WebSocket spec (RFC 6455)
    CONTINUATION = 0
    TEXT = 1
    BINARY = 2
    PING = 9
    PONG = 10
    CLOSE = 8
|
botbuilder-python/libraries/botframework-streaming/botframework/streaming/transport/web_socket/web_socket_message_type.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-streaming/botframework/streaming/transport/web_socket/web_socket_message_type.py",
"repo_id": "botbuilder-python",
"token_count": 99
}
| 419 |
from unittest import TestCase
from uuid import UUID, uuid4

from botframework.streaming.payloads import StreamManager
from botframework.streaming.payloads.assemblers import PayloadStreamAssembler
from botframework.streaming.payloads.models import Header


class TestStreamManager(TestCase):
    def test_ctor_null_cancel_ok(self):
        manager = StreamManager(None)
        self.assertIsNotNone(manager)

    def test_get_payload_assembler_not_exists_ok(self):
        manager = StreamManager(None)
        identifier: UUID = uuid4()
        assembler = manager.get_payload_assembler(identifier)
        self.assertIsNotNone(assembler)
        self.assertEqual(identifier, assembler.identifier)

    def test_get_payload_assembler_exists_ok(self):
        manager = StreamManager(None)
        identifier: UUID = uuid4()
        assembler1 = manager.get_payload_assembler(identifier)
        assembler2 = manager.get_payload_assembler(identifier)
        self.assertEqual(assembler1, assembler2)

    def test_get_payload_stream_not_exists_ok(self):
        manager = StreamManager(None)
        identifier: UUID = uuid4()
        stream = manager.get_payload_stream(Header(id=identifier))
        self.assertIsNotNone(stream)

    def test_get_payload_stream_exists_ok(self):
        manager = StreamManager(None)
        identifier: UUID = uuid4()
        stream1 = manager.get_payload_stream(Header(id=identifier))
        stream2 = manager.get_payload_stream(Header(id=identifier))
        self.assertEqual(stream1, stream2)

    def test_get_payload_stream_streams_match(self):
        manager = StreamManager(None)
        identifier: UUID = uuid4()
        assembler = manager.get_payload_assembler(identifier)
        stream = manager.get_payload_stream(Header(id=identifier))
        self.assertEqual(assembler.get_payload_as_stream(), stream)

    def test_on_receive_not_exists_no_op(self):
        manager = StreamManager(None)
        identifier: UUID = uuid4()
        manager.on_receive(Header(id=identifier), [], 100)

    def test_on_receive_exists(self):
        manager = StreamManager(None)
        identifier: UUID = uuid4()
        assembler = manager.get_payload_assembler(identifier)
        assembler.get_payload_as_stream()
        manager.on_receive(Header(id=identifier, end=True), [], 100)
        self.assertTrue(assembler.end)

    def test_close_stream_not_exists_no_op(self):
        manager = StreamManager(None)
        identifier: UUID = uuid4()
        manager.close_stream(identifier)

    def test_close_stream_not_end_closed(self):
        closed = False

        def mock_cancel_stream(_: PayloadStreamAssembler):
            nonlocal closed
            closed = True

        manager = StreamManager(on_cancel_stream=mock_cancel_stream)
        identifier: UUID = uuid4()
        assembler = manager.get_payload_assembler(identifier)
        assembler.get_payload_as_stream()
        manager.close_stream(identifier)
        self.assertTrue(closed)

    def test_close_stream_end_no_op(self):
        closed = False

        def mock_cancel_stream(_: PayloadStreamAssembler):
            nonlocal closed
            closed = True

        manager = StreamManager(on_cancel_stream=mock_cancel_stream)
        identifier: UUID = uuid4()
        assembler = manager.get_payload_assembler(identifier)
        assembler.get_payload_as_stream()
        assembler.on_receive(Header(end=True), [], 1)  # Set it as ended
        manager.close_stream(identifier)
        self.assertFalse(closed)
|
botbuilder-python/libraries/botframework-streaming/tests/test_stream_manager.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-streaming/tests/test_stream_manager.py",
"repo_id": "botbuilder-python",
"token_count": 1465
}
| 420 |
from .child_bot import ChildBot

__all__ = ["ChildBot"]
|
botbuilder-python/tests/experimental/sso/child/bots/__init__.py/0
|
{
"file_path": "botbuilder-python/tests/experimental/sso/child/bots/__init__.py",
"repo_id": "botbuilder-python",
"token_count": 19
}
| 421 |
from botbuilder.core import BotFrameworkHttpClient, InvokeResponse, TurnContext
from botbuilder.core.skills import BotFrameworkSkill, ConversationIdFactoryBase
from botbuilder.schema import Activity


class SkillHttpClient(BotFrameworkHttpClient):
    def __init__(self, credential_provider, conversation_id_factory, channel_provider=None):
        super().__init__(credential_provider, channel_provider)
        self._conversation_id_factory: ConversationIdFactoryBase = conversation_id_factory

    async def post_activity_to_skill(
        self,
        from_bot_id: str,
        to_skill: BotFrameworkSkill,
        callback_url: str,
        activity: Activity,
    ) -> InvokeResponse:
        skill_conversation_id = await self._conversation_id_factory.create_skill_conversation_id(
            TurnContext.get_conversation_reference(activity)
        )
        return await self.post_activity(
            from_bot_id,
            to_skill.app_id,
            to_skill.skill_endpoint,
            callback_url,
            skill_conversation_id,
            activity,
        )
|
botbuilder-python/tests/experimental/sso/parent/skill_client.py/0
|
{
"file_path": "botbuilder-python/tests/experimental/sso/parent/skill_client.py",
"repo_id": "botbuilder-python",
"token_count": 444
}
| 422 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

from botbuilder.dialogs import (
    WaterfallDialog,
    WaterfallStepContext,
    DialogTurnResult,
)
from botbuilder.dialogs.prompts import (
    ConfirmPrompt,
    PromptOptions,
    OAuthPrompt,
    OAuthPromptSettings,
)
from botbuilder.core import MessageFactory

from dialogs import LogoutDialog


class MainDialog(LogoutDialog):
    def __init__(self, configuration):
        super().__init__(MainDialog.__name__, configuration.CONNECTION_NAME)

        self.add_dialog(
            OAuthPrompt(
                OAuthPrompt.__name__,
                OAuthPromptSettings(
                    connection_name=self.connection_name,
                    text="Please Sign In",
                    title="Sign In",
                    timeout=30000,
                ),
            )
        )
        self.add_dialog(ConfirmPrompt(ConfirmPrompt.__name__))
        self.add_dialog(
            WaterfallDialog(
                "WFDialog",
                [
                    self.prompt_step,
                    self.login_step,
                    self.display_token_phase_one,
                    self.display_token_phase_two,
                ],
            )
        )

        self.initial_dialog_id = "WFDialog"

    async def prompt_step(self, step_context: WaterfallStepContext) -> DialogTurnResult:
        return await step_context.begin_dialog(OAuthPrompt.__name__)

    async def login_step(self, step_context: WaterfallStepContext) -> DialogTurnResult:
        token_response = step_context.result
        if token_response:
            await step_context.context.send_activity(MessageFactory.text("You are now logged in."))
            return await step_context.prompt(
                ConfirmPrompt.__name__,
                PromptOptions(prompt=MessageFactory.text("Would you like to view your token?")),
            )
        await step_context.context.send_activity(
            MessageFactory.text("Login was not successful, please try again.")
        )
        return await step_context.end_dialog()

    async def display_token_phase_one(self, step_context: WaterfallStepContext) -> DialogTurnResult:
        await step_context.context.send_activity(MessageFactory.text("Thank you"))
        result = step_context.result
        if result:
            return await step_context.begin_dialog(OAuthPrompt.__name__)
        return await step_context.end_dialog()

    async def display_token_phase_two(self, step_context: WaterfallStepContext) -> DialogTurnResult:
        token_response = step_context.result
        if token_response:
            await step_context.context.send_activity(
                MessageFactory.text(f"Here is your token {token_response.token}")
            )
        return await step_context.end_dialog()
|
botbuilder-python/tests/skills/skills-prototypes/dialog-to-dialog/authentication-bot/dialogs/main_dialog.py/0
|
{
"file_path": "botbuilder-python/tests/skills/skills-prototypes/dialog-to-dialog/authentication-bot/dialogs/main_dialog.py",
"repo_id": "botbuilder-python",
"token_count": 1139
}
| 423 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="acute" format="2">
<advance width="1200"/>
<unicode hex="00B4"/>
<outline>
<component base="acutecomb"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>0</integer>
<key>name</key>
<string>acutecomb</string>
</dict>
</array>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/acute.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/acute.glif",
"repo_id": "cascadia-code",
"token_count": 277
}
| 424 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="ainTwodotsverticalabove-ar" format="2">
<advance width="1200"/>
<unicode hex="075F"/>
<outline>
<component base="ain-ar"/>
<component base="twodotsverticalabove-ar" xOffset="-93" yOffset="436"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/ainT_wodotsverticalabove-ar.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/ainT_wodotsverticalabove-ar.glif",
"repo_id": "cascadia-code",
"token_count": 177
}
| 425 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="chedescender-cy" format="2">
<advance width="1200"/>
<unicode hex="04B7"/>
<anchor x="560" y="-270" name="bottom"/>
<outline>
<contour>
<point x="914" y="-270" type="line"/>
<point x="1183" y="-270" type="line"/>
<point x="1183" y="252" type="line"/>
<point x="914" y="252" type="line"/>
</contour>
<component base="che-cy"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>0</integer>
<key>name</key>
<string>che-cy</string>
</dict>
</array>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/chedescender-cy.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/chedescender-cy.glif",
"repo_id": "cascadia-code",
"token_count": 398
}
| 426 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="dad-ar" format="2">
<advance width="1200"/>
<unicode hex="0636"/>
<outline>
<component base="sad-ar"/>
<component base="dotabove-ar" xOffset="280" yOffset="373"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/dad-ar.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/dad-ar.glif",
"repo_id": "cascadia-code",
"token_count": 167
}
| 427 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="dalDotbelow-ar.fina" format="2">
<advance width="1200"/>
<outline>
<component base="dal-ar.fina"/>
<component base="dotbelow-ar" xOffset="-40" yOffset="-24"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/dalD_otbelow-ar.fina.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/dalD_otbelow-ar.fina.glif",
"repo_id": "cascadia-code",
"token_count": 167
}
| 428 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="dalVinvertedbelow-ar.fina" format="2">
<advance width="1200"/>
<outline>
<component base="dal-ar.fina"/>
<component base="_vinvertedbelow-ar" xOffset="-40" yOffset="-24"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/dalV_invertedbelow-ar.fina.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/dalV_invertedbelow-ar.fina.glif",
"repo_id": "cascadia-code",
"token_count": 170
}
| 429 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="firsttonechinese" format="2">
<advance width="1200"/>
<unicode hex="02C9"/>
<outline>
<component base="macron"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>0</integer>
<key>name</key>
<string>macron</string>
</dict>
</array>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/firsttonechinese.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/firsttonechinese.glif",
"repo_id": "cascadia-code",
"token_count": 275
}
| 430 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="gershayim-hb" format="2">
<advance width="1200"/>
<unicode hex="05F4"/>
<outline>
<component base="geresh-hb" xOffset="218"/>
<component base="geresh-hb" xOffset="-222"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.97,1,0,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/gershayim-hb.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/gershayim-hb.glif",
"repo_id": "cascadia-code",
"token_count": 171
}
| 431 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="jeemTwodotsabove-ar.fina" format="2">
<advance width="1200"/>
<outline>
<component base="jeem-ar.fina"/>
<component base="twodotshorizontalabove-ar" xOffset="-34" yOffset="392"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/jeemT_wodotsabove-ar.fina.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/jeemT_wodotsabove-ar.fina.glif",
"repo_id": "cascadia-code",
"token_count": 174
}
| 432 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="kehehThreedotsupbelow-ar" format="2">
<advance width="1200"/>
<unicode hex="0764"/>
<outline>
<component base="keheh-ar"/>
<component base="threedotsupbelow-ar" xOffset="-47" yOffset="-24"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/kehehT_hreedotsupbelow-ar.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/kehehT_hreedotsupbelow-ar.glif",
"repo_id": "cascadia-code",
"token_count": 180
}
| 433 |