prompt (large_string, lengths 72 to 9.34k) | completion (large_string, lengths 0 to 7.61k)
---|---
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
<|fim▁hole|> response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")<|fim▁end|> | # SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
<|fim_middle|>
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
) |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
<|fim_middle|>
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
) |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
<|fim_middle|>
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True) |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
<|fim_middle|>
<|fim▁end|> | if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity") |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
<|fim_middle|>
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | args["ReturnConsumedCapacity"] = capacity |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
<|fim_middle|>
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | args["ReturnConsumedCapacity"] = capacity |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
<|fim_middle|>
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | args["ReturnConsumedCapacity"] = capacity |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
<|fim_middle|>
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | args["ReturnConsumedCapacity"] = capacity |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
<|fim_middle|>
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
) |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
<|fim_middle|>
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
) |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
<|fim_middle|>
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
) |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
<|fim_middle|>
<|fim▁end|> | response.shouldnt.have.key("ConsumedCapacity") |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def <|fim_middle|>():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | test_error_on_wrong_value_for_consumed_capacity |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def <|fim_middle|>():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | test_consumed_capacity_get_unknown_item |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def <|fim_middle|>(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | test_only_return_consumed_capacity_when_required |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def <|fim_middle|>(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")
<|fim▁end|> | validate_response |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,<|fim▁hole|> ... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages<|fim▁end|> | ... properties={}, |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
<|fim_middle|>
<|fim▁end|> | """Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
<|fim_middle|>
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = "" |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
<|fim_middle|>
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
<|fim_middle|>
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
<|fim_middle|>
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
<|fim_middle|>
<|fim▁end|> | if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
<|fim_middle|>
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | self._source_grammar = [source_grammar] |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
<|fim_middle|>
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | self._source_grammar = source_grammar |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
<|fim_middle|>
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop() |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
<|fim_middle|>
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | self.__source = source_array.pop() |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
<|fim_middle|>
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process() |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
<|fim_middle|>
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process() |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
<|fim_middle|>
return self.__stage_packages
<|fim▁end|> | processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process() |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def <|fim_middle|>(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | __init__ |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def <|fim_middle|>(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | get_source |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def <|fim_middle|>(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | get_build_snaps |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def <|fim_middle|>(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | get_build_packages |
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def <|fim_middle|>(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages
<|fim▁end|> | get_stage_packages |
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.<|fim▁hole|># along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
def __init__(self):
#Método constructor, asociando los widgets
self.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
def __iteracion__(self):
#Iteración en segundos cambiando el valor en la barra de progreso.
for i in range(self.rango):
if self.togglebutton1.get_active() == True:
self.new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self.new_val = 0.0
self.togglebutton1.set_active(False)
break
else:
time.sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
else:
return
def on_togglebutton1_toggled(self,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos.
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
else:
#Se detiene la barra de progreso
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)<|fim▁end|> | #
# You should have received a copy of the GNU General Public License |
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
de<|fim_middle|>
<|fim▁end|> | f __init__(self):
#Método constructor, asociando los widgets
self.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
def __iteracion__(self):
#Iteración en segundos cambiando el valor en la barra de progreso.
for i in range(self.rango):
if self.togglebutton1.get_active() == True:
self.new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self.new_val = 0.0
self.togglebutton1.set_active(False)
break
else:
time.sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
else:
return
def on_togglebutton1_toggled(self,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos.
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
else:
#Se detiene la barra de progreso
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)
|
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
def __init__(self):
#Método constructor, asociando los widgets
sel<|fim_middle|>
def __iteracion__(self):
#Iteración en segundos cambiando el valor en la barra de progreso.
for i in range(self.rango):
if self.togglebutton1.get_active() == True:
self.new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self.new_val = 0.0
self.togglebutton1.set_active(False)
break
else:
time.sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
else:
return
def on_togglebutton1_toggled(self,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos.
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
else:
#Se detiene la barra de progreso
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)
<|fim▁end|> | f.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
|
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
def __init__(self):
#Método constructor, asociando los widgets
self.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
def __iteracion__(self):
#Iteración en segundos cambiando el valor en la barra de progreso.
for <|fim_middle|>
def on_togglebutton1_toggled(self,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos.
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
else:
#Se detiene la barra de progreso
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)
<|fim▁end|> | i in range(self.rango):
if self.togglebutton1.get_active() == True:
self.new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self.new_val = 0.0
self.togglebutton1.set_active(False)
break
else:
time.sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
else:
return
|
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
def __init__(self):
#Método constructor, asociando los widgets
self.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
def __iteracion__(self):
#Iteración en segundos cambiando el valor en la barra de progreso.
for i in range(self.rango):
if self.togglebutton1.get_active() == True:
self.new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self.new_val = 0.0
self.togglebutton1.set_active(False)
break
else:
time.sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
else:
return
def on_togglebutton1_toggled(self,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos<|fim_middle|>
<|fim▁end|> | .
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
else:
#Se detiene la barra de progreso
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)
|
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
def __init__(self):
#Método constructor, asociando los widgets
self.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
def __iteracion__(self):
#Iteración en segundos cambiando el valor en la barra de progreso.
for i in range(self.rango):
if self.togglebutton1.get_active() == True:
self <|fim_middle|>
else:
return
def on_togglebutton1_toggled(self,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos.
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
else:
#Se detiene la barra de progreso
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)
<|fim▁end|> | .new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self.new_val = 0.0
self.togglebutton1.set_active(False)
break
else:
time.sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
|
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
def __init__(self):
#Método constructor, asociando los widgets
self.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
def __iteracion__(self):
#Iteración en segundos cambiando el valor en la barra de progreso.
for i in range(self.rango):
if self.togglebutton1.get_active() == True:
self.new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self <|fim_middle|>
else:
time.sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
else:
return
def on_togglebutton1_toggled(self,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos.
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
else:
#Se detiene la barra de progreso
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)
<|fim▁end|> | .new_val = 0.0
self.togglebutton1.set_active(False)
break
|
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
def __init__(self):
#Método constructor, asociando los widgets
self.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
def __iteracion__(self):
#Iteración en segundos cambiando el valor en la barra de progreso.
for i in range(self.rango):
if self.togglebutton1.get_active() == True:
self.new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self.new_val = 0.0
self.togglebutton1.set_active(False)
break
else:
time <|fim_middle|>
else:
return
def on_togglebutton1_toggled(self,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos.
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
else:
#Se detiene la barra de progreso
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)
<|fim▁end|> | .sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
|
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
def __init__(self):
#Método constructor, asociando los widgets
self.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
def __iteracion__(self):
#Iteración en segundos cambiando el valor en la barra de progreso.
for i in range(self.rango):
if self.togglebutton1.get_active() == True:
self.new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self.new_val = 0.0
self.togglebutton1.set_active(False)
break
else:
time.sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
else:
retu <|fim_middle|>
def on_togglebutton1_toggled(self,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos.
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
else:
#Se detiene la barra de progreso
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)
<|fim▁end|> | rn
|
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
def __init__(self):
#Método constructor, asociando los widgets
self.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
def __iteracion__(self):
#Iteración en segundos cambiando el valor en la barra de progreso.
for i in range(self.rango):
if self.togglebutton1.get_active() == True:
self.new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self.new_val = 0.0
self.togglebutton1.set_active(False)
break
else:
time.sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
else:
return
def on_togglebutton1_toggled(self,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos.
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock <|fim_middle|>
else:
#Se detiene la barra de progreso
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)
<|fim▁end|> | = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
|
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
def __init__(self):
#Método constructor, asociando los widgets
self.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
def __iteracion__(self):
#Iteración en segundos cambiando el valor en la barra de progreso.
for i in range(self.rango):
if self.togglebutton1.get_active() == True:
self.new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self.new_val = 0.0
self.togglebutton1.set_active(False)
break
else:
time.sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
else:
return
def on_togglebutton1_toggled(self,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos.
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
else:
#Se detiene la barra de progreso
self. <|fim_middle|>
<|fim▁end|> | progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)
|
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
def __<|fim_middle|>elf):
#Método constructor, asociando los widgets
self.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
def __iteracion__(self):
#Iteración en segundos cambiando el valor en la barra de progreso.
for i in range(self.rango):
if self.togglebutton1.get_active() == True:
self.new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self.new_val = 0.0
self.togglebutton1.set_active(False)
break
else:
time.sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
else:
return
def on_togglebutton1_toggled(self,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos.
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
else:
#Se detiene la barra de progreso
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)
<|fim▁end|> | init__(s |
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
def __init__(self):
#Método constructor, asociando los widgets
self.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
def __i<|fim_middle|>lf):
#Iteración en segundos cambiando el valor en la barra de progreso.
for i in range(self.rango):
if self.togglebutton1.get_active() == True:
self.new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self.new_val = 0.0
self.togglebutton1.set_active(False)
break
else:
time.sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
else:
return
def on_togglebutton1_toggled(self,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos.
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
else:
#Se detiene la barra de progreso
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)
<|fim▁end|> | teracion__(se |
<|file_name|>progreso.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# progreso.py
#
# Copyright 2010 Jesús Hómez <jesus@jesus-laptop>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import gtk, time
import threading
import thread
import gobject
#Iniciando el hilo sin usarlo
gtk.gdk.threads_init()
#La clase App hereda threading.Thread
class App(threading.Thread):
def __init__(self):
#Método constructor, asociando los widgets
self.glade_file = "progreso.glade"
self.glade = gtk.Builder()
self.glade.add_from_file(self.glade_file)
self.window1 = self.glade.get_object('window1')
self.togglebutton1 = self.glade.get_object('togglebutton1')
self.button1 = self.glade.get_object('button1')
self.progressbar1 = self.glade.get_object('progressbar1')
self.new_val = 0.0
self.rango =60
#Definiendo el valor inicial de la barra de proceso, definiendo los saltos en 0.1
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_pulse_step(0.1)
self.window1.connect("destroy",self.on_window1_destroy)
self.button1.connect('clicked', self.on_button1_clicked)
self.togglebutton1.connect('toggled',self.on_togglebutton1_toggled)
#Iniciando el hilo en el constructor
threading.Thread.__init__(self)
self.window1.show_all()
def __iteracion__(self):
#Iteración en segundos cambiando el valor en la barra de progreso.
for i in range(self.rango):
if self.togglebutton1.get_active() == True:
self.new_val = self.progressbar1.get_fraction() + 0.01
if self.new_val > 1.0:
self.new_val = 0.0
self.togglebutton1.set_active(False)
break
else:
time.sleep(1)
self.x = self.new_val*100
self.progressbar1.set_text("%s" %self.x)
self.progressbar1.set_fraction(self.new_val)
else:
return
def on_t<|fim_middle|>f,*args):
#Si cambia el evento en el boton biestado se inicia la iteración entre los hilos.
variable = self.togglebutton1.get_active()
self.rango = 100
if variable == True:
lock = thread.allocate_lock()
lock.acquire()
thread.start_new_thread( self.__iteracion__, ())
lock.release()
else:
#Se detiene la barra de progreso
self.progressbar1.set_fraction(self.new_val)
self.progressbar1.set_text("%s" %self.x)
<|fim▁end|> | ogglebutton1_toggled(sel |
<|file_name|>Augur Installer.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import binascii
import traceback
import subprocess
from win32com.client import Dispatch
LAUNCHER_PATH = "C:\\Program Files\\Augur"
DATA_PATH = os.path.join(os.path.expanduser('~'), 'AppData', 'Roaming', "Augur")
PASSFILE = os.path.join(DATA_PATH, "password.txt")
if getattr(sys, 'frozen', False):
# we are running in a |PyInstaller| bundle
BASEDIR = sys._MEIPASS
else:
# we are running in a normal Python environment
BASEDIR = os.path.dirname(os.path.abspath(__file__))
GETH_EXE = os.path.join(BASEDIR, 'geth.exe')
LAUNCHER_EXE = os.path.join(BASEDIR, 'augurlauncher.exe')
def main():
# first make all the appropriate directories
print("Making directories...")
for d in LAUNCHER_PATH, DATA_PATH:
print("Creating", d, end=" ", flush=True)
os.mkdir(d)
print("Success!")
print("Generating random password file...", end=" ", flush=True)
# then generate the password
password = binascii.b2a_hex(os.urandom(32))
passfile = open(PASSFILE, "w")
passfile.write(password.decode('ascii'))
passfile.close()
print("Success!")
# Then copy ".exe"s to the launcher path
exes = GETH_EXE, LAUNCHER_EXE
results = []
for exe in exes:
print("Copying", os.path.basename(exe), "to", LAUNCHER_PATH, "...", end=" ", flush=True)
results.append(shutil.copy(exe, LAUNCHER_PATH))
print("Sucess!")
print("Creating node account...", end=" ", flush=True)
# create account on node
p = subprocess.Popen([results[0],
"--password", PASSFILE,
"account", "new"])
p.wait()
print("Success!")
print("Creating shortcut...", end=" ", flush=True)<|fim▁hole|> desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
wDir = LAUNCHER_PATH
shell = Dispatch('WScript.Shell')
shortcut = shell.CreateShortCut(shortcut_path)
shortcut.Targetpath = results[1]
shortcut.WorkingDirectory = wDir
shortcut.IconLocation = results[1]
shortcut.save()
print("Success!")
def uninstall():
paths = LAUNCHER_PATH, DATA_PATH
for p in paths:
print("Deleting", p, "...", end=" ", flush=True)
shutil.rmtree(p)
print("Success!")
print("Removing desktop shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
os.remove(shortcut_path)
print("Success!")
if __name__ == '__main__':
try:
if len(sys.argv) == 2 and sys.argv[1] == 'uninstall':
uninstall()
elif len(sys.argv) == 1:
main()
else:
assert len(sys.argv) <= 2, "wrong number of arguments!"
except Exception as exc:
traceback.print_exc()
finally:
os.system("pause")
sys.exit(0)<|fim▁end|> | |
<|file_name|>Augur Installer.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import binascii
import traceback
import subprocess
from win32com.client import Dispatch
LAUNCHER_PATH = "C:\\Program Files\\Augur"
DATA_PATH = os.path.join(os.path.expanduser('~'), 'AppData', 'Roaming', "Augur")
PASSFILE = os.path.join(DATA_PATH, "password.txt")
if getattr(sys, 'frozen', False):
# we are running in a |PyInstaller| bundle
BASEDIR = sys._MEIPASS
else:
# we are running in a normal Python environment
BASEDIR = os.path.dirname(os.path.abspath(__file__))
GETH_EXE = os.path.join(BASEDIR, 'geth.exe')
LAUNCHER_EXE = os.path.join(BASEDIR, 'augurlauncher.exe')
def main():
# first make all the appropriate directories
<|fim_middle|>
def uninstall():
paths = LAUNCHER_PATH, DATA_PATH
for p in paths:
print("Deleting", p, "...", end=" ", flush=True)
shutil.rmtree(p)
print("Success!")
print("Removing desktop shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
os.remove(shortcut_path)
print("Success!")
if __name__ == '__main__':
try:
if len(sys.argv) == 2 and sys.argv[1] == 'uninstall':
uninstall()
elif len(sys.argv) == 1:
main()
else:
assert len(sys.argv) <= 2, "wrong number of arguments!"
except Exception as exc:
traceback.print_exc()
finally:
os.system("pause")
sys.exit(0)
<|fim▁end|> | print("Making directories...")
for d in LAUNCHER_PATH, DATA_PATH:
print("Creating", d, end=" ", flush=True)
os.mkdir(d)
print("Success!")
print("Generating random password file...", end=" ", flush=True)
# then generate the password
password = binascii.b2a_hex(os.urandom(32))
passfile = open(PASSFILE, "w")
passfile.write(password.decode('ascii'))
passfile.close()
print("Success!")
# Then copy ".exe"s to the launcher path
exes = GETH_EXE, LAUNCHER_EXE
results = []
for exe in exes:
print("Copying", os.path.basename(exe), "to", LAUNCHER_PATH, "...", end=" ", flush=True)
results.append(shutil.copy(exe, LAUNCHER_PATH))
print("Sucess!")
print("Creating node account...", end=" ", flush=True)
# create account on node
p = subprocess.Popen([results[0],
"--password", PASSFILE,
"account", "new"])
p.wait()
print("Success!")
print("Creating shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
wDir = LAUNCHER_PATH
shell = Dispatch('WScript.Shell')
shortcut = shell.CreateShortCut(shortcut_path)
shortcut.Targetpath = results[1]
shortcut.WorkingDirectory = wDir
shortcut.IconLocation = results[1]
shortcut.save()
print("Success!") |
<|file_name|>Augur Installer.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import binascii
import traceback
import subprocess
from win32com.client import Dispatch
LAUNCHER_PATH = "C:\\Program Files\\Augur"
DATA_PATH = os.path.join(os.path.expanduser('~'), 'AppData', 'Roaming', "Augur")
PASSFILE = os.path.join(DATA_PATH, "password.txt")
if getattr(sys, 'frozen', False):
# we are running in a |PyInstaller| bundle
BASEDIR = sys._MEIPASS
else:
# we are running in a normal Python environment
BASEDIR = os.path.dirname(os.path.abspath(__file__))
GETH_EXE = os.path.join(BASEDIR, 'geth.exe')
LAUNCHER_EXE = os.path.join(BASEDIR, 'augurlauncher.exe')
def main():
# first make all the appropriate directories
print("Making directories...")
for d in LAUNCHER_PATH, DATA_PATH:
print("Creating", d, end=" ", flush=True)
os.mkdir(d)
print("Success!")
print("Generating random password file...", end=" ", flush=True)
# then generate the password
password = binascii.b2a_hex(os.urandom(32))
passfile = open(PASSFILE, "w")
passfile.write(password.decode('ascii'))
passfile.close()
print("Success!")
# Then copy ".exe"s to the launcher path
exes = GETH_EXE, LAUNCHER_EXE
results = []
for exe in exes:
print("Copying", os.path.basename(exe), "to", LAUNCHER_PATH, "...", end=" ", flush=True)
results.append(shutil.copy(exe, LAUNCHER_PATH))
print("Sucess!")
print("Creating node account...", end=" ", flush=True)
# create account on node
p = subprocess.Popen([results[0],
"--password", PASSFILE,
"account", "new"])
p.wait()
print("Success!")
print("Creating shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
wDir = LAUNCHER_PATH
shell = Dispatch('WScript.Shell')
shortcut = shell.CreateShortCut(shortcut_path)
shortcut.Targetpath = results[1]
shortcut.WorkingDirectory = wDir
shortcut.IconLocation = results[1]
shortcut.save()
print("Success!")
def uninstall():
<|fim_middle|>
if __name__ == '__main__':
try:
if len(sys.argv) == 2 and sys.argv[1] == 'uninstall':
uninstall()
elif len(sys.argv) == 1:
main()
else:
assert len(sys.argv) <= 2, "wrong number of arguments!"
except Exception as exc:
traceback.print_exc()
finally:
os.system("pause")
sys.exit(0)
<|fim▁end|> | paths = LAUNCHER_PATH, DATA_PATH
for p in paths:
print("Deleting", p, "...", end=" ", flush=True)
shutil.rmtree(p)
print("Success!")
print("Removing desktop shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
os.remove(shortcut_path)
print("Success!") |
<|file_name|>Augur Installer.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import binascii
import traceback
import subprocess
from win32com.client import Dispatch
LAUNCHER_PATH = "C:\\Program Files\\Augur"
DATA_PATH = os.path.join(os.path.expanduser('~'), 'AppData', 'Roaming', "Augur")
PASSFILE = os.path.join(DATA_PATH, "password.txt")
if getattr(sys, 'frozen', False):
# we are running in a |PyInstaller| bundle
<|fim_middle|>
else:
# we are running in a normal Python environment
BASEDIR = os.path.dirname(os.path.abspath(__file__))
GETH_EXE = os.path.join(BASEDIR, 'geth.exe')
LAUNCHER_EXE = os.path.join(BASEDIR, 'augurlauncher.exe')
def main():
# first make all the appropriate directories
print("Making directories...")
for d in LAUNCHER_PATH, DATA_PATH:
print("Creating", d, end=" ", flush=True)
os.mkdir(d)
print("Success!")
print("Generating random password file...", end=" ", flush=True)
# then generate the password
password = binascii.b2a_hex(os.urandom(32))
passfile = open(PASSFILE, "w")
passfile.write(password.decode('ascii'))
passfile.close()
print("Success!")
# Then copy ".exe"s to the launcher path
exes = GETH_EXE, LAUNCHER_EXE
results = []
for exe in exes:
print("Copying", os.path.basename(exe), "to", LAUNCHER_PATH, "...", end=" ", flush=True)
results.append(shutil.copy(exe, LAUNCHER_PATH))
print("Sucess!")
print("Creating node account...", end=" ", flush=True)
# create account on node
p = subprocess.Popen([results[0],
"--password", PASSFILE,
"account", "new"])
p.wait()
print("Success!")
print("Creating shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
wDir = LAUNCHER_PATH
shell = Dispatch('WScript.Shell')
shortcut = shell.CreateShortCut(shortcut_path)
shortcut.Targetpath = results[1]
shortcut.WorkingDirectory = wDir
shortcut.IconLocation = results[1]
shortcut.save()
print("Success!")
def uninstall():
paths = LAUNCHER_PATH, DATA_PATH
for p in paths:
print("Deleting", p, "...", end=" ", flush=True)
shutil.rmtree(p)
print("Success!")
print("Removing desktop shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
os.remove(shortcut_path)
print("Success!")
if __name__ == '__main__':
try:
if len(sys.argv) == 2 and sys.argv[1] == 'uninstall':
uninstall()
elif len(sys.argv) == 1:
main()
else:
assert len(sys.argv) <= 2, "wrong number of arguments!"
except Exception as exc:
traceback.print_exc()
finally:
os.system("pause")
sys.exit(0)
<|fim▁end|> | BASEDIR = sys._MEIPASS |
<|file_name|>Augur Installer.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import binascii
import traceback
import subprocess
from win32com.client import Dispatch
LAUNCHER_PATH = "C:\\Program Files\\Augur"
DATA_PATH = os.path.join(os.path.expanduser('~'), 'AppData', 'Roaming', "Augur")
PASSFILE = os.path.join(DATA_PATH, "password.txt")
if getattr(sys, 'frozen', False):
# we are running in a |PyInstaller| bundle
BASEDIR = sys._MEIPASS
else:
# we are running in a normal Python environment
<|fim_middle|>
GETH_EXE = os.path.join(BASEDIR, 'geth.exe')
LAUNCHER_EXE = os.path.join(BASEDIR, 'augurlauncher.exe')
def main():
# first make all the appropriate directories
print("Making directories...")
for d in LAUNCHER_PATH, DATA_PATH:
print("Creating", d, end=" ", flush=True)
os.mkdir(d)
print("Success!")
print("Generating random password file...", end=" ", flush=True)
# then generate the password
password = binascii.b2a_hex(os.urandom(32))
passfile = open(PASSFILE, "w")
passfile.write(password.decode('ascii'))
passfile.close()
print("Success!")
# Then copy ".exe"s to the launcher path
exes = GETH_EXE, LAUNCHER_EXE
results = []
for exe in exes:
print("Copying", os.path.basename(exe), "to", LAUNCHER_PATH, "...", end=" ", flush=True)
results.append(shutil.copy(exe, LAUNCHER_PATH))
print("Sucess!")
print("Creating node account...", end=" ", flush=True)
# create account on node
p = subprocess.Popen([results[0],
"--password", PASSFILE,
"account", "new"])
p.wait()
print("Success!")
print("Creating shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
wDir = LAUNCHER_PATH
shell = Dispatch('WScript.Shell')
shortcut = shell.CreateShortCut(shortcut_path)
shortcut.Targetpath = results[1]
shortcut.WorkingDirectory = wDir
shortcut.IconLocation = results[1]
shortcut.save()
print("Success!")
def uninstall():
paths = LAUNCHER_PATH, DATA_PATH
for p in paths:
print("Deleting", p, "...", end=" ", flush=True)
shutil.rmtree(p)
print("Success!")
print("Removing desktop shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
os.remove(shortcut_path)
print("Success!")
if __name__ == '__main__':
try:
if len(sys.argv) == 2 and sys.argv[1] == 'uninstall':
uninstall()
elif len(sys.argv) == 1:
main()
else:
assert len(sys.argv) <= 2, "wrong number of arguments!"
except Exception as exc:
traceback.print_exc()
finally:
os.system("pause")
sys.exit(0)
<|fim▁end|> | BASEDIR = os.path.dirname(os.path.abspath(__file__)) |
<|file_name|>Augur Installer.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import binascii
import traceback
import subprocess
from win32com.client import Dispatch
LAUNCHER_PATH = "C:\\Program Files\\Augur"
DATA_PATH = os.path.join(os.path.expanduser('~'), 'AppData', 'Roaming', "Augur")
PASSFILE = os.path.join(DATA_PATH, "password.txt")
if getattr(sys, 'frozen', False):
# we are running in a |PyInstaller| bundle
BASEDIR = sys._MEIPASS
else:
# we are running in a normal Python environment
BASEDIR = os.path.dirname(os.path.abspath(__file__))
GETH_EXE = os.path.join(BASEDIR, 'geth.exe')
LAUNCHER_EXE = os.path.join(BASEDIR, 'augurlauncher.exe')
def main():
# first make all the appropriate directories
print("Making directories...")
for d in LAUNCHER_PATH, DATA_PATH:
print("Creating", d, end=" ", flush=True)
os.mkdir(d)
print("Success!")
print("Generating random password file...", end=" ", flush=True)
# then generate the password
password = binascii.b2a_hex(os.urandom(32))
passfile = open(PASSFILE, "w")
passfile.write(password.decode('ascii'))
passfile.close()
print("Success!")
# Then copy ".exe"s to the launcher path
exes = GETH_EXE, LAUNCHER_EXE
results = []
for exe in exes:
print("Copying", os.path.basename(exe), "to", LAUNCHER_PATH, "...", end=" ", flush=True)
results.append(shutil.copy(exe, LAUNCHER_PATH))
print("Sucess!")
print("Creating node account...", end=" ", flush=True)
# create account on node
p = subprocess.Popen([results[0],
"--password", PASSFILE,
"account", "new"])
p.wait()
print("Success!")
print("Creating shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
wDir = LAUNCHER_PATH
shell = Dispatch('WScript.Shell')
shortcut = shell.CreateShortCut(shortcut_path)
shortcut.Targetpath = results[1]
shortcut.WorkingDirectory = wDir
shortcut.IconLocation = results[1]
shortcut.save()
print("Success!")
def uninstall():
paths = LAUNCHER_PATH, DATA_PATH
for p in paths:
print("Deleting", p, "...", end=" ", flush=True)
shutil.rmtree(p)
print("Success!")
print("Removing desktop shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
os.remove(shortcut_path)
print("Success!")
if __name__ == '__main__':
<|fim_middle|>
<|fim▁end|> | try:
if len(sys.argv) == 2 and sys.argv[1] == 'uninstall':
uninstall()
elif len(sys.argv) == 1:
main()
else:
assert len(sys.argv) <= 2, "wrong number of arguments!"
except Exception as exc:
traceback.print_exc()
finally:
os.system("pause")
sys.exit(0) |
<|file_name|>Augur Installer.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import binascii
import traceback
import subprocess
from win32com.client import Dispatch
LAUNCHER_PATH = "C:\\Program Files\\Augur"
DATA_PATH = os.path.join(os.path.expanduser('~'), 'AppData', 'Roaming', "Augur")
PASSFILE = os.path.join(DATA_PATH, "password.txt")
if getattr(sys, 'frozen', False):
# we are running in a |PyInstaller| bundle
BASEDIR = sys._MEIPASS
else:
# we are running in a normal Python environment
BASEDIR = os.path.dirname(os.path.abspath(__file__))
GETH_EXE = os.path.join(BASEDIR, 'geth.exe')
LAUNCHER_EXE = os.path.join(BASEDIR, 'augurlauncher.exe')
def main():
# first make all the appropriate directories
print("Making directories...")
for d in LAUNCHER_PATH, DATA_PATH:
print("Creating", d, end=" ", flush=True)
os.mkdir(d)
print("Success!")
print("Generating random password file...", end=" ", flush=True)
# then generate the password
password = binascii.b2a_hex(os.urandom(32))
passfile = open(PASSFILE, "w")
passfile.write(password.decode('ascii'))
passfile.close()
print("Success!")
# Then copy ".exe"s to the launcher path
exes = GETH_EXE, LAUNCHER_EXE
results = []
for exe in exes:
print("Copying", os.path.basename(exe), "to", LAUNCHER_PATH, "...", end=" ", flush=True)
results.append(shutil.copy(exe, LAUNCHER_PATH))
print("Sucess!")
print("Creating node account...", end=" ", flush=True)
# create account on node
p = subprocess.Popen([results[0],
"--password", PASSFILE,
"account", "new"])
p.wait()
print("Success!")
print("Creating shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
wDir = LAUNCHER_PATH
shell = Dispatch('WScript.Shell')
shortcut = shell.CreateShortCut(shortcut_path)
shortcut.Targetpath = results[1]
shortcut.WorkingDirectory = wDir
shortcut.IconLocation = results[1]
shortcut.save()
print("Success!")
def uninstall():
paths = LAUNCHER_PATH, DATA_PATH
for p in paths:
print("Deleting", p, "...", end=" ", flush=True)
shutil.rmtree(p)
print("Success!")
print("Removing desktop shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
os.remove(shortcut_path)
print("Success!")
if __name__ == '__main__':
try:
if len(sys.argv) == 2 and sys.argv[1] == 'uninstall':
<|fim_middle|>
elif len(sys.argv) == 1:
main()
else:
assert len(sys.argv) <= 2, "wrong number of arguments!"
except Exception as exc:
traceback.print_exc()
finally:
os.system("pause")
sys.exit(0)
<|fim▁end|> | uninstall() |
<|file_name|>Augur Installer.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import binascii
import traceback
import subprocess
from win32com.client import Dispatch
LAUNCHER_PATH = "C:\\Program Files\\Augur"
DATA_PATH = os.path.join(os.path.expanduser('~'), 'AppData', 'Roaming', "Augur")
PASSFILE = os.path.join(DATA_PATH, "password.txt")
if getattr(sys, 'frozen', False):
# we are running in a |PyInstaller| bundle
BASEDIR = sys._MEIPASS
else:
# we are running in a normal Python environment
BASEDIR = os.path.dirname(os.path.abspath(__file__))
GETH_EXE = os.path.join(BASEDIR, 'geth.exe')
LAUNCHER_EXE = os.path.join(BASEDIR, 'augurlauncher.exe')
def main():
# first make all the appropriate directories
print("Making directories...")
for d in LAUNCHER_PATH, DATA_PATH:
print("Creating", d, end=" ", flush=True)
os.mkdir(d)
print("Success!")
print("Generating random password file...", end=" ", flush=True)
# then generate the password
password = binascii.b2a_hex(os.urandom(32))
passfile = open(PASSFILE, "w")
passfile.write(password.decode('ascii'))
passfile.close()
print("Success!")
# Then copy ".exe"s to the launcher path
exes = GETH_EXE, LAUNCHER_EXE
results = []
for exe in exes:
print("Copying", os.path.basename(exe), "to", LAUNCHER_PATH, "...", end=" ", flush=True)
results.append(shutil.copy(exe, LAUNCHER_PATH))
print("Sucess!")
print("Creating node account...", end=" ", flush=True)
# create account on node
p = subprocess.Popen([results[0],
"--password", PASSFILE,
"account", "new"])
p.wait()
print("Success!")
print("Creating shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
wDir = LAUNCHER_PATH
shell = Dispatch('WScript.Shell')
shortcut = shell.CreateShortCut(shortcut_path)
shortcut.Targetpath = results[1]
shortcut.WorkingDirectory = wDir
shortcut.IconLocation = results[1]
shortcut.save()
print("Success!")
def uninstall():
paths = LAUNCHER_PATH, DATA_PATH
for p in paths:
print("Deleting", p, "...", end=" ", flush=True)
shutil.rmtree(p)
print("Success!")
print("Removing desktop shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
os.remove(shortcut_path)
print("Success!")
if __name__ == '__main__':
try:
if len(sys.argv) == 2 and sys.argv[1] == 'uninstall':
uninstall()
elif len(sys.argv) == 1:
<|fim_middle|>
else:
assert len(sys.argv) <= 2, "wrong number of arguments!"
except Exception as exc:
traceback.print_exc()
finally:
os.system("pause")
sys.exit(0)
<|fim▁end|> | main() |
<|file_name|>Augur Installer.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import binascii
import traceback
import subprocess
from win32com.client import Dispatch
LAUNCHER_PATH = "C:\\Program Files\\Augur"
DATA_PATH = os.path.join(os.path.expanduser('~'), 'AppData', 'Roaming', "Augur")
PASSFILE = os.path.join(DATA_PATH, "password.txt")
if getattr(sys, 'frozen', False):
# we are running in a |PyInstaller| bundle
BASEDIR = sys._MEIPASS
else:
# we are running in a normal Python environment
BASEDIR = os.path.dirname(os.path.abspath(__file__))
GETH_EXE = os.path.join(BASEDIR, 'geth.exe')
LAUNCHER_EXE = os.path.join(BASEDIR, 'augurlauncher.exe')
def main():
# first make all the appropriate directories
print("Making directories...")
for d in LAUNCHER_PATH, DATA_PATH:
print("Creating", d, end=" ", flush=True)
os.mkdir(d)
print("Success!")
print("Generating random password file...", end=" ", flush=True)
# then generate the password
password = binascii.b2a_hex(os.urandom(32))
passfile = open(PASSFILE, "w")
passfile.write(password.decode('ascii'))
passfile.close()
print("Success!")
# Then copy ".exe"s to the launcher path
exes = GETH_EXE, LAUNCHER_EXE
results = []
for exe in exes:
print("Copying", os.path.basename(exe), "to", LAUNCHER_PATH, "...", end=" ", flush=True)
results.append(shutil.copy(exe, LAUNCHER_PATH))
print("Sucess!")
print("Creating node account...", end=" ", flush=True)
# create account on node
p = subprocess.Popen([results[0],
"--password", PASSFILE,
"account", "new"])
p.wait()
print("Success!")
print("Creating shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
wDir = LAUNCHER_PATH
shell = Dispatch('WScript.Shell')
shortcut = shell.CreateShortCut(shortcut_path)
shortcut.Targetpath = results[1]
shortcut.WorkingDirectory = wDir
shortcut.IconLocation = results[1]
shortcut.save()
print("Success!")
def uninstall():
paths = LAUNCHER_PATH, DATA_PATH
for p in paths:
print("Deleting", p, "...", end=" ", flush=True)
shutil.rmtree(p)
print("Success!")
print("Removing desktop shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
os.remove(shortcut_path)
print("Success!")
if __name__ == '__main__':
try:
if len(sys.argv) == 2 and sys.argv[1] == 'uninstall':
uninstall()
elif len(sys.argv) == 1:
main()
else:
<|fim_middle|>
except Exception as exc:
traceback.print_exc()
finally:
os.system("pause")
sys.exit(0)
<|fim▁end|> | assert len(sys.argv) <= 2, "wrong number of arguments!"
<|file_name|>Augur Installer.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import binascii
import traceback
import subprocess
from win32com.client import Dispatch
LAUNCHER_PATH = "C:\\Program Files\\Augur"
DATA_PATH = os.path.join(os.path.expanduser('~'), 'AppData', 'Roaming', "Augur")
PASSFILE = os.path.join(DATA_PATH, "password.txt")
if getattr(sys, 'frozen', False):
# we are running in a |PyInstaller| bundle
BASEDIR = sys._MEIPASS
else:
# we are running in a normal Python environment
BASEDIR = os.path.dirname(os.path.abspath(__file__))
GETH_EXE = os.path.join(BASEDIR, 'geth.exe')
LAUNCHER_EXE = os.path.join(BASEDIR, 'augurlauncher.exe')
def <|fim_middle|>():
# first make all the appropriate directories
print("Making directories...")
for d in LAUNCHER_PATH, DATA_PATH:
print("Creating", d, end=" ", flush=True)
os.mkdir(d)
print("Success!")
print("Generating random password file...", end=" ", flush=True)
# then generate the password
password = binascii.b2a_hex(os.urandom(32))
passfile = open(PASSFILE, "w")
passfile.write(password.decode('ascii'))
passfile.close()
print("Success!")
# Then copy ".exe"s to the launcher path
exes = GETH_EXE, LAUNCHER_EXE
results = []
for exe in exes:
print("Copying", os.path.basename(exe), "to", LAUNCHER_PATH, "...", end=" ", flush=True)
results.append(shutil.copy(exe, LAUNCHER_PATH))
print("Sucess!")
print("Creating node account...", end=" ", flush=True)
# create account on node
p = subprocess.Popen([results[0],
"--password", PASSFILE,
"account", "new"])
p.wait()
print("Success!")
print("Creating shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
wDir = LAUNCHER_PATH
shell = Dispatch('WScript.Shell')
shortcut = shell.CreateShortCut(shortcut_path)
shortcut.Targetpath = results[1]
shortcut.WorkingDirectory = wDir
shortcut.IconLocation = results[1]
shortcut.save()
print("Success!")
def uninstall():
paths = LAUNCHER_PATH, DATA_PATH
for p in paths:
print("Deleting", p, "...", end=" ", flush=True)
shutil.rmtree(p)
print("Success!")
print("Removing desktop shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
os.remove(shortcut_path)
print("Success!")
if __name__ == '__main__':
try:
if len(sys.argv) == 2 and sys.argv[1] == 'uninstall':
uninstall()
elif len(sys.argv) == 1:
main()
else:
assert len(sys.argv) <= 2, "wrong number of arguments!"
except Exception as exc:
traceback.print_exc()
finally:
os.system("pause")
sys.exit(0)
<|fim▁end|> | main |
<|file_name|>Augur Installer.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import binascii
import traceback
import subprocess
from win32com.client import Dispatch
LAUNCHER_PATH = "C:\\Program Files\\Augur"
DATA_PATH = os.path.join(os.path.expanduser('~'), 'AppData', 'Roaming', "Augur")
PASSFILE = os.path.join(DATA_PATH, "password.txt")
if getattr(sys, 'frozen', False):
# we are running in a |PyInstaller| bundle
BASEDIR = sys._MEIPASS
else:
# we are running in a normal Python environment
BASEDIR = os.path.dirname(os.path.abspath(__file__))
GETH_EXE = os.path.join(BASEDIR, 'geth.exe')
LAUNCHER_EXE = os.path.join(BASEDIR, 'augurlauncher.exe')
def main():
# first make all the appropriate directories
print("Making directories...")
for d in LAUNCHER_PATH, DATA_PATH:
print("Creating", d, end=" ", flush=True)
os.mkdir(d)
print("Success!")
print("Generating random password file...", end=" ", flush=True)
# then generate the password
password = binascii.b2a_hex(os.urandom(32))
passfile = open(PASSFILE, "w")
passfile.write(password.decode('ascii'))
passfile.close()
print("Success!")
# Then copy ".exe"s to the launcher path
exes = GETH_EXE, LAUNCHER_EXE
results = []
for exe in exes:
print("Copying", os.path.basename(exe), "to", LAUNCHER_PATH, "...", end=" ", flush=True)
results.append(shutil.copy(exe, LAUNCHER_PATH))
print("Sucess!")
print("Creating node account...", end=" ", flush=True)
# create account on node
p = subprocess.Popen([results[0],
"--password", PASSFILE,
"account", "new"])
p.wait()
print("Success!")
print("Creating shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
wDir = LAUNCHER_PATH
shell = Dispatch('WScript.Shell')
shortcut = shell.CreateShortCut(shortcut_path)
shortcut.Targetpath = results[1]
shortcut.WorkingDirectory = wDir
shortcut.IconLocation = results[1]
shortcut.save()
print("Success!")
def <|fim_middle|>():
paths = LAUNCHER_PATH, DATA_PATH
for p in paths:
print("Deleting", p, "...", end=" ", flush=True)
shutil.rmtree(p)
print("Success!")
print("Removing desktop shortcut...", end=" ", flush=True)
desktop = os.path.join(os.path.expanduser('~'), 'Desktop')
shortcut_path = os.path.join(desktop, "Augur Launcher.lnk")
os.remove(shortcut_path)
print("Success!")
if __name__ == '__main__':
try:
if len(sys.argv) == 2 and sys.argv[1] == 'uninstall':
uninstall()
elif len(sys.argv) == 1:
main()
else:
assert len(sys.argv) <= 2, "wrong number of arguments!"
except Exception as exc:
traceback.print_exc()
finally:
os.system("pause")
sys.exit(0)
<|fim▁end|> | uninstall |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url
from audiotracks import feeds
from audiotracks import views
urlpatterns = [
url(r"^$", views.index, name="audiotracks"),
url(r"^(?P<page_number>\d+)/?$", views.index, name="audiotracks"),
url(r"^track/(?P<track_slug>.*)$", views.track_detail,<|fim▁hole|> url(r"^confirm_delete/(?P<track_id>\d+)$",
views.confirm_delete_track, name="confirm_delete_track"),
url(r"^delete$", views.delete_track, name="delete_track"),
url(r"^tracks$", views.user_index, name="user_index"),
url(r"^tracks/(?P<page_number>\d)/?$", views.user_index,
name="user_index"),
url(r"^feed/?$", feeds.choose_feed, name="tracks_feed"),
url(r"^player.js$", views.player_script, name="player_script"),
url(r"^m3u/?$", views.m3u, name="m3u"),
]<|fim▁end|> | name="track_detail"),
url(r"^upload", views.upload_track, name="upload_track"),
url(r"^edit/(?P<track_id>.+)", views.edit_track, name="edit_track"), |
<|file_name|>client_updater.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
from socketio import socketio_manage
from socketio.mixins import BroadcastMixin
from socketio.namespace import BaseNamespace
from DataAggregation.webdata_aggregator import getAvailableWorkshops
logger = logging.getLogger(__name__)
std_out_logger = logging.StreamHandler(sys.stdout)
logger.addHandler(std_out_logger)
def broadcast_msg(server, ns_name, event, *args):
pkt = dict(type="event",
name=event,
args=args,
endpoint=ns_name)<|fim▁hole|>
def workshops_monitor(server):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
broadcast_msg(server, '', "sizes", tmp)
while True:
logger.info("Participants viewing frontend:" + str(len(server.sockets)))
workshops_available = []
curr_workshops = getAvailableWorkshops()
for w in curr_workshops:
workshops_available.append([w.workshopName, w.q.qsize()])
wq = filter(lambda x: x[0] == w.workshopName, sizes)[0]
if wq[1] != w.q.qsize():
wq[1] = w.q.qsize()
logging.info("client_updater: New update being pushed to clients: " + str(wq))
broadcast_msg(server, '', 'sizes', wq)
logger.info("Workshops available:" + str(workshops_available))
time.sleep(1)
class RequestHandlerApp(object):
def __call__(self, environ, start_response):
if environ['PATH_INFO'].startswith('/socket.io'):
socketio_manage(environ, {'': QueueStatusHandler})
class QueueStatusHandler(BaseNamespace, BroadcastMixin):
def on_connect(self):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
self.emit('sizes', tmp)<|fim▁end|> |
for sessid, socket in server.sockets.iteritems():
socket.send_packet(pkt)
|
<|file_name|>client_updater.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
from socketio import socketio_manage
from socketio.mixins import BroadcastMixin
from socketio.namespace import BaseNamespace
from DataAggregation.webdata_aggregator import getAvailableWorkshops
logger = logging.getLogger(__name__)
std_out_logger = logging.StreamHandler(sys.stdout)
logger.addHandler(std_out_logger)
def broadcast_msg(server, ns_name, event, *args):
<|fim_middle|>
def workshops_monitor(server):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
broadcast_msg(server, '', "sizes", tmp)
while True:
logger.info("Participants viewing frontend:" + str(len(server.sockets)))
workshops_available = []
curr_workshops = getAvailableWorkshops()
for w in curr_workshops:
workshops_available.append([w.workshopName, w.q.qsize()])
wq = filter(lambda x: x[0] == w.workshopName, sizes)[0]
if wq[1] != w.q.qsize():
wq[1] = w.q.qsize()
logging.info("client_updater: New update being pushed to clients: " + str(wq))
broadcast_msg(server, '', 'sizes', wq)
logger.info("Workshops available:" + str(workshops_available))
time.sleep(1)
class RequestHandlerApp(object):
def __call__(self, environ, start_response):
if environ['PATH_INFO'].startswith('/socket.io'):
socketio_manage(environ, {'': QueueStatusHandler})
class QueueStatusHandler(BaseNamespace, BroadcastMixin):
def on_connect(self):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
self.emit('sizes', tmp)
<|fim▁end|> | pkt = dict(type="event",
name=event,
args=args,
endpoint=ns_name)
for sessid, socket in server.sockets.iteritems():
socket.send_packet(pkt) |
<|file_name|>client_updater.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
from socketio import socketio_manage
from socketio.mixins import BroadcastMixin
from socketio.namespace import BaseNamespace
from DataAggregation.webdata_aggregator import getAvailableWorkshops
logger = logging.getLogger(__name__)
std_out_logger = logging.StreamHandler(sys.stdout)
logger.addHandler(std_out_logger)
def broadcast_msg(server, ns_name, event, *args):
pkt = dict(type="event",
name=event,
args=args,
endpoint=ns_name)
for sessid, socket in server.sockets.iteritems():
socket.send_packet(pkt)
def workshops_monitor(server):
<|fim_middle|>
class RequestHandlerApp(object):
def __call__(self, environ, start_response):
if environ['PATH_INFO'].startswith('/socket.io'):
socketio_manage(environ, {'': QueueStatusHandler})
class QueueStatusHandler(BaseNamespace, BroadcastMixin):
def on_connect(self):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
self.emit('sizes', tmp)
<|fim▁end|> | sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
broadcast_msg(server, '', "sizes", tmp)
while True:
logger.info("Participants viewing frontend:" + str(len(server.sockets)))
workshops_available = []
curr_workshops = getAvailableWorkshops()
for w in curr_workshops:
workshops_available.append([w.workshopName, w.q.qsize()])
wq = filter(lambda x: x[0] == w.workshopName, sizes)[0]
if wq[1] != w.q.qsize():
wq[1] = w.q.qsize()
logging.info("client_updater: New update being pushed to clients: " + str(wq))
broadcast_msg(server, '', 'sizes', wq)
logger.info("Workshops available:" + str(workshops_available))
time.sleep(1) |
<|file_name|>client_updater.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
from socketio import socketio_manage
from socketio.mixins import BroadcastMixin
from socketio.namespace import BaseNamespace
from DataAggregation.webdata_aggregator import getAvailableWorkshops
logger = logging.getLogger(__name__)
std_out_logger = logging.StreamHandler(sys.stdout)
logger.addHandler(std_out_logger)
def broadcast_msg(server, ns_name, event, *args):
pkt = dict(type="event",
name=event,
args=args,
endpoint=ns_name)
for sessid, socket in server.sockets.iteritems():
socket.send_packet(pkt)
def workshops_monitor(server):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
broadcast_msg(server, '', "sizes", tmp)
while True:
logger.info("Participants viewing frontend:" + str(len(server.sockets)))
workshops_available = []
curr_workshops = getAvailableWorkshops()
for w in curr_workshops:
workshops_available.append([w.workshopName, w.q.qsize()])
wq = filter(lambda x: x[0] == w.workshopName, sizes)[0]
if wq[1] != w.q.qsize():
wq[1] = w.q.qsize()
logging.info("client_updater: New update being pushed to clients: " + str(wq))
broadcast_msg(server, '', 'sizes', wq)
logger.info("Workshops available:" + str(workshops_available))
time.sleep(1)
class RequestHandlerApp(object):
<|fim_middle|>
class QueueStatusHandler(BaseNamespace, BroadcastMixin):
def on_connect(self):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
self.emit('sizes', tmp)
<|fim▁end|> | def __call__(self, environ, start_response):
if environ['PATH_INFO'].startswith('/socket.io'):
socketio_manage(environ, {'': QueueStatusHandler}) |
<|file_name|>client_updater.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
from socketio import socketio_manage
from socketio.mixins import BroadcastMixin
from socketio.namespace import BaseNamespace
from DataAggregation.webdata_aggregator import getAvailableWorkshops
logger = logging.getLogger(__name__)
std_out_logger = logging.StreamHandler(sys.stdout)
logger.addHandler(std_out_logger)
def broadcast_msg(server, ns_name, event, *args):
pkt = dict(type="event",
name=event,
args=args,
endpoint=ns_name)
for sessid, socket in server.sockets.iteritems():
socket.send_packet(pkt)
def workshops_monitor(server):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
broadcast_msg(server, '', "sizes", tmp)
while True:
logger.info("Participants viewing frontend:" + str(len(server.sockets)))
workshops_available = []
curr_workshops = getAvailableWorkshops()
for w in curr_workshops:
workshops_available.append([w.workshopName, w.q.qsize()])
wq = filter(lambda x: x[0] == w.workshopName, sizes)[0]
if wq[1] != w.q.qsize():
wq[1] = w.q.qsize()
logging.info("client_updater: New update being pushed to clients: " + str(wq))
broadcast_msg(server, '', 'sizes', wq)
logger.info("Workshops available:" + str(workshops_available))
time.sleep(1)
class RequestHandlerApp(object):
def __call__(self, environ, start_response):
<|fim_middle|>
class QueueStatusHandler(BaseNamespace, BroadcastMixin):
def on_connect(self):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
self.emit('sizes', tmp)
<|fim▁end|> | if environ['PATH_INFO'].startswith('/socket.io'):
socketio_manage(environ, {'': QueueStatusHandler}) |
<|file_name|>client_updater.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
from socketio import socketio_manage
from socketio.mixins import BroadcastMixin
from socketio.namespace import BaseNamespace
from DataAggregation.webdata_aggregator import getAvailableWorkshops
logger = logging.getLogger(__name__)
std_out_logger = logging.StreamHandler(sys.stdout)
logger.addHandler(std_out_logger)
def broadcast_msg(server, ns_name, event, *args):
pkt = dict(type="event",
name=event,
args=args,
endpoint=ns_name)
for sessid, socket in server.sockets.iteritems():
socket.send_packet(pkt)
def workshops_monitor(server):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
broadcast_msg(server, '', "sizes", tmp)
while True:
logger.info("Participants viewing frontend:" + str(len(server.sockets)))
workshops_available = []
curr_workshops = getAvailableWorkshops()
for w in curr_workshops:
workshops_available.append([w.workshopName, w.q.qsize()])
wq = filter(lambda x: x[0] == w.workshopName, sizes)[0]
if wq[1] != w.q.qsize():
wq[1] = w.q.qsize()
logging.info("client_updater: New update being pushed to clients: " + str(wq))
broadcast_msg(server, '', 'sizes', wq)
logger.info("Workshops available:" + str(workshops_available))
time.sleep(1)
class RequestHandlerApp(object):
def __call__(self, environ, start_response):
if environ['PATH_INFO'].startswith('/socket.io'):
socketio_manage(environ, {'': QueueStatusHandler})
class QueueStatusHandler(BaseNamespace, BroadcastMixin):
<|fim_middle|>
<|fim▁end|> | def on_connect(self):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
self.emit('sizes', tmp) |
<|file_name|>client_updater.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
from socketio import socketio_manage
from socketio.mixins import BroadcastMixin
from socketio.namespace import BaseNamespace
from DataAggregation.webdata_aggregator import getAvailableWorkshops
logger = logging.getLogger(__name__)
std_out_logger = logging.StreamHandler(sys.stdout)
logger.addHandler(std_out_logger)
def broadcast_msg(server, ns_name, event, *args):
pkt = dict(type="event",
name=event,
args=args,
endpoint=ns_name)
for sessid, socket in server.sockets.iteritems():
socket.send_packet(pkt)
def workshops_monitor(server):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
broadcast_msg(server, '', "sizes", tmp)
while True:
logger.info("Participants viewing frontend:" + str(len(server.sockets)))
workshops_available = []
curr_workshops = getAvailableWorkshops()
for w in curr_workshops:
workshops_available.append([w.workshopName, w.q.qsize()])
wq = filter(lambda x: x[0] == w.workshopName, sizes)[0]
if wq[1] != w.q.qsize():
wq[1] = w.q.qsize()
logging.info("client_updater: New update being pushed to clients: " + str(wq))
broadcast_msg(server, '', 'sizes', wq)
logger.info("Workshops available:" + str(workshops_available))
time.sleep(1)
class RequestHandlerApp(object):
def __call__(self, environ, start_response):
if environ['PATH_INFO'].startswith('/socket.io'):
socketio_manage(environ, {'': QueueStatusHandler})
class QueueStatusHandler(BaseNamespace, BroadcastMixin):
def on_connect(self):
<|fim_middle|>
<|fim▁end|> | sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
self.emit('sizes', tmp) |
<|file_name|>client_updater.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
from socketio import socketio_manage
from socketio.mixins import BroadcastMixin
from socketio.namespace import BaseNamespace
from DataAggregation.webdata_aggregator import getAvailableWorkshops
logger = logging.getLogger(__name__)
std_out_logger = logging.StreamHandler(sys.stdout)
logger.addHandler(std_out_logger)
def broadcast_msg(server, ns_name, event, *args):
pkt = dict(type="event",
name=event,
args=args,
endpoint=ns_name)
for sessid, socket in server.sockets.iteritems():
socket.send_packet(pkt)
def workshops_monitor(server):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
broadcast_msg(server, '', "sizes", tmp)
while True:
logger.info("Participants viewing frontend:" + str(len(server.sockets)))
workshops_available = []
curr_workshops = getAvailableWorkshops()
for w in curr_workshops:
workshops_available.append([w.workshopName, w.q.qsize()])
wq = filter(lambda x: x[0] == w.workshopName, sizes)[0]
if wq[1] != w.q.qsize():
<|fim_middle|>
logger.info("Workshops available:" + str(workshops_available))
time.sleep(1)
class RequestHandlerApp(object):
def __call__(self, environ, start_response):
if environ['PATH_INFO'].startswith('/socket.io'):
socketio_manage(environ, {'': QueueStatusHandler})
class QueueStatusHandler(BaseNamespace, BroadcastMixin):
def on_connect(self):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
self.emit('sizes', tmp)
<|fim▁end|> | wq[1] = w.q.qsize()
logging.info("client_updater: New update being pushed to clients: " + str(wq))
broadcast_msg(server, '', 'sizes', wq) |
<|file_name|>client_updater.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
from socketio import socketio_manage
from socketio.mixins import BroadcastMixin
from socketio.namespace import BaseNamespace
from DataAggregation.webdata_aggregator import getAvailableWorkshops
logger = logging.getLogger(__name__)
std_out_logger = logging.StreamHandler(sys.stdout)
logger.addHandler(std_out_logger)
def broadcast_msg(server, ns_name, event, *args):
pkt = dict(type="event",
name=event,
args=args,
endpoint=ns_name)
for sessid, socket in server.sockets.iteritems():
socket.send_packet(pkt)
def workshops_monitor(server):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
broadcast_msg(server, '', "sizes", tmp)
while True:
logger.info("Participants viewing frontend:" + str(len(server.sockets)))
workshops_available = []
curr_workshops = getAvailableWorkshops()
for w in curr_workshops:
workshops_available.append([w.workshopName, w.q.qsize()])
wq = filter(lambda x: x[0] == w.workshopName, sizes)[0]
if wq[1] != w.q.qsize():
wq[1] = w.q.qsize()
logging.info("client_updater: New update being pushed to clients: " + str(wq))
broadcast_msg(server, '', 'sizes', wq)
logger.info("Workshops available:" + str(workshops_available))
time.sleep(1)
class RequestHandlerApp(object):
def __call__(self, environ, start_response):
if environ['PATH_INFO'].startswith('/socket.io'):
<|fim_middle|>
class QueueStatusHandler(BaseNamespace, BroadcastMixin):
def on_connect(self):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
self.emit('sizes', tmp)
<|fim▁end|> | socketio_manage(environ, {'': QueueStatusHandler}) |
<|file_name|>client_updater.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
from socketio import socketio_manage
from socketio.mixins import BroadcastMixin
from socketio.namespace import BaseNamespace
from DataAggregation.webdata_aggregator import getAvailableWorkshops
logger = logging.getLogger(__name__)
std_out_logger = logging.StreamHandler(sys.stdout)
logger.addHandler(std_out_logger)
def <|fim_middle|>(server, ns_name, event, *args):
pkt = dict(type="event",
name=event,
args=args,
endpoint=ns_name)
for sessid, socket in server.sockets.iteritems():
socket.send_packet(pkt)
def workshops_monitor(server):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
broadcast_msg(server, '', "sizes", tmp)
while True:
logger.info("Participants viewing frontend:" + str(len(server.sockets)))
workshops_available = []
curr_workshops = getAvailableWorkshops()
for w in curr_workshops:
workshops_available.append([w.workshopName, w.q.qsize()])
wq = filter(lambda x: x[0] == w.workshopName, sizes)[0]
if wq[1] != w.q.qsize():
wq[1] = w.q.qsize()
logging.info("client_updater: New update being pushed to clients: " + str(wq))
broadcast_msg(server, '', 'sizes', wq)
logger.info("Workshops available:" + str(workshops_available))
time.sleep(1)
class RequestHandlerApp(object):
def __call__(self, environ, start_response):
if environ['PATH_INFO'].startswith('/socket.io'):
socketio_manage(environ, {'': QueueStatusHandler})
class QueueStatusHandler(BaseNamespace, BroadcastMixin):
def on_connect(self):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
self.emit('sizes', tmp)
<|fim▁end|> | broadcast_msg |
<|file_name|>client_updater.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
from socketio import socketio_manage
from socketio.mixins import BroadcastMixin
from socketio.namespace import BaseNamespace
from DataAggregation.webdata_aggregator import getAvailableWorkshops
logger = logging.getLogger(__name__)
std_out_logger = logging.StreamHandler(sys.stdout)
logger.addHandler(std_out_logger)
def broadcast_msg(server, ns_name, event, *args):
pkt = dict(type="event",
name=event,
args=args,
endpoint=ns_name)
for sessid, socket in server.sockets.iteritems():
socket.send_packet(pkt)
def <|fim_middle|>(server):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
broadcast_msg(server, '', "sizes", tmp)
while True:
logger.info("Participants viewing frontend:" + str(len(server.sockets)))
workshops_available = []
curr_workshops = getAvailableWorkshops()
for w in curr_workshops:
workshops_available.append([w.workshopName, w.q.qsize()])
wq = filter(lambda x: x[0] == w.workshopName, sizes)[0]
if wq[1] != w.q.qsize():
wq[1] = w.q.qsize()
logging.info("client_updater: New update being pushed to clients: " + str(wq))
broadcast_msg(server, '', 'sizes', wq)
logger.info("Workshops available:" + str(workshops_available))
time.sleep(1)
class RequestHandlerApp(object):
def __call__(self, environ, start_response):
if environ['PATH_INFO'].startswith('/socket.io'):
socketio_manage(environ, {'': QueueStatusHandler})
class QueueStatusHandler(BaseNamespace, BroadcastMixin):
def on_connect(self):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
self.emit('sizes', tmp)
<|fim▁end|> | workshops_monitor |
<|file_name|>client_updater.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
from socketio import socketio_manage
from socketio.mixins import BroadcastMixin
from socketio.namespace import BaseNamespace
from DataAggregation.webdata_aggregator import getAvailableWorkshops
logger = logging.getLogger(__name__)
std_out_logger = logging.StreamHandler(sys.stdout)
logger.addHandler(std_out_logger)
def broadcast_msg(server, ns_name, event, *args):
pkt = dict(type="event",
name=event,
args=args,
endpoint=ns_name)
for sessid, socket in server.sockets.iteritems():
socket.send_packet(pkt)
def workshops_monitor(server):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
broadcast_msg(server, '', "sizes", tmp)
while True:
logger.info("Participants viewing frontend:" + str(len(server.sockets)))
workshops_available = []
curr_workshops = getAvailableWorkshops()
for w in curr_workshops:
workshops_available.append([w.workshopName, w.q.qsize()])
wq = filter(lambda x: x[0] == w.workshopName, sizes)[0]
if wq[1] != w.q.qsize():
wq[1] = w.q.qsize()
logging.info("client_updater: New update being pushed to clients: " + str(wq))
broadcast_msg(server, '', 'sizes', wq)
logger.info("Workshops available:" + str(workshops_available))
time.sleep(1)
class RequestHandlerApp(object):
def <|fim_middle|>(self, environ, start_response):
if environ['PATH_INFO'].startswith('/socket.io'):
socketio_manage(environ, {'': QueueStatusHandler})
class QueueStatusHandler(BaseNamespace, BroadcastMixin):
def on_connect(self):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
self.emit('sizes', tmp)
<|fim▁end|> | __call__ |
<|file_name|>client_updater.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
from socketio import socketio_manage
from socketio.mixins import BroadcastMixin
from socketio.namespace import BaseNamespace
from DataAggregation.webdata_aggregator import getAvailableWorkshops
logger = logging.getLogger(__name__)
std_out_logger = logging.StreamHandler(sys.stdout)
logger.addHandler(std_out_logger)
def broadcast_msg(server, ns_name, event, *args):
pkt = dict(type="event",
name=event,
args=args,
endpoint=ns_name)
for sessid, socket in server.sockets.iteritems():
socket.send_packet(pkt)
def workshops_monitor(server):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
broadcast_msg(server, '', "sizes", tmp)
while True:
logger.info("Participants viewing frontend:" + str(len(server.sockets)))
workshops_available = []
curr_workshops = getAvailableWorkshops()
for w in curr_workshops:
workshops_available.append([w.workshopName, w.q.qsize()])
wq = filter(lambda x: x[0] == w.workshopName, sizes)[0]
if wq[1] != w.q.qsize():
wq[1] = w.q.qsize()
logging.info("client_updater: New update being pushed to clients: " + str(wq))
broadcast_msg(server, '', 'sizes', wq)
logger.info("Workshops available:" + str(workshops_available))
time.sleep(1)
class RequestHandlerApp(object):
def __call__(self, environ, start_response):
if environ['PATH_INFO'].startswith('/socket.io'):
socketio_manage(environ, {'': QueueStatusHandler})
class QueueStatusHandler(BaseNamespace, BroadcastMixin):
def <|fim_middle|>(self):
sizes = []
workshops = getAvailableWorkshops()
for w in workshops:
tmp = [w.workshopName, w.q.qsize()]
sizes.append(tmp)
self.emit('sizes', tmp)
<|fim▁end|> | on_connect |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>"""api_server URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf<|fim▁hole|>"""
from django.conf.urls import url, include
from django.contrib import admin
version = 'v1.0'
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'api/%s/' % version, include('apis.urls'))
]<|fim▁end|> | 1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) |
<|file_name|>scraper.py<|end_file_name|><|fim▁begin|>import os, scrapy, argparse
from realclearpolitics.spiders.spider import RcpSpider
from scrapy.crawler import CrawlerProcess
parser = argparse.ArgumentParser('Scrap realclearpolitics polls data')
parser.add_argument('url', action="store")
parser.add_argument('--locale', action="store", default='')
parser.add_argument('--race', action="store", default='primary')
parser.add_argument('--csv', dest='to_csv', action='store_true')
parser.add_argument('--output', dest='output', action='store')
args = parser.parse_args()
url = args.url<|fim▁hole|> filename = url.split('/')[-1].split('.')[0]
output = filename + ".csv"
print("No output file specified : using " + output)
else:
output = args.output
if not output.endswith(".csv"):
output = output + ".csv"
if os.path.isfile(output):
os.remove(output)
os.system("scrapy crawl realclearpoliticsSpider -a url="+url+" -o "+output)
else:
settings = {
'ITEM_PIPELINES' : {
'realclearpolitics.pipeline.PollPipeline': 300,
},
'LOG_LEVEL' : 'ERROR',
'DOWNLOAD_HANDLERS' : {'s3': None,}
}
process = CrawlerProcess(settings);
process.crawl(RcpSpider, url, extra_fields)
process.start()<|fim▁end|> | extra_fields = { 'locale': args.locale, 'race': args.race }
if (args.to_csv):
if args.output is None: |
<|file_name|>scraper.py<|end_file_name|><|fim▁begin|>import os, scrapy, argparse
from realclearpolitics.spiders.spider import RcpSpider
from scrapy.crawler import CrawlerProcess
parser = argparse.ArgumentParser('Scrap realclearpolitics polls data')
parser.add_argument('url', action="store")
parser.add_argument('--locale', action="store", default='')
parser.add_argument('--race', action="store", default='primary')
parser.add_argument('--csv', dest='to_csv', action='store_true')
parser.add_argument('--output', dest='output', action='store')
args = parser.parse_args()
url = args.url
extra_fields = { 'locale': args.locale, 'race': args.race }
if (args.to_csv):
<|fim_middle|>
else:
settings = {
'ITEM_PIPELINES' : {
'realclearpolitics.pipeline.PollPipeline': 300,
},
'LOG_LEVEL' : 'ERROR',
'DOWNLOAD_HANDLERS' : {'s3': None,}
}
process = CrawlerProcess(settings);
process.crawl(RcpSpider, url, extra_fields)
process.start()
<|fim▁end|> | if args.output is None:
filename = url.split('/')[-1].split('.')[0]
output = filename + ".csv"
print("No output file specified : using " + output)
else:
output = args.output
if not output.endswith(".csv"):
output = output + ".csv"
if os.path.isfile(output):
os.remove(output)
os.system("scrapy crawl realclearpoliticsSpider -a url="+url+" -o "+output) |
<|file_name|>scraper.py<|end_file_name|><|fim▁begin|>import os, scrapy, argparse
from realclearpolitics.spiders.spider import RcpSpider
from scrapy.crawler import CrawlerProcess
parser = argparse.ArgumentParser('Scrap realclearpolitics polls data')
parser.add_argument('url', action="store")
parser.add_argument('--locale', action="store", default='')
parser.add_argument('--race', action="store", default='primary')
parser.add_argument('--csv', dest='to_csv', action='store_true')
parser.add_argument('--output', dest='output', action='store')
args = parser.parse_args()
url = args.url
extra_fields = { 'locale': args.locale, 'race': args.race }
if (args.to_csv):
if args.output is None:
<|fim_middle|>
else:
output = args.output
if not output.endswith(".csv"):
output = output + ".csv"
if os.path.isfile(output):
os.remove(output)
os.system("scrapy crawl realclearpoliticsSpider -a url="+url+" -o "+output)
else:
settings = {
'ITEM_PIPELINES' : {
'realclearpolitics.pipeline.PollPipeline': 300,
},
'LOG_LEVEL' : 'ERROR',
'DOWNLOAD_HANDLERS' : {'s3': None,}
}
process = CrawlerProcess(settings);
process.crawl(RcpSpider, url, extra_fields)
process.start()
<|fim▁end|> | filename = url.split('/')[-1].split('.')[0]
output = filename + ".csv"
print("No output file specified : using " + output) |
<|file_name|>scraper.py<|end_file_name|><|fim▁begin|>import os, scrapy, argparse
from realclearpolitics.spiders.spider import RcpSpider
from scrapy.crawler import CrawlerProcess
parser = argparse.ArgumentParser('Scrap realclearpolitics polls data')
parser.add_argument('url', action="store")
parser.add_argument('--locale', action="store", default='')
parser.add_argument('--race', action="store", default='primary')
parser.add_argument('--csv', dest='to_csv', action='store_true')
parser.add_argument('--output', dest='output', action='store')
args = parser.parse_args()
url = args.url
extra_fields = { 'locale': args.locale, 'race': args.race }
if (args.to_csv):
if args.output is None:
filename = url.split('/')[-1].split('.')[0]
output = filename + ".csv"
print("No output file specified : using " + output)
else:
<|fim_middle|>
if os.path.isfile(output):
os.remove(output)
os.system("scrapy crawl realclearpoliticsSpider -a url="+url+" -o "+output)
else:
settings = {
'ITEM_PIPELINES' : {
'realclearpolitics.pipeline.PollPipeline': 300,
},
'LOG_LEVEL' : 'ERROR',
'DOWNLOAD_HANDLERS' : {'s3': None,}
}
process = CrawlerProcess(settings);
process.crawl(RcpSpider, url, extra_fields)
process.start()
<|fim▁end|> | output = args.output
if not output.endswith(".csv"):
output = output + ".csv" |
<|file_name|>scraper.py<|end_file_name|><|fim▁begin|>import os, scrapy, argparse
from realclearpolitics.spiders.spider import RcpSpider
from scrapy.crawler import CrawlerProcess
parser = argparse.ArgumentParser('Scrap realclearpolitics polls data')
parser.add_argument('url', action="store")
parser.add_argument('--locale', action="store", default='')
parser.add_argument('--race', action="store", default='primary')
parser.add_argument('--csv', dest='to_csv', action='store_true')
parser.add_argument('--output', dest='output', action='store')
args = parser.parse_args()
url = args.url
extra_fields = { 'locale': args.locale, 'race': args.race }
if (args.to_csv):
if args.output is None:
filename = url.split('/')[-1].split('.')[0]
output = filename + ".csv"
print("No output file specified : using " + output)
else:
output = args.output
if not output.endswith(".csv"):
<|fim_middle|>
if os.path.isfile(output):
os.remove(output)
os.system("scrapy crawl realclearpoliticsSpider -a url="+url+" -o "+output)
else:
settings = {
'ITEM_PIPELINES' : {
'realclearpolitics.pipeline.PollPipeline': 300,
},
'LOG_LEVEL' : 'ERROR',
'DOWNLOAD_HANDLERS' : {'s3': None,}
}
process = CrawlerProcess(settings);
process.crawl(RcpSpider, url, extra_fields)
process.start()
<|fim▁end|> | output = output + ".csv" |
<|file_name|>scraper.py<|end_file_name|><|fim▁begin|>import os, scrapy, argparse
from realclearpolitics.spiders.spider import RcpSpider
from scrapy.crawler import CrawlerProcess
parser = argparse.ArgumentParser('Scrap realclearpolitics polls data')
parser.add_argument('url', action="store")
parser.add_argument('--locale', action="store", default='')
parser.add_argument('--race', action="store", default='primary')
parser.add_argument('--csv', dest='to_csv', action='store_true')
parser.add_argument('--output', dest='output', action='store')
args = parser.parse_args()
url = args.url
extra_fields = { 'locale': args.locale, 'race': args.race }
if (args.to_csv):
if args.output is None:
filename = url.split('/')[-1].split('.')[0]
output = filename + ".csv"
print("No output file specified : using " + output)
else:
output = args.output
if not output.endswith(".csv"):
output = output + ".csv"
if os.path.isfile(output):
<|fim_middle|>
os.system("scrapy crawl realclearpoliticsSpider -a url="+url+" -o "+output)
else:
settings = {
'ITEM_PIPELINES' : {
'realclearpolitics.pipeline.PollPipeline': 300,
},
'LOG_LEVEL' : 'ERROR',
'DOWNLOAD_HANDLERS' : {'s3': None,}
}
process = CrawlerProcess(settings);
process.crawl(RcpSpider, url, extra_fields)
process.start()
<|fim▁end|> | os.remove(output) |
<|file_name|>scraper.py<|end_file_name|><|fim▁begin|>import os, scrapy, argparse
from realclearpolitics.spiders.spider import RcpSpider
from scrapy.crawler import CrawlerProcess
parser = argparse.ArgumentParser('Scrap realclearpolitics polls data')
parser.add_argument('url', action="store")
parser.add_argument('--locale', action="store", default='')
parser.add_argument('--race', action="store", default='primary')
parser.add_argument('--csv', dest='to_csv', action='store_true')
parser.add_argument('--output', dest='output', action='store')
args = parser.parse_args()
url = args.url
extra_fields = { 'locale': args.locale, 'race': args.race }
if (args.to_csv):
if args.output is None:
filename = url.split('/')[-1].split('.')[0]
output = filename + ".csv"
print("No output file specified : using " + output)
else:
output = args.output
if not output.endswith(".csv"):
output = output + ".csv"
if os.path.isfile(output):
os.remove(output)
os.system("scrapy crawl realclearpoliticsSpider -a url="+url+" -o "+output)
else:
<|fim_middle|>
<|fim▁end|> | settings = {
'ITEM_PIPELINES' : {
'realclearpolitics.pipeline.PollPipeline': 300,
},
'LOG_LEVEL' : 'ERROR',
'DOWNLOAD_HANDLERS' : {'s3': None,}
}
process = CrawlerProcess(settings);
process.crawl(RcpSpider, url, extra_fields)
process.start() |
<|file_name|>beta_decrease.py<|end_file_name|><|fim▁begin|>from .naive import StratNaive
import random
import numpy as np
class BetaDecreaseStrat(StratNaive):
def __init__(self, vu_cfg, time_scale=0.9, **strat_cfg2):
StratNaive.__init__(self,vu_cfg=vu_cfg, **strat_cfg2)
self.time_scale = time_scale
def update_speaker(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_speaker(ms, w, mh, voc, mem, bool_succ, context)
<|fim▁hole|> self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_hearer(ms, w, mh, voc, mem, bool_succ, context)<|fim▁end|> | def update_hearer(self, ms, w, mh, voc, mem, bool_succ, context=[]): |
<|file_name|>beta_decrease.py<|end_file_name|><|fim▁begin|>
from .naive import StratNaive
import random
import numpy as np
class BetaDecreaseStrat(StratNaive):
<|fim_middle|>
<|fim▁end|> | def __init__(self, vu_cfg, time_scale=0.9, **strat_cfg2):
StratNaive.__init__(self,vu_cfg=vu_cfg, **strat_cfg2)
self.time_scale = time_scale
def update_speaker(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_speaker(ms, w, mh, voc, mem, bool_succ, context)
def update_hearer(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_hearer(ms, w, mh, voc, mem, bool_succ, context) |
<|file_name|>beta_decrease.py<|end_file_name|><|fim▁begin|>
from .naive import StratNaive
import random
import numpy as np
class BetaDecreaseStrat(StratNaive):
def __init__(self, vu_cfg, time_scale=0.9, **strat_cfg2):
<|fim_middle|>
def update_speaker(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_speaker(ms, w, mh, voc, mem, bool_succ, context)
def update_hearer(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_hearer(ms, w, mh, voc, mem, bool_succ, context)
<|fim▁end|> | StratNaive.__init__(self,vu_cfg=vu_cfg, **strat_cfg2)
self.time_scale = time_scale |
<|file_name|>beta_decrease.py<|end_file_name|><|fim▁begin|>
from .naive import StratNaive
import random
import numpy as np
class BetaDecreaseStrat(StratNaive):
def __init__(self, vu_cfg, time_scale=0.9, **strat_cfg2):
StratNaive.__init__(self,vu_cfg=vu_cfg, **strat_cfg2)
self.time_scale = time_scale
def update_speaker(self, ms, w, mh, voc, mem, bool_succ, context=[]):
<|fim_middle|>
def update_hearer(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_hearer(ms, w, mh, voc, mem, bool_succ, context)
<|fim▁end|> | self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_speaker(ms, w, mh, voc, mem, bool_succ, context) |
<|file_name|>beta_decrease.py<|end_file_name|><|fim▁begin|>
from .naive import StratNaive
import random
import numpy as np
class BetaDecreaseStrat(StratNaive):
def __init__(self, vu_cfg, time_scale=0.9, **strat_cfg2):
StratNaive.__init__(self,vu_cfg=vu_cfg, **strat_cfg2)
self.time_scale = time_scale
def update_speaker(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_speaker(ms, w, mh, voc, mem, bool_succ, context)
def update_hearer(self, ms, w, mh, voc, mem, bool_succ, context=[]):
<|fim_middle|>
<|fim▁end|> | self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_hearer(ms, w, mh, voc, mem, bool_succ, context) |
<|file_name|>beta_decrease.py<|end_file_name|><|fim▁begin|>
from .naive import StratNaive
import random
import numpy as np
class BetaDecreaseStrat(StratNaive):
def <|fim_middle|>(self, vu_cfg, time_scale=0.9, **strat_cfg2):
StratNaive.__init__(self,vu_cfg=vu_cfg, **strat_cfg2)
self.time_scale = time_scale
def update_speaker(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_speaker(ms, w, mh, voc, mem, bool_succ, context)
def update_hearer(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_hearer(ms, w, mh, voc, mem, bool_succ, context)
<|fim▁end|> | __init__ |
<|file_name|>beta_decrease.py<|end_file_name|><|fim▁begin|>
from .naive import StratNaive
import random
import numpy as np
class BetaDecreaseStrat(StratNaive):
def __init__(self, vu_cfg, time_scale=0.9, **strat_cfg2):
StratNaive.__init__(self,vu_cfg=vu_cfg, **strat_cfg2)
self.time_scale = time_scale
def <|fim_middle|>(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_speaker(ms, w, mh, voc, mem, bool_succ, context)
def update_hearer(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_hearer(ms, w, mh, voc, mem, bool_succ, context)
<|fim▁end|> | update_speaker |
<|file_name|>beta_decrease.py<|end_file_name|><|fim▁begin|>
from .naive import StratNaive
import random
import numpy as np
class BetaDecreaseStrat(StratNaive):
def __init__(self, vu_cfg, time_scale=0.9, **strat_cfg2):
StratNaive.__init__(self,vu_cfg=vu_cfg, **strat_cfg2)
self.time_scale = time_scale
def update_speaker(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_speaker(ms, w, mh, voc, mem, bool_succ, context)
def <|fim_middle|>(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_hearer(ms, w, mh, voc, mem, bool_succ, context)
<|fim▁end|> | update_hearer |
<|file_name|>_mock.py<|end_file_name|><|fim▁begin|>from typing import Dict, List, Optional
from ray.tune.suggest.suggestion import Searcher, ConcurrencyLimiter
from ray.tune.suggest.search_generator import SearchGenerator
from ray.tune.trial import Trial
class _MockSearcher(Searcher):
def __init__(self, **kwargs):
self.live_trials = {}
self.counter = {"result": 0, "complete": 0}
self.final_results = []
self.stall = False
self.results = []
super(_MockSearcher, self).__init__(**kwargs)
def suggest(self, trial_id: str):
if not self.stall:
self.live_trials[trial_id] = 1
return {"test_variable": 2}
return None
def on_trial_result(self, trial_id: str, result: Dict):
self.counter["result"] += 1
self.results += [result]
def on_trial_complete(
self, trial_id: str, result: Optional[Dict] = None, error: bool = False
):
self.counter["complete"] += 1
if result:
self._process_result(result)
if trial_id in self.live_trials:
del self.live_trials[trial_id]
def _process_result(self, result: Dict):
self.final_results += [result]
class _MockSuggestionAlgorithm(SearchGenerator):
def __init__(self, max_concurrent: Optional[int] = None, **kwargs):
self.searcher = _MockSearcher(**kwargs)
if max_concurrent:
self.searcher = ConcurrencyLimiter(
self.searcher, max_concurrent=max_concurrent
)
super(_MockSuggestionAlgorithm, self).__init__(self.searcher)
@property
def live_trials(self) -> List[Trial]:
return self.searcher.live_trials
<|fim▁hole|> return self.searcher.results<|fim▁end|> | @property
def results(self) -> List[Dict]: |
<|file_name|>_mock.py<|end_file_name|><|fim▁begin|>from typing import Dict, List, Optional
from ray.tune.suggest.suggestion import Searcher, ConcurrencyLimiter
from ray.tune.suggest.search_generator import SearchGenerator
from ray.tune.trial import Trial
class _MockSearcher(Searcher):
<|fim_middle|>
class _MockSuggestionAlgorithm(SearchGenerator):
def __init__(self, max_concurrent: Optional[int] = None, **kwargs):
self.searcher = _MockSearcher(**kwargs)
if max_concurrent:
self.searcher = ConcurrencyLimiter(
self.searcher, max_concurrent=max_concurrent
)
super(_MockSuggestionAlgorithm, self).__init__(self.searcher)
@property
def live_trials(self) -> List[Trial]:
return self.searcher.live_trials
@property
def results(self) -> List[Dict]:
return self.searcher.results
<|fim▁end|> | def __init__(self, **kwargs):
self.live_trials = {}
self.counter = {"result": 0, "complete": 0}
self.final_results = []
self.stall = False
self.results = []
super(_MockSearcher, self).__init__(**kwargs)
def suggest(self, trial_id: str):
if not self.stall:
self.live_trials[trial_id] = 1
return {"test_variable": 2}
return None
def on_trial_result(self, trial_id: str, result: Dict):
self.counter["result"] += 1
self.results += [result]
def on_trial_complete(
self, trial_id: str, result: Optional[Dict] = None, error: bool = False
):
self.counter["complete"] += 1
if result:
self._process_result(result)
if trial_id in self.live_trials:
del self.live_trials[trial_id]
def _process_result(self, result: Dict):
self.final_results += [result] |
<|file_name|>_mock.py<|end_file_name|><|fim▁begin|>from typing import Dict, List, Optional
from ray.tune.suggest.suggestion import Searcher, ConcurrencyLimiter
from ray.tune.suggest.search_generator import SearchGenerator
from ray.tune.trial import Trial
class _MockSearcher(Searcher):
def __init__(self, **kwargs):
<|fim_middle|>
def suggest(self, trial_id: str):
if not self.stall:
self.live_trials[trial_id] = 1
return {"test_variable": 2}
return None
def on_trial_result(self, trial_id: str, result: Dict):
self.counter["result"] += 1
self.results += [result]
def on_trial_complete(
self, trial_id: str, result: Optional[Dict] = None, error: bool = False
):
self.counter["complete"] += 1
if result:
self._process_result(result)
if trial_id in self.live_trials:
del self.live_trials[trial_id]
def _process_result(self, result: Dict):
self.final_results += [result]
class _MockSuggestionAlgorithm(SearchGenerator):
def __init__(self, max_concurrent: Optional[int] = None, **kwargs):
self.searcher = _MockSearcher(**kwargs)
if max_concurrent:
self.searcher = ConcurrencyLimiter(
self.searcher, max_concurrent=max_concurrent
)
super(_MockSuggestionAlgorithm, self).__init__(self.searcher)
@property
def live_trials(self) -> List[Trial]:
return self.searcher.live_trials
@property
def results(self) -> List[Dict]:
return self.searcher.results
<|fim▁end|> | self.live_trials = {}
self.counter = {"result": 0, "complete": 0}
self.final_results = []
self.stall = False
self.results = []
super(_MockSearcher, self).__init__(**kwargs) |
<|file_name|>_mock.py<|end_file_name|><|fim▁begin|>from typing import Dict, List, Optional
from ray.tune.suggest.suggestion import Searcher, ConcurrencyLimiter
from ray.tune.suggest.search_generator import SearchGenerator
from ray.tune.trial import Trial
class _MockSearcher(Searcher):
def __init__(self, **kwargs):
self.live_trials = {}
self.counter = {"result": 0, "complete": 0}
self.final_results = []
self.stall = False
self.results = []
super(_MockSearcher, self).__init__(**kwargs)
def suggest(self, trial_id: str):
<|fim_middle|>
def on_trial_result(self, trial_id: str, result: Dict):
self.counter["result"] += 1
self.results += [result]
def on_trial_complete(
self, trial_id: str, result: Optional[Dict] = None, error: bool = False
):
self.counter["complete"] += 1
if result:
self._process_result(result)
if trial_id in self.live_trials:
del self.live_trials[trial_id]
def _process_result(self, result: Dict):
self.final_results += [result]
class _MockSuggestionAlgorithm(SearchGenerator):
def __init__(self, max_concurrent: Optional[int] = None, **kwargs):
self.searcher = _MockSearcher(**kwargs)
if max_concurrent:
self.searcher = ConcurrencyLimiter(
self.searcher, max_concurrent=max_concurrent
)
super(_MockSuggestionAlgorithm, self).__init__(self.searcher)
@property
def live_trials(self) -> List[Trial]:
return self.searcher.live_trials
@property
def results(self) -> List[Dict]:
return self.searcher.results
<|fim▁end|> | if not self.stall:
self.live_trials[trial_id] = 1
return {"test_variable": 2}
return None |
<|file_name|>_mock.py<|end_file_name|><|fim▁begin|>from typing import Dict, List, Optional
from ray.tune.suggest.suggestion import Searcher, ConcurrencyLimiter
from ray.tune.suggest.search_generator import SearchGenerator
from ray.tune.trial import Trial
class _MockSearcher(Searcher):
def __init__(self, **kwargs):
self.live_trials = {}
self.counter = {"result": 0, "complete": 0}
self.final_results = []
self.stall = False
self.results = []
super(_MockSearcher, self).__init__(**kwargs)
def suggest(self, trial_id: str):
if not self.stall:
self.live_trials[trial_id] = 1
return {"test_variable": 2}
return None
def on_trial_result(self, trial_id: str, result: Dict):
<|fim_middle|>
def on_trial_complete(
self, trial_id: str, result: Optional[Dict] = None, error: bool = False
):
self.counter["complete"] += 1
if result:
self._process_result(result)
if trial_id in self.live_trials:
del self.live_trials[trial_id]
def _process_result(self, result: Dict):
self.final_results += [result]
class _MockSuggestionAlgorithm(SearchGenerator):
def __init__(self, max_concurrent: Optional[int] = None, **kwargs):
self.searcher = _MockSearcher(**kwargs)
if max_concurrent:
self.searcher = ConcurrencyLimiter(
self.searcher, max_concurrent=max_concurrent
)
super(_MockSuggestionAlgorithm, self).__init__(self.searcher)
@property
def live_trials(self) -> List[Trial]:
return self.searcher.live_trials
@property
def results(self) -> List[Dict]:
return self.searcher.results
<|fim▁end|> | self.counter["result"] += 1
self.results += [result] |
<|file_name|>_mock.py<|end_file_name|><|fim▁begin|>from typing import Dict, List, Optional
from ray.tune.suggest.suggestion import Searcher, ConcurrencyLimiter
from ray.tune.suggest.search_generator import SearchGenerator
from ray.tune.trial import Trial
class _MockSearcher(Searcher):
def __init__(self, **kwargs):
self.live_trials = {}
self.counter = {"result": 0, "complete": 0}
self.final_results = []
self.stall = False
self.results = []
super(_MockSearcher, self).__init__(**kwargs)
def suggest(self, trial_id: str):
if not self.stall:
self.live_trials[trial_id] = 1
return {"test_variable": 2}
return None
def on_trial_result(self, trial_id: str, result: Dict):
self.counter["result"] += 1
self.results += [result]
def on_trial_complete(
self, trial_id: str, result: Optional[Dict] = None, error: bool = False
):
<|fim_middle|>
def _process_result(self, result: Dict):
self.final_results += [result]
class _MockSuggestionAlgorithm(SearchGenerator):
def __init__(self, max_concurrent: Optional[int] = None, **kwargs):
self.searcher = _MockSearcher(**kwargs)
if max_concurrent:
self.searcher = ConcurrencyLimiter(
self.searcher, max_concurrent=max_concurrent
)
super(_MockSuggestionAlgorithm, self).__init__(self.searcher)
@property
def live_trials(self) -> List[Trial]:
return self.searcher.live_trials
@property
def results(self) -> List[Dict]:
return self.searcher.results
<|fim▁end|> | self.counter["complete"] += 1
if result:
self._process_result(result)
if trial_id in self.live_trials:
del self.live_trials[trial_id] |
<|file_name|>_mock.py<|end_file_name|><|fim▁begin|>from typing import Dict, List, Optional
from ray.tune.suggest.suggestion import Searcher, ConcurrencyLimiter
from ray.tune.suggest.search_generator import SearchGenerator
from ray.tune.trial import Trial
class _MockSearcher(Searcher):
def __init__(self, **kwargs):
self.live_trials = {}
self.counter = {"result": 0, "complete": 0}
self.final_results = []
self.stall = False
self.results = []
super(_MockSearcher, self).__init__(**kwargs)
def suggest(self, trial_id: str):
if not self.stall:
self.live_trials[trial_id] = 1
return {"test_variable": 2}
return None
def on_trial_result(self, trial_id: str, result: Dict):
self.counter["result"] += 1
self.results += [result]
def on_trial_complete(
self, trial_id: str, result: Optional[Dict] = None, error: bool = False
):
self.counter["complete"] += 1
if result:
self._process_result(result)
if trial_id in self.live_trials:
del self.live_trials[trial_id]
def _process_result(self, result: Dict):
<|fim_middle|>
class _MockSuggestionAlgorithm(SearchGenerator):
def __init__(self, max_concurrent: Optional[int] = None, **kwargs):
self.searcher = _MockSearcher(**kwargs)
if max_concurrent:
self.searcher = ConcurrencyLimiter(
self.searcher, max_concurrent=max_concurrent
)
super(_MockSuggestionAlgorithm, self).__init__(self.searcher)
@property
def live_trials(self) -> List[Trial]:
return self.searcher.live_trials
@property
def results(self) -> List[Dict]:
return self.searcher.results
<|fim▁end|> | self.final_results += [result] |
<|file_name|>_mock.py<|end_file_name|><|fim▁begin|>from typing import Dict, List, Optional
from ray.tune.suggest.suggestion import Searcher, ConcurrencyLimiter
from ray.tune.suggest.search_generator import SearchGenerator
from ray.tune.trial import Trial
class _MockSearcher(Searcher):
def __init__(self, **kwargs):
self.live_trials = {}
self.counter = {"result": 0, "complete": 0}
self.final_results = []
self.stall = False
self.results = []
super(_MockSearcher, self).__init__(**kwargs)
def suggest(self, trial_id: str):
if not self.stall:
self.live_trials[trial_id] = 1
return {"test_variable": 2}
return None
def on_trial_result(self, trial_id: str, result: Dict):
self.counter["result"] += 1
self.results += [result]
def on_trial_complete(
self, trial_id: str, result: Optional[Dict] = None, error: bool = False
):
self.counter["complete"] += 1
if result:
self._process_result(result)
if trial_id in self.live_trials:
del self.live_trials[trial_id]
def _process_result(self, result: Dict):
self.final_results += [result]
class _MockSuggestionAlgorithm(SearchGenerator):
<|fim_middle|>
<|fim▁end|> | def __init__(self, max_concurrent: Optional[int] = None, **kwargs):
self.searcher = _MockSearcher(**kwargs)
if max_concurrent:
self.searcher = ConcurrencyLimiter(
self.searcher, max_concurrent=max_concurrent
)
super(_MockSuggestionAlgorithm, self).__init__(self.searcher)
@property
def live_trials(self) -> List[Trial]:
return self.searcher.live_trials
@property
def results(self) -> List[Dict]:
return self.searcher.results |
<|file_name|>_mock.py<|end_file_name|><|fim▁begin|>from typing import Dict, List, Optional
from ray.tune.suggest.suggestion import Searcher, ConcurrencyLimiter
from ray.tune.suggest.search_generator import SearchGenerator
from ray.tune.trial import Trial
class _MockSearcher(Searcher):
def __init__(self, **kwargs):
self.live_trials = {}
self.counter = {"result": 0, "complete": 0}
self.final_results = []
self.stall = False
self.results = []
super(_MockSearcher, self).__init__(**kwargs)
def suggest(self, trial_id: str):
if not self.stall:
self.live_trials[trial_id] = 1
return {"test_variable": 2}
return None
def on_trial_result(self, trial_id: str, result: Dict):
self.counter["result"] += 1
self.results += [result]
def on_trial_complete(
self, trial_id: str, result: Optional[Dict] = None, error: bool = False
):
self.counter["complete"] += 1
if result:
self._process_result(result)
if trial_id in self.live_trials:
del self.live_trials[trial_id]
def _process_result(self, result: Dict):
self.final_results += [result]
class _MockSuggestionAlgorithm(SearchGenerator):
def __init__(self, max_concurrent: Optional[int] = None, **kwargs):
<|fim_middle|>
@property
def live_trials(self) -> List[Trial]:
return self.searcher.live_trials
@property
def results(self) -> List[Dict]:
return self.searcher.results
<|fim▁end|> | self.searcher = _MockSearcher(**kwargs)
if max_concurrent:
self.searcher = ConcurrencyLimiter(
self.searcher, max_concurrent=max_concurrent
)
super(_MockSuggestionAlgorithm, self).__init__(self.searcher) |