import uuid
from decimal import Decimal
from django.apps import apps
from ahj_app.models import User, Edit, Comment, AHJInspection, Contact, Address, Location, AHJ, AHJUserMaintains
from django.urls import reverse
from django.utils import timezone
import pytest
import datetime
from fixtures import create_user, ahj_obj, generate_client_with_webpage_credentials, api_client, create_minimal_obj, \
set_obj_field, get_obj_field, get_value_or_enum_row
from ahj_app.models_field_enums import RequirementLevel, LocationDeterminationMethod
from ahj_app import views_edits
@pytest.fixture
def user_obj(create_user):
user = create_user(Username='someone')
return user
@pytest.fixture
def add_enums():
RequirementLevel.objects.create(Value='ConditionallyRequired')
RequirementLevel.objects.create(Value='Required')
RequirementLevel.objects.create(Value='Optional')
LocationDeterminationMethod.objects.create(Value='AddressGeocoding')
LocationDeterminationMethod.objects.create(Value='GPS')
def edit_is_pending(edit):
return edit.ReviewStatus == 'P' and edit.ApprovedBy is None and edit.DateEffective is None and edit.IsApplied is False
def filter_to_edit(edit_dict):
    search_dict = dict(edit_dict)
search_dict['DateRequested__date'] = search_dict.pop('DateRequested')
search_dict['DateEffective__date'] = search_dict.pop('DateEffective')
return Edit.objects.filter(**search_dict)
def check_edit_exists(edit_dict):
return filter_to_edit(edit_dict).exists()
@pytest.mark.parametrize(
'user_type', [
'Admin',
'AHJOfficial'
]
)
@pytest.mark.django_db
def test_edit_review__authenticated_normal_use(user_type, generate_client_with_webpage_credentials, ahj_obj):
client = generate_client_with_webpage_credentials(Username='someone')
user = User.objects.get(Username='someone')
if user_type == 'Admin':
user.is_superuser = True
user.save()
elif user_type == 'AHJOfficial':
AHJUserMaintains.objects.create(UserID=user, AHJPK=ahj_obj, MaintainerStatus=True)
edit_dict = {'ChangedBy': user, 'ApprovedBy': None,
'SourceTable': 'AHJ', 'SourceRow': ahj_obj.pk, 'SourceColumn': 'AHJName',
'OldValue': 'oldname', 'NewValue': 'newname',
'DateRequested': timezone.now(), 'DateEffective': None,
'ReviewStatus': 'P', 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
url = reverse('edit-review')
response = client.post(url, {'EditID': edit.EditID, 'Status': 'A'})
assert response.status_code == 200
edit = Edit.objects.get(EditID=edit.EditID)
assert edit.ReviewStatus == 'A'
assert edit.ApprovedBy == user
tomorrow = timezone.now() + datetime.timedelta(days=1)
assert edit.DateEffective.date() == tomorrow.date()
@pytest.mark.django_db
def test_edit_review__no_auth_normal_use(generate_client_with_webpage_credentials, ahj_obj):
client = generate_client_with_webpage_credentials(Username='someone')
user = User.objects.get(Username='someone')
edit_dict = {'ChangedBy': user, 'ApprovedBy': None,
'SourceTable': 'AHJ', 'SourceRow': ahj_obj.pk, 'SourceColumn': 'AHJName',
'OldValue': 'oldname', 'NewValue': 'newname',
'DateRequested': timezone.now(), 'DateEffective': None,
'ReviewStatus': 'P', 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
url = reverse('edit-review')
response = client.post(url, {'EditID': edit.EditID, 'Status': 'A'})
assert response.status_code == 403
@pytest.mark.django_db
def test_edit_review__invalid_status(generate_client_with_webpage_credentials, ahj_obj):
client = generate_client_with_webpage_credentials(Username='someone')
user = User.objects.get(Username='someone')
edit_dict = {'ChangedBy': user, 'ApprovedBy': None,
'SourceTable': 'AHJ', 'SourceRow': ahj_obj.pk, 'SourceColumn': 'AHJName',
'OldValue': 'oldname', 'NewValue': 'newname',
'DateRequested': timezone.now(), 'DateEffective': None,
'ReviewStatus': 'P', 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
url = reverse('edit-review')
response = client.post(url, {'EditID': edit.EditID, 'Status': 'Z'})
assert response.status_code == 400
@pytest.mark.django_db
def test_edit_review__edit_does_not_exist(generate_client_with_webpage_credentials):
client = generate_client_with_webpage_credentials(Username='someone')
url = reverse('edit-review')
response = client.post(url, {'EditID': 0, 'Status': 'A'})
assert response.status_code == 400
@pytest.mark.django_db
@pytest.mark.parametrize(
'params', [
({}),
({'EditID': '1'}),
({'Status': 'A'}),
]
)
def test_edit_review__missing_param(params, generate_client_with_webpage_credentials):
client = generate_client_with_webpage_credentials(Username='someone')
url = reverse('edit-review')
response = client.post(url, params)
assert response.status_code == 400
@pytest.mark.django_db
def test_edit_addition__normal_use(ahj_obj, generate_client_with_webpage_credentials):
client = generate_client_with_webpage_credentials(Username='someone')
user = User.objects.get(Username='someone')
AHJInspection.objects.create(AHJPK=ahj_obj, AHJInspectionName='Inspection1', TechnicianRequired=1, InspectionStatus=True)
url = reverse('edit-addition')
response = client.post(url, {
'SourceTable': 'AHJInspection',
'AHJPK': ahj_obj.AHJPK,
'ParentTable': 'AHJ',
'ParentID': ahj_obj.AHJPK,
'Value': [
{ 'AHJInspectionName': 'NewName'}
]}, format='json')
assert response.status_code == 200
assert response.data[0]['AHJInspectionName']['Value'] == 'NewName' # confirm returned AHJInspection was updated
edit = Edit.objects.get(AHJPK=ahj_obj.AHJPK)
assert edit.EditType == 'A'
assert edit.NewValue == 'True'
assert edit.SourceRow == response.data[0]['InspectionID']['Value']
@pytest.mark.django_db
@pytest.mark.parametrize(
'params', [
({'SourceTable': 'AHJ', 'ParentID': '1', 'ParentTable': 'AHJ'}),
({'AHJPK': '1', 'ParentID': '1', 'ParentTable': 'AHJ'}),
({'SourceTable': 'AHJ', 'AHJPK': '1', 'ParentTable': 'AHJ'}),
({'SourceTable': 'AHJ', 'AHJPK': '1', 'ParentID': '1'})
]
)
def test_edit_addition__missing_param(params, generate_client_with_webpage_credentials):
client = generate_client_with_webpage_credentials(Username='someone')
url = reverse('edit-addition')
response = client.post(url, params)
assert response.status_code == 400
@pytest.mark.django_db
def test_edit_deletion__normal_use(ahj_obj, generate_client_with_webpage_credentials):
client = generate_client_with_webpage_credentials(Username='someone')
user = User.objects.get(Username='someone')
inspection = AHJInspection.objects.create(AHJPK=ahj_obj, AHJInspectionName='Inspection1', TechnicianRequired=1, InspectionStatus=True)
url = reverse('edit-deletion')
response = client.post(url, {
'SourceTable': 'AHJInspection',
'AHJPK': ahj_obj.AHJPK,
'ParentTable': 'AHJ',
'ParentID': ahj_obj.AHJPK,
'Value': [
inspection.InspectionID
]}, format='json')
assert response.status_code == 200
edit = Edit.objects.get(AHJPK=ahj_obj.AHJPK)
assert edit.EditType == 'D'
assert edit.NewValue == 'False'
assert edit.SourceRow == response.data[0]['InspectionID']['Value']
@pytest.mark.django_db
@pytest.mark.parametrize(
'params', [
({'SourceTable': 'AHJ'}),
({'AHJPK': '1'}),
]
)
def test_edit_deletion__missing_param(params, generate_client_with_webpage_credentials):
client = generate_client_with_webpage_credentials(Username='someone')
url = reverse('edit-deletion')
response = client.post(url, params)
assert response.status_code == 400
@pytest.mark.parametrize(
'ReviewStatus, DateEffective', [
('A', timezone.now()),
('A', timezone.now() - datetime.timedelta(days=1)),
('A', timezone.now() + datetime.timedelta(days=1)),
('A', None),
('P', timezone.now()),
('D', timezone.now())
]
)
@pytest.mark.django_db
def test_apply_edits(ReviewStatus, DateEffective, create_user, ahj_obj):
field_name = 'AHJName'
old_value = 'oldname'
new_value = 'newname'
user = create_user()
set_obj_field(ahj_obj, field_name, old_value)
edit_dict = {'ChangedBy': user, 'ApprovedBy': user if DateEffective is not None else None,
'SourceTable': 'AHJ', 'SourceRow': ahj_obj.pk, 'SourceColumn': field_name,
'OldValue': old_value, 'NewValue': new_value,
'DateRequested': timezone.now(), 'DateEffective': DateEffective,
'ReviewStatus': ReviewStatus, 'IsApplied': False, 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
views_edits.apply_edits()
ahj = AHJ.objects.get(AHJPK=ahj_obj.AHJPK)
is_date_effective = (DateEffective.date() == datetime.date.today()) if DateEffective is not None else False
edit_should_apply = is_date_effective and ReviewStatus == 'A'
edit_is_applied = getattr(ahj, field_name) == new_value
assert edit_is_applied == edit_should_apply
edit = Edit.objects.get(EditID=edit.EditID)
assert edit.IsApplied == edit_should_apply
@pytest.mark.django_db
def test_edit_update__normal_use(ahj_obj, generate_client_with_webpage_credentials):
client = generate_client_with_webpage_credentials(Username='someone')
user = User.objects.get(Username='someone')
inspection = AHJInspection.objects.create(AHJPK=ahj_obj, AHJInspectionName='Inspection1', TechnicianRequired=1, InspectionStatus=True)
url = reverse('edit-update')
    payload = [
        {
            'AHJPK': ahj_obj.AHJPK,
            'SourceTable': 'AHJInspection',
            'SourceRow': inspection.pk,
            'SourceColumn': 'AHJInspectionName',
            'NewValue': 'NewName'
        }
    ]
    response = client.post(url, payload, format='json')
assert response.status_code == 200
    edit = Edit.objects.get(AHJPK=ahj_obj.AHJPK)  # fetch the newly created edit and mark it approved
edit.ReviewStatus = 'A'
edit.DateEffective = timezone.now()
edit.ApprovedBy = user
edit.save()
views_edits.apply_edits() # Now that it's approved, apply edits will apply it.
    inspection = AHJInspection.objects.get(AHJPK=ahj_obj)
    assert inspection.AHJInspectionName == 'NewName'
@pytest.mark.django_db
@pytest.mark.parametrize(
'params', [
({'SourceTable': 'AHJ'}),
({'AHJPK': '1', 'SourceTable': 'AHJ', 'SourceRow': 'row', 'SourceColumn': 'column'}),
]
)
def test_edit_update__missing_param(params, generate_client_with_webpage_credentials):
client = generate_client_with_webpage_credentials(Username='someone')
    url = reverse('edit-update')
response = client.post(url, params)
assert response.status_code == 400
@pytest.mark.django_db
def test_edit_list__normal_use(ahj_obj, generate_client_with_webpage_credentials):
client = generate_client_with_webpage_credentials(Username='someone')
user = User.objects.get(Username='someone')
Edit.objects.create(EditID=1, AHJPK=ahj_obj, ChangedBy=user, EditType='A', SourceTable='AHJ', SourceColumn='BuildingCode', SourceRow='2118', DateRequested=timezone.now())
Edit.objects.create(EditID=2, AHJPK=ahj_obj, ChangedBy=user, EditType='A', SourceTable='AHJ', SourceColumn='BuildingCode', SourceRow='2118', DateRequested=timezone.now())
url = reverse('edit-list')
response = client.get(url, {'AHJPK':'1'})
assert response.status_code == 200
assert len(response.data) == 2
@pytest.mark.django_db
def test_edit_list__missing_param(generate_client_with_webpage_credentials):
client = generate_client_with_webpage_credentials(Username='someone')
url = reverse('edit-list')
response = client.get(url)
assert response.status_code == 200
assert len(response.data) == 0
@pytest.mark.parametrize(
'model_name, field_name, old_value, new_value, expected_value', [
('AHJ', 'AHJName', 'oldname', 'newname', 'old_value'),
('Contact', 'FirstName', 'oldname', 'newname', 'old_value'),
('Address', 'Country', 'oldcountry', 'newcountry', 'old_value'),
('Location', 'Elevation', Decimal('0.00000000'), Decimal('10000.00000000'), 'old_value'),
('Location', 'LocationDeterminationMethod', '', 'AddressGeocoding', None),
('Location', 'LocationDeterminationMethod', 'AddressGeocoding', '', 'old_value'),
('EngineeringReviewRequirement', 'RequirementLevel', 'ConditionallyRequired', 'Required', 'old_value'),
('AHJInspection', 'FileFolderURL', 'oldurl', 'newurl', 'old_value'),
('FeeStructure', 'FeeStructureID', str(uuid.uuid4()), str(uuid.uuid4()), 'old_value')
]
)
@pytest.mark.django_db
def test_edit_revert__edit_update(model_name, field_name, old_value, new_value, create_user, ahj_obj, expected_value, create_minimal_obj, add_enums):
user = create_user()
obj = create_minimal_obj(model_name)
set_obj_field(obj, field_name, new_value)
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': model_name, 'SourceRow': obj.pk, 'SourceColumn': field_name,
'OldValue': old_value, 'NewValue': new_value,
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': 'A', 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
assert views_edits.revert_edit(user, edit)
edit_dict['OldValue'], edit_dict['NewValue'] = edit.NewValue, edit.OldValue
    if expected_value == 'old_value':
expected_value = get_value_or_enum_row(field_name, old_value)
assert get_obj_field(obj, field_name) == expected_value
assert check_edit_exists(edit_dict)
@pytest.mark.django_db
def test_edit_revert__edit_pending_do_nothing(create_user, ahj_obj):
user = create_user()
old_value = 'oldname'
new_value = 'newname'
set_obj_field(ahj_obj, 'AHJName', old_value)
edit_dict = {'ChangedBy': user, 'ApprovedBy': None,
'SourceTable': 'AHJ', 'SourceRow': ahj_obj.pk, 'SourceColumn': 'AHJName',
'OldValue': old_value, 'NewValue': new_value,
'DateRequested': timezone.now(), 'DateEffective': None,
'ReviewStatus': 'P', 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
assert not views_edits.revert_edit(user, edit)
edit_dict['OldValue'], edit_dict['NewValue'] = old_value, edit_dict['OldValue']
edit_dict['ReviewStatus'] = 'A'
edit_dict['ApprovedBy'], edit_dict['DateEffective'] = user, timezone.now()
assert not check_edit_exists(edit_dict)
assert Edit.objects.all().count() == 1
@pytest.mark.django_db
def test_edit_revert__current_value_is_old_value_do_nothing(create_user, ahj_obj):
user = create_user()
old_value = 'oldname'
new_value = 'newname'
set_obj_field(ahj_obj, 'AHJName', old_value)
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': 'AHJ', 'SourceRow': ahj_obj.pk, 'SourceColumn': 'AHJName',
'OldValue': old_value, 'NewValue': new_value,
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': 'A', 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
assert not views_edits.revert_edit(user, edit)
edit_dict['OldValue'], edit_dict['NewValue'] = old_value, edit_dict['OldValue']
assert not check_edit_exists(edit_dict)
assert Edit.objects.all().count() == 1
@pytest.mark.django_db
def test_edit_revert__revert_edit_old_value_uses_current_row_value(create_user, ahj_obj):
user = create_user()
old_value = 'oldname'
middle_value = 'newername'
new_value = 'newestname'
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': 'AHJ', 'SourceRow': ahj_obj.pk, 'SourceColumn': 'AHJName',
'OldValue': old_value, 'NewValue': middle_value,
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': 'A', 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
edit_dict['OldValue'], edit_dict['NewValue'] = edit_dict['NewValue'], new_value
setattr(ahj_obj, 'AHJName', new_value)
ahj_obj.save()
newer_edit = Edit.objects.create(**edit_dict)
assert views_edits.revert_edit(user, edit)
edit_dict['OldValue'], edit_dict['NewValue'] = edit_dict['NewValue'], old_value
reverting_edit = filter_to_edit(edit_dict)
assert reverting_edit.exists()
assert reverting_edit.first().OldValue == new_value
    assert get_obj_field(ahj_obj, 'AHJName') == old_value
@pytest.mark.parametrize(
'parent_model_name, model_name', [
('AHJ', 'Contact'),
('AHJInspection', 'Contact'),
('AHJ', 'EngineeringReviewRequirement'),
('AHJ', 'AHJInspection'),
('AHJ', 'DocumentSubmissionMethod'),
('AHJ', 'PermitIssueMethod'),
('AHJ', 'FeeStructure')
]
)
@pytest.mark.django_db
def test_edit_revert__edit_addition(parent_model_name, model_name, create_user, create_minimal_obj, ahj_obj):
user = create_user()
parent_obj = create_minimal_obj(parent_model_name)
obj = create_minimal_obj(model_name)
relation = obj.create_relation_to(parent_obj)
set_obj_field(relation, relation.get_relation_status_field(), True)
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': relation.__class__.__name__, 'SourceRow': relation.pk, 'SourceColumn': relation.get_relation_status_field(),
'OldValue': None, 'NewValue': True,
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': 'A', 'EditType': 'A', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
assert views_edits.revert_edit(user, edit)
edit_dict['OldValue'], edit_dict['NewValue'] = edit_dict['NewValue'], False
assert check_edit_exists(edit_dict)
assert get_obj_field(relation, relation.get_relation_status_field()) == edit_dict['NewValue']
@pytest.mark.parametrize(
'parent_model_name, model_name', [
('AHJ', 'Contact'),
('AHJInspection', 'Contact'),
('AHJ', 'EngineeringReviewRequirement'),
('AHJ', 'AHJInspection'),
('AHJ', 'DocumentSubmissionMethod'),
('AHJ', 'PermitIssueMethod'),
('AHJ', 'FeeStructure')
]
)
@pytest.mark.django_db
def test_edit_revert__edit_deletion(parent_model_name, model_name, create_user, create_minimal_obj, ahj_obj):
user = create_user()
parent_obj = create_minimal_obj(parent_model_name)
obj = create_minimal_obj(model_name)
relation = obj.create_relation_to(parent_obj)
set_obj_field(relation, relation.get_relation_status_field(), False)
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': relation.__class__.__name__, 'SourceRow': relation.pk, 'SourceColumn': relation.get_relation_status_field(),
'OldValue': True, 'NewValue': False,
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': 'A', 'EditType': 'D', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
assert views_edits.revert_edit(user, edit)
edit_dict['OldValue'], edit_dict['NewValue'] = edit_dict['NewValue'], edit_dict['OldValue']
assert check_edit_exists(edit_dict)
assert get_obj_field(relation, relation.get_relation_status_field()) == edit_dict['NewValue']
@pytest.mark.parametrize(
'edit_status1, is_applied1, is_applied2, expected_outcome', [
# Rejected edits are resettable.
('R', False, True, True),
# Approved, but not yet applied, edits are resettable.
('A', False, False, True),
('A', False, True, True),
# Approved and applied edits where they are the latest applied are resettable.
('A', True, False, True),
# Approved and applied edits where another edit was since applied are not resettable.
('A', True, True, False)
]
)
@pytest.mark.django_db
def test_edit_is_resettable(edit_status1, is_applied1, is_applied2, expected_outcome, create_user, ahj_obj):
user = create_user()
new_value = 'newname'
old_value = 'oldname'
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': 'AHJ', 'SourceRow': ahj_obj.pk, 'SourceColumn': 'AHJName',
'OldValue': old_value, 'NewValue': new_value,
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': edit_status1, 'IsApplied': is_applied1, 'EditType': 'U', 'AHJPK': ahj_obj}
edit_to_reset = Edit.objects.create(**edit_dict)
tomorrow = timezone.now() + datetime.timedelta(days=1)
edit_dict['DateRequested'], edit_dict['DateEffective'] = tomorrow, tomorrow
edit_dict['ReviewStatus'], edit_dict['IsApplied'] = 'A', is_applied2
later_edit = Edit.objects.create(**edit_dict)
assert expected_outcome == views_edits.edit_is_resettable(edit_to_reset)
@pytest.mark.django_db
def test_edit_make_pending(create_user, ahj_obj):
user = create_user()
set_obj_field(ahj_obj, 'AHJName', 'newername')
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': 'AHJ', 'SourceRow': ahj_obj.pk, 'SourceColumn': 'AHJName',
'OldValue': 'oldname', 'NewValue': 'newname',
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': 'R', 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
views_edits.edit_make_pending(edit)
edit = Edit.objects.get(EditID=edit.EditID)
assert edit_is_pending(edit)
@pytest.mark.parametrize(
'model_name, field_name, old_value, new_value', [
('AHJ', 'AHJName', 'oldname', 'newname'),
('Contact', 'FirstName', 'oldname', 'newname'),
('Address', 'Country', 'oldcountry', 'newcountry'),
('Location', 'Elevation', Decimal('0.00000000'), Decimal('10000.00000000')),
('Location', 'LocationDeterminationMethod', '', 'AddressGeocoding'),
('Location', 'LocationDeterminationMethod', 'AddressGeocoding', ''),
('EngineeringReviewRequirement', 'RequirementLevel', 'ConditionallyRequired', 'Required'),
('AHJInspection', 'FileFolderURL', 'oldurl', 'newurl'),
('FeeStructure', 'FeeStructureID', str(uuid.uuid4()), str(uuid.uuid4()))
]
)
@pytest.mark.django_db
def test_edit_update_old_value(model_name, field_name, old_value, new_value, create_user, ahj_obj, create_minimal_obj, add_enums):
user = create_user()
obj = create_minimal_obj(model_name)
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': model_name, 'SourceRow': obj.pk, 'SourceColumn': field_name,
'OldValue': old_value, 'NewValue': new_value,
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': 'A', 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
views_edits.apply_edits(ready_edits=[edit])
views_edits.edit_update_old_value(edit)
edit = Edit.objects.get(EditID=edit.EditID)
assert edit.OldValue == str(new_value)
@pytest.mark.parametrize(
'model_name, field_name, old_value, new_value', [
('AHJ', 'AHJName', 'oldname', 'newname'),
('Contact', 'FirstName', 'oldname', 'newname'),
('Address', 'Country', 'oldcountry', 'newcountry'),
('Location', 'Elevation', Decimal('0.00000000'), Decimal('10000.00000000')),
('Location', 'LocationDeterminationMethod', '', 'AddressGeocoding'),
('Location', 'LocationDeterminationMethod', 'AddressGeocoding', ''),
('EngineeringReviewRequirement', 'RequirementLevel', 'ConditionallyRequired', 'Required'),
('AHJInspection', 'FileFolderURL', 'oldurl', 'newurl'),
('FeeStructure', 'FeeStructureID', str(uuid.uuid4()), str(uuid.uuid4()))
]
)
@pytest.mark.django_db
def test_edit_update_old_value_all_awaiting_apply_or_review(model_name, field_name, old_value, new_value, create_user, ahj_obj, create_minimal_obj, add_enums):
user = create_user()
obj = create_minimal_obj(model_name)
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': model_name, 'SourceRow': obj.pk, 'SourceColumn': field_name,
'OldValue': old_value, 'NewValue': new_value,
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': 'A', 'IsApplied': True, 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
edit_dict['IsApplied'] = False
approved_edit = Edit.objects.create(**edit_dict)
edit_dict['ReviewStatus'] = 'P'
pending_edit = Edit.objects.create(**edit_dict)
views_edits.apply_edits(ready_edits=[edit])
views_edits.edit_update_old_value_all_awaiting_apply_or_review(edit)
approved_edit = Edit.objects.get(EditID=approved_edit.EditID)
pending_edit = Edit.objects.get(EditID=pending_edit.EditID)
assert approved_edit.OldValue == str(new_value)
assert pending_edit.OldValue == str(new_value)
@pytest.mark.parametrize(
'model_name, field_name, old_value, new_value, expected_value', [
('AHJ', 'AHJName', 'oldname', 'newname', 'old_value'),
('Contact', 'FirstName', 'oldname', 'newname', 'old_value'),
('Address', 'Country', 'oldcountry', 'newcountry', 'old_value'),
('Location', 'Elevation', Decimal('0.00000000'), Decimal('10000.00000000'), 'old_value'),
('Location', 'LocationDeterminationMethod', '', 'AddressGeocoding', None),
('Location', 'LocationDeterminationMethod', 'AddressGeocoding', '', 'old_value'),
('EngineeringReviewRequirement', 'RequirementLevel', 'ConditionallyRequired', 'Required', 'old_value'),
('AHJInspection', 'FileFolderURL', 'oldurl', 'newurl', 'old_value'),
('FeeStructure', 'FeeStructureID', str(uuid.uuid4()), str(uuid.uuid4()), 'old_value')
]
)
@pytest.mark.django_db
def test_edit_undo_apply(model_name, field_name, old_value, new_value, create_user, ahj_obj, expected_value, create_minimal_obj, add_enums):
user = create_user()
obj = create_minimal_obj(model_name)
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': model_name, 'SourceRow': obj.pk, 'SourceColumn': field_name,
'OldValue': old_value, 'NewValue': new_value,
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': 'A', 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
views_edits.apply_edits(ready_edits=[edit])
views_edits.edit_undo_apply(edit)
if expected_value == 'old_value':
expected_value = get_value_or_enum_row(field_name, old_value)
assert get_obj_field(obj, field_name) == expected_value
@pytest.mark.parametrize(
'model_name, field_name, old_value, new_value, expected_value', [
('AHJ', 'AHJName', 'oldname', 'newname', 'old_value'),
('Contact', 'FirstName', 'oldname', 'newname', 'old_value'),
('Address', 'Country', 'oldcountry', 'newcountry', 'old_value'),
('Location', 'Elevation', Decimal('0.00000000'), Decimal('10000.00000000'), 'old_value'),
('Location', 'LocationDeterminationMethod', '', 'AddressGeocoding', None),
('Location', 'LocationDeterminationMethod', 'AddressGeocoding', '', 'old_value'),
('EngineeringReviewRequirement', 'RequirementLevel', 'ConditionallyRequired', 'Required', 'old_value'),
('AHJInspection', 'FileFolderURL', 'oldurl', 'newurl', 'old_value'),
('FeeStructure', 'FeeStructureID', str(uuid.uuid4()), str(uuid.uuid4()), 'old_value')
]
)
@pytest.mark.django_db
def test_edit_reset__edit_update(model_name, field_name, old_value, new_value, create_user, ahj_obj, create_minimal_obj, expected_value, add_enums):
user = create_user()
obj = create_minimal_obj(model_name)
set_obj_field(obj, field_name, new_value)
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': model_name, 'SourceRow': obj.pk, 'SourceColumn': field_name,
'OldValue': old_value, 'NewValue': new_value,
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': 'A', 'IsApplied': True, 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
assert views_edits.reset_edit(user, edit)
assert edit_is_pending(edit)
if expected_value == 'old_value':
expected_value = get_value_or_enum_row(field_name, old_value)
assert get_obj_field(obj, field_name) == expected_value
@pytest.mark.parametrize(
'parent_model_name, model_name, review_status', [
('AHJ', 'Contact', 'A'),
('AHJInspection', 'Contact', 'A'),
('AHJ', 'EngineeringReviewRequirement', 'A'),
('AHJ', 'AHJInspection', 'A'),
('AHJ', 'DocumentSubmissionMethod', 'A'),
('AHJ', 'PermitIssueMethod', 'A'),
('AHJ', 'FeeStructure', 'A'),
('AHJ', 'Contact', 'R'),
('AHJInspection', 'Contact', 'R'),
('AHJ', 'EngineeringReviewRequirement', 'R'),
('AHJ', 'AHJInspection', 'R'),
('AHJ', 'DocumentSubmissionMethod', 'R'),
('AHJ', 'PermitIssueMethod', 'R'),
('AHJ', 'FeeStructure', 'R')
]
)
@pytest.mark.django_db
def test_edit_reset__edit_addition(parent_model_name, model_name, review_status, create_user, create_minimal_obj, ahj_obj):
user = create_user()
parent_obj = create_minimal_obj(parent_model_name)
obj = create_minimal_obj(model_name)
relation = obj.create_relation_to(parent_obj)
set_obj_field(relation, relation.get_relation_status_field(), review_status == 'A')
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': relation.__class__.__name__, 'SourceRow': relation.pk, 'SourceColumn': relation.get_relation_status_field(),
'OldValue': None, 'NewValue': True,
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': review_status, 'IsApplied': review_status == 'A', 'EditType': 'A', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
assert views_edits.reset_edit(user, edit)
assert edit_is_pending(edit)
assert get_obj_field(relation, relation.get_relation_status_field()) == edit_dict['OldValue']
@pytest.mark.parametrize(
'parent_model_name, model_name, review_status', [
('AHJ', 'Contact', 'A'),
('AHJInspection', 'Contact', 'A'),
('AHJ', 'EngineeringReviewRequirement', 'A'),
('AHJ', 'AHJInspection', 'A'),
('AHJ', 'DocumentSubmissionMethod', 'A'),
('AHJ', 'PermitIssueMethod', 'A'),
('AHJ', 'FeeStructure', 'A'),
('AHJ', 'Contact', 'R'),
('AHJInspection', 'Contact', 'R'),
('AHJ', 'EngineeringReviewRequirement', 'R'),
('AHJ', 'AHJInspection', 'R'),
('AHJ', 'DocumentSubmissionMethod', 'R'),
('AHJ', 'PermitIssueMethod', 'R'),
('AHJ', 'FeeStructure', 'R')
]
)
@pytest.mark.django_db
def test_edit_reset__edit_deletion(parent_model_name, model_name, review_status, create_user, create_minimal_obj, ahj_obj):
user = create_user()
parent_obj = create_minimal_obj(parent_model_name)
obj = create_minimal_obj(model_name)
relation = obj.create_relation_to(parent_obj)
set_obj_field(relation, relation.get_relation_status_field(), review_status != 'A')
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': relation.__class__.__name__, 'SourceRow': relation.pk, 'SourceColumn': relation.get_relation_status_field(),
'OldValue': True, 'NewValue': False,
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': review_status, 'IsApplied': review_status == 'A', 'EditType': 'A', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
assert views_edits.reset_edit(user, edit)
edit = Edit.objects.get(EditID=edit.EditID)
assert edit_is_pending(edit)
assert get_obj_field(relation, relation.get_relation_status_field()) == edit_dict['OldValue']
@pytest.mark.django_db
def test_edit_reset__edit_pending_do_nothing(create_user, ahj_obj):
user = create_user()
old_value = 'oldname'
new_value = 'newname'
set_obj_field(ahj_obj, 'AHJName', old_value)
edit_dict = {'ChangedBy': user, 'ApprovedBy': None,
'SourceTable': 'AHJ', 'SourceRow': ahj_obj.pk, 'SourceColumn': 'AHJName',
'OldValue': old_value, 'NewValue': new_value,
'DateRequested': timezone.now(), 'DateEffective': None,
'ReviewStatus': 'P', 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
assert not views_edits.reset_edit(user, edit)
edit_dict['OldValue'], edit_dict['NewValue'] = old_value, edit_dict['OldValue']
edit_dict['ReviewStatus'] = 'A'
edit_dict['ApprovedBy'], edit_dict['DateEffective'] = user, timezone.now()
assert not check_edit_exists(edit_dict)
assert Edit.objects.all().count() == 1
@pytest.mark.parametrize(
'force_resettable, skip_undo', [
(True, False),
(True, True)
]
)
@pytest.mark.django_db
def test_edit_reset__kwargs(force_resettable, skip_undo, create_user, ahj_obj):
user = create_user()
old_value = 'oldname'
new_value = 'newname'
later_value = 'newname_later'
set_obj_field(ahj_obj, 'AHJName', later_value)
edit_dict = {'ChangedBy': user, 'ApprovedBy': user,
'SourceTable': 'AHJ', 'SourceRow': ahj_obj.pk, 'SourceColumn': 'AHJName',
'OldValue': old_value, 'NewValue': new_value,
'DateRequested': timezone.now(), 'DateEffective': timezone.now(),
'ReviewStatus': 'A', 'IsApplied': True, 'EditType': 'U', 'AHJPK': ahj_obj}
edit = Edit.objects.create(**edit_dict)
edit_dict['OldValue'], edit_dict['NewValue'] = edit_dict['NewValue'], later_value
later_edit = Edit.objects.create(**edit_dict)
assert views_edits.reset_edit(user, edit, force_resettable=force_resettable, skip_undo=skip_undo)
edit = Edit.objects.get(EditID=edit.EditID)
if force_resettable and not skip_undo:
assert get_obj_field(ahj_obj, 'AHJName') == old_value
elif force_resettable and skip_undo:
assert get_obj_field(ahj_obj, 'AHJName') == later_value
assert edit.OldValue == later_value
assert edit.NewValue == new_value
assert edit_is_pending(edit)
|
python
|
from flask import Blueprint, g, request, current_app
import json
import logging
from ..utils import datetime_to_json, get_time_string, get_default_runtime, match_movie
import datetime
from ..pick_algo import pick_movies_by_num, pick_movies_by_time
from .auth import login_required
import pandas
import pathlib
from .. import db
logger = logging.getLogger(__name__)
bp = Blueprint('movies', __name__, url_prefix='/movie')
@bp.route('/all', methods=['GET'])
@login_required
def get_all_movies():
user_id = g.user.id
# user_id = 1
user_movies_map = db.query_user_movies_map(user_id)
res = []
keys = ['likability', 'have_seen', 'comment', 'create_time']
movie_keys = ['id', 'name', 'rating']
if user_movies_map:
for row in user_movies_map:
temp = {k:getattr(row, k) for k in keys}
movie = db.query_movie(row.movie_id)
for key in movie_keys:
temp[key] = getattr(movie, key)
temp['runtime'] = get_default_runtime(movie.runtime).running_time
temp['starring'] = [s.name for s in movie.starring]
temp['genre'] = [g.genre for g in movie.genre]
res.append(temp)
data = {'statusCode':0, 'message':'query success', 'data':res}
return json.dumps(data, default=datetime_to_json, ensure_ascii=False)
@bp.route('/', methods=['POST'])
@login_required
def insert_one_movie():
r = request.get_json()
if r is None:
        logger.warning('request body is not JSON; the Content-Type header may not be application/json')
        return {'statusCode': -1, 'message': 'req data is not json'}
    req_params = dict(r)
if req_params.get('create_time') is not None:
try:
req_params['create_time'] = datetime.datetime.strptime(req_params.get('create_time'), '%Y-%m-%d %H:%M:%S')
print(req_params['create_time'])
except Exception as e:
print(e)
return {'statusCode': -1, 'message':'date format must match %Y-%m-%d %H:%M:%S'}
user_id = g.user.id
# user_id = 1
    # First try to match the movie in the library; if no match is found, create one.
    # movie_id is the id of the matched or the newly created movie.
temp_l = db.query_movie_match_name(req_params['name'])
matcher = match_movie(temp_l, {'rating':req_params['rating'], 'runtime':req_params['runtime']})
movie_id = -1
    if matcher is None:
movie_id = db.insert_movie(req_params['name'], [db.RunningTime('default', int(req_params['runtime']))], req_params['rating'],
starring=req_params['starring'], genre=req_params['genre'])
else:
movie_id = matcher.id
db.insert_user_movie_map(user_id, movie_id, req_params['likability'], req_params['have_seen'], req_params['comment'], req_params['create_time'])
data = db.query_movie_with_userinfo(user_id, movie_id)
res = {'statusCode': 0, 'message':'insert movie success', 'data': data}
return json.dumps(res, default=datetime_to_json, ensure_ascii=False)
@bp.route('/', methods=['PUT'])
@login_required
def update_one_movie():
r = request.get_json()
if r is None:
        logger.warning('request body is not JSON; the Content-Type header may not be application/json')
return {'statusCode': -1, 'message':'req data is not json'}
elif r.get('id') is None:
logger.warning('update data does not contain id')
print(r)
return {'statusCode': -1, 'message':'update data must contain id'}
r['movie_id'] = r['id']
del r['id']
db.update_user_movie_map(g.user.id, **r)
return {'statusCode': 0, 'message':'update movie success'}
@bp.route('/', methods=['DELETE'])
@login_required
def remove_one_movie():
    movie_id = request.args.get('id', None)
    if movie_id is None:
        logger.warning('id is None!')
        return {'statusCode': -1, 'message': 'delete method requires an id param'}
db.delete_user_movie_map(g.user.id, movie_id)
return {'statusCode': 0, 'message':'remove movie success'}
@bp.route('/pick', methods=['POST'])
@login_required
def pick_movie():
r = request.get_json()
    if r is None:
        logger.warning('request body is not JSON; the Content-Type header may not be application/json')
        return {'statusCode': -1, 'message': 'req data is not json'}
    pick_type = r.get('type')
    data = r.get('data')
    if pick_type is None or data is None:
        logger.error('pick_type or data is null, parameter error')
        return {'statusCode': -1, 'message': 'pick_type or data is null, parameter error'}
    if data.get('value') == '':
        logger.error('value can not be null')
        return {'statusCode': -1, 'message': 'value can not be null'}
movies_havent_seen = db.query_all_movies_havent_seen_by_userid(g.user.id)
starrings = data.get('starring')
genres = data.get('genre')
def filter_by_starring_and_genre(row):
for s in starrings:
if row['starring'] is None:
return False
temp = db.query_starring(s)
if temp is None:
return False
elif temp.name not in row['starring']:
return False
for g in genres:
if row['genre'] is None:
return False
temp = db.query_genre(g)
if temp is None:
return False
elif temp.genre not in row['genre']:
return False
return True
movies_input = list(filter(filter_by_starring_and_genre, movies_havent_seen))
# type=1, pick by time; type=2, pick by num
pick_res = []
if pick_type == 1:
pick_res = pick_movies_by_time(int(data.get('value')), movies_input)
elif pick_type == 2:
pick_res = pick_movies_by_num(int(data.get('value')), movies_input)
res = {'statusCode': 0, 'message':'pick successful', 'data': pick_res}
return json.dumps(res, default=datetime_to_json, ensure_ascii=False)
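# A request sketch for /movie/pick (payload shape inferred from the handler above;
# the field semantics are assumptions): 'type' 1 picks by total running time,
# 'type' 2 picks by count, and 'value' is parsed with int().
#
#   POST /movie/pick  (Content-Type: application/json)
#   {"type": 2, "data": {"value": "3", "starring": [], "genre": []}}
#
# Entries in 'starring'/'genre' are looked up via db.query_starring / db.query_genre
# and used to filter the unseen movies before picking.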
@bp.route('/export', methods=['GET'])
@login_required
def export_movies_data():
userid = g.user.id
movies = db.query_all_movies_with_userinfo(userid)
export_filename = ''
if movies:
field_list = ['id', 'name', 'rating', 'starring', 'genre', 'runtime', 'likability', 'have_seen', 'comment', 'create_time']
movies_input = []
for m in movies:
temp = {k:m.get(k) for k in field_list}
movies_input.append(temp)
df = pandas.DataFrame(movies_input, columns=field_list)
columns_to_drop = ['id']
for col in columns_to_drop:
del df[col]
# print(df)
def convert_list(m):
if m:
return '/'.join(m)
return
def convert_haveseen(have_seen):
if have_seen == True:
return '是'
elif have_seen == False:
return '否'
return ''
df['starring'] = df['starring'].apply(convert_list)
df['genre'] = df['genre'].apply(convert_list)
df['have_seen'] = df['have_seen'].apply(convert_haveseen)
time_string = get_time_string()
export_filename = f'{userid}-export-{time_string}.xlsx'
export_path = pathlib.Path(current_app.config['DOWNLOAD_FOLDER'])
        if not export_path.exists():
export_path.mkdir()
df.to_excel(export_path.joinpath(export_filename))
else:
return {'statusCode': 0, 'message':'there are no movies'}
return {'statusCode': 0, 'message':'export successful', 'data': {'filename': export_filename}}
@bp.route('/starrings', methods=['GET'])
@login_required
def get_starrings():
filter_args = request.args.get('filter')
starrings = []
if filter_args is None:
starrings = db.query_all_starring()
else:
starrings = db.query_starring_by_filter(filter_args)
res = []
if starrings:
keys = starrings[0].field_list
for row in starrings:
temp = {k:getattr(row, k) for k in keys}
res.append(temp)
data = {'statusCode':0, 'message':'query success', 'data':res}
return data
@bp.route('/genres', methods=['GET'])
@login_required
def get_genres():
filter_args = request.args.get('filter')
genres = []
if filter_args is None:
genres = db.query_all_genre()
else:
genres = db.query_genre_by_filter(filter_args)
res = []
if genres:
keys = genres[0].field_list
for row in genres:
temp = {k:getattr(row, k) for k in keys}
res.append(temp)
data = {'statusCode':0, 'message':'query success', 'data':res}
return data
@bp.route('/movie', methods=['GET'])
@login_required
def get_match_movie():
match_q = request.args.get('match')
    if match_q is None:
        logger.warning('query parameter "match" is missing')
        return {'statusCode': -1, 'message': 'parameter match is required'}
match_res = db.query_movie_match_name(match_q)
keys = ['id', 'name', 'starring', 'genre', 'rating', 'runtime']
def filter_field(movie:db.Movie):
temp = {k:getattr(movie,k) for k in keys}
temp['starring'] = [s.name for s in movie.starring]
temp['genre'] = [g.genre for g in movie.genre]
temp['runtime'] = get_default_runtime(movie.runtime).running_time
return temp
map_res = list(map(filter_field, match_res))
data = {'statusCode':0, 'message':'query success', 'data': map_res}
return data
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# from "SuperShape2D" (Daniel Shiffman)
# Video: https://youtu.be/ksRoh-10lak
# supershapes: http://paulbourke.net/geometry/supershape/
import sys, os
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *
import math
import numpy as np
def mapFromTo(x, a, b, c, d):
    """Linearly remap x from the range [a, b] to [c, d] (the map() function of Processing/p5.js)."""
    y = (float(x) - float(a))/(float(b) - float(a)) * \
        (float(d) - float(c)) + float(c)
    return y
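# Quick sanity checks (plain arithmetic on the function above):
#   mapFromTo(5, 0, 10, 0, 100) -> 50.0
#   mapFromTo(0.5, 0, 1, -1, 1) -> 0.0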
class SuperShape(QWidget):
    def __init__(self, parent=None):
QWidget.__init__(self, parent)
self.myTimerId = None
self.setWindowTitle("Coding Train - Supershape2D")
self.setFixedSize(400, 400)
# black background
p = self.palette()
p.setColor(self.backgroundRole(), Qt.black)
self.setAutoFillBackground(True)
self.setPalette(p)
# parameters
self.n1 = 0.3
self.n2 = 0.3
self.n3 = 0.3
self.m = 5
self.a = 1
self.b = 1
self.radius = 100
def paintEvent(self, event):
painter = QPainter(self)
painter.translate(self.width()/2, self.height()/2)
painter.setPen(Qt.white)
#painter.setBrush(Qt.NoBrush)
painter.setBrush(Qt.darkGray)
total = 200
increment = 2 * math.pi/total
points = []
for angle in np.arange(0, 2 * math.pi, increment):
r = self.supershape(angle)
x = self.radius * r * math.cos(angle)
y = self.radius * r * math.sin(angle)
            points.append(QPoint(int(x), int(y)))  # QPoint takes ints in PyQt5
painter.drawPolygon(QPolygon(points))
# write some info
painter.resetTransform()
font = painter.font()
font.setPixelSize(10)
painter.setFont(font)
text=''
for var in ['m','a','b','n1','n2','n3']:
text += '%s = %f\n' % (var, getattr(self,var))
rectangle = painter.viewport().adjusted(10,10,-20,-20)
boundingRect = painter.drawText(rectangle, 0, text)
def supershape(self, theta):
part1 = (1.0 / self.a) * math.cos(theta * self.m / 4.0)
part1 = abs(part1)
part1 = math.pow(part1, self.n2)
part2 = (1.0 / self.b) * math.sin(theta * self.m / 4.0)
part2 = abs(part2)
part2 = math.pow(part2, self.n3)
part3 = math.pow(part1 + part2, 1/self.n1)
if part3 == 0.0:
return 0.0
return 1.0 / part3
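# For reference, supershape() evaluates Johan Gielis's superformula (see the
# Paul Bourke link in the header):
#   r(theta) = (|cos(m*theta/4) / a|**n2 + |sin(m*theta/4) / b|**n3) ** (-1/n1)
# with the degenerate case r = 0 when the bracketed sum is zero.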
class Window(QWidget):
def __init__(self):
QWidget.__init__(self)
self.initUI()
def buildSlider(self, widget, rmin, rmax, stp, name):
slider = QSlider(Qt.Horizontal)
slider.setMinimumWidth(200)
slider.setRange(0, stp)
        slider.setValue(int(float(getattr(widget, name) - rmin) / (rmax - rmin) * stp))  # setValue expects an int
slider.valueChanged.connect(lambda x: setattr(widget, name, rmin+x*float(rmax-rmin)/stp))
slider.valueChanged.connect(lambda x: widget.repaint())
return slider
def initUI(self):
iconfile = os.path.join(os.path.dirname(__file__), 'coding_train_icon.png')
self.setWindowIcon(QIcon(iconfile))
widget = SuperShape()
vbox = QFormLayout()
vbox.addRow("m", self.buildSlider(widget, rmin=0, rmax=10, stp=100, name='m'))
vbox.addRow("a", self.buildSlider(widget, rmin=1, rmax=10, stp=100, name='a'))
vbox.addRow("b", self.buildSlider(widget, rmin=1, rmax=10, stp=100, name='b'))
vbox.addRow("n1", self.buildSlider(widget, rmin=0.1, rmax=1, stp=100, name='n1'))
vbox.addRow("n2", self.buildSlider(widget, rmin=0.1, rmax=1, stp=100, name='n2'))
vbox.addRow("n3", self.buildSlider(widget, rmin=0.1, rmax=1, stp=100, name='n3'))
vbox.addRow("radius", self.buildSlider(widget, rmin=1, rmax=500, stp=500, name='radius'))
hbox = QHBoxLayout()
hbox.addWidget(widget)
hbox.addLayout(vbox)
self.setLayout(hbox)
if __name__ == '__main__':
app = QApplication(sys.argv)
ex = Window()
ex.show()
sys.exit(app.exec_())
|
python
|
"""Convert all the old posts.
Author: Alex Alemi
Date: 2019-01-23
"""
import os
import logging
CURRENT_DIR = os.path.dirname(__file__)
POSTS_DIR = os.path.normpath(os.path.join(CURRENT_DIR, '../posts/old'))
def fix_front(line):
"""Redo the front of the metadata lines for the nikola format."""
return '.. ' + line[0].lower() + line[1:]
def has_math(lines):
"""Test if math appears anywhere in the post."""
    for line in lines:
        if '$' in line:  # '$' also matches the display-math delimiter '$$'
            return True
    return False
def fix_preamble(lines):
"""Convert the preamble to the correct form."""
# get the first empty line
first_empty_line = lines.index('\n')
    if first_empty_line == 0:
        raise Exception('post has no metadata preamble before the first empty line')
preamble = [fix_front(line) for line in lines[:first_empty_line]]
if has_math(lines):
preamble.append('.. has_math: true\n')
    lines = ['<!--\n'] + preamble + ['-->\n'] + lines[first_empty_line:]
return lines
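# Intended transformation, on an assumed input shape: a post beginning
#   Title: My Post
#   Date: 2019-01-23
#   <blank line>
#   body...
# becomes
#   <!--
#   .. title: My Post
#   .. date: 2019-01-23
#   -->
#   body...
# with '.. has_math: true' appended inside the comment when the post contains '$'.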
def fix_static(lines):
"""Fix image links to handle new static path."""
def fix_static_line(line):
return line.replace('/static/images', '/images')
return [fix_static_line(line) for line in lines]
def transform(filepath):
"""Transform a file."""
with open(filepath, 'r') as f:
lines = f.readlines()
try:
lines = fix_preamble(lines)
lines = fix_static(lines)
except Exception:
logging.exception(f'Error on {filepath}')
raise
return lines
if __name__ == "__main__":
if not os.path.exists(POSTS_DIR):
os.makedirs(POSTS_DIR)
for subdir, dirs, files in os.walk(os.path.join(CURRENT_DIR, "../content.bk/old")):
for file in files:
filepath = os.path.normpath(os.path.join(subdir, file))
if filepath.endswith(".md"):
print(f"Processing {filepath}")
transformed_lines = transform(filepath)
new_filepath = os.path.join(POSTS_DIR, file)
with open(new_filepath, 'w') as f:
f.writelines(transformed_lines)
print(f"Wrote {new_filepath}")
|
python
|
# Generated by Django 2.2.24 on 2021-07-26 14:50
import django.core.validators
from django.db import migrations, models
def split_dates(apps, schema_editor):
CompanyObjective = apps.get_model('exportplan', 'CompanyObjectives')
for objective in CompanyObjective.objects.all():
if objective.start_date:
objective.start_month = objective.start_date.month
objective.start_year = objective.start_date.year
if objective.end_date:
objective.end_month = objective.end_date.month
objective.end_year = objective.end_date.year
objective.save()
class Migration(migrations.Migration):
dependencies = [
('exportplan', '0038_auto_20210614_1506'),
]
operations = [
migrations.AddField(
model_name='companyobjectives',
name='end_month',
field=models.IntegerField(blank=True, null=True, validators=[
django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(12)]),
),
migrations.AddField(
model_name='companyobjectives',
name='end_year',
field=models.IntegerField(blank=True, null=True, validators=[
django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(9999)]),
),
migrations.AddField(
model_name='companyobjectives',
name='start_month',
field=models.IntegerField(blank=True, null=True, validators=[
django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(12)]),
),
migrations.AddField(
model_name='companyobjectives',
name='start_year',
field=models.IntegerField(blank=True, null=True, validators=[
django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(9999)]),
),
migrations.RunPython(split_dates),
]
|
python
|
"""https://gist.github.com/alopes/5358189"""
stopwords = [
"de",
"a",
"o",
"que",
"e",
"do",
"da",
"em",
"um",
"para",
"é",
"com",
"não",
"uma",
"os",
"no",
"se",
"na",
"por",
"mais",
"as",
"dos",
"como",
"mas",
"foi",
"ao",
"ele",
"das",
"tem",
"à",
"seu",
"sua",
"ou",
"ser",
"quando",
"muito",
"há",
"nos",
"já",
"está",
"eu",
"também",
"só",
"pelo",
"pela",
"até",
"isso",
"ela",
"entre",
"era",
"depois",
"sem",
"mesmo",
"aos",
"ter",
"seus",
"quem",
"nas",
"me",
"esse",
"eles",
"estão",
"você",
"tinha",
"foram",
"essa",
"num",
"nem",
"suas",
"meu",
"às",
"minha",
"têm",
"numa",
"pelos",
"elas",
"havia",
"seja",
"qual",
"será",
"nós",
"tenho",
"lhe",
"deles",
"essas",
"esses",
"pelas",
"este",
"fosse",
"dele",
"tu",
"te",
"vocês",
"vos",
"lhes",
"meus",
"minhas",
"teu",
"tua",
"teus",
"tuas",
"nosso",
"nossa",
"nossos",
"nossas",
"dela",
"delas",
"esta",
"estes",
"estas",
"aquele",
"aquela",
"aqueles",
"aquelas",
"isto",
"aquilo",
"estou",
"está",
"estamos",
"estão",
"estive",
"esteve",
"estivemos",
"estiveram",
"estava",
"estávamos",
"estavam",
"estivera",
"estivéramos",
"esteja",
"estejamos",
"estejam",
"estivesse",
"estivéssemos",
"estivessem",
"estiver",
"estivermos",
"estiverem",
"hei",
"há",
"havemos",
"hão",
"houve",
"houvemos",
"houveram",
"houvera",
"houvéramos",
"haja",
"hajamos",
"hajam",
"houvesse",
"houvéssemos",
"houvessem",
"houver",
"houvermos",
"houverem",
"houverei",
"houverá",
"houveremos",
"houverão",
"houveria",
"houveríamos",
"houveriam",
"sou",
"somos",
"são",
"era",
"éramos",
"eram",
"fui",
"foi",
"fomos",
"foram",
"fora",
"fôramos",
"seja",
"sejamos",
"sejam",
"fosse",
"fôssemos",
"fossem",
"for",
"formos",
"forem",
"serei",
"será",
"seremos",
"serão",
"seria",
"seríamos",
"seriam",
"tenho",
"tem",
"temos",
"tém",
"tinha",
"tínhamos",
"tinham",
"tive",
"teve",
"tivemos",
"tiveram",
"tivera",
"tivéramos",
"tenha",
"tenhamos",
"tenham",
"tivesse",
"tivéssemos",
"tivessem",
"tiver",
"tivermos",
"tiverem",
"terei",
"terá",
"teremos",
"terão",
"teria",
"teríamos",
"teriam",
]
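# A minimal usage sketch (assumptions: lowercase input and whitespace tokenization;
# a real pipeline would also strip punctuation and handle accents):
def remove_stopwords(text):
    """Return the tokens of `text` that are not Portuguese stopwords."""
    stops = set(stopwords)  # set membership is O(1) per token
    return [token for token in text.lower().split() if token not in stops]
# e.g. remove_stopwords("o gato está no telhado") -> ['gato', 'telhado']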
|
python
|
import pygame
from . import GameEnv, GameEnv_Simple, Ball, Robot, Goal
from typing import Tuple, List, Dict
import random
class AbstractPlayer:
def __init__(self, env: GameEnv, robot: Robot):
self.env = env
self.robot = robot
def get_action(self) -> Tuple[float, float]:
raise Exception("Override this in the child class.")
class OG_Twitchy(AbstractPlayer):
def get_action(self) -> Tuple[float, float]:
rando = random.random()
# ~5% chance to turn left or right, 45% chance to go forward/back
if rando <= 0.05:
# turn left
action = (-1, 1)
elif rando <= 0.5:
# go straight
action = (1, 1)
elif rando < 0.95:
# go back
action = (-1, -1)
else:
# turn right
action = (1, -1)
return action
class Human(AbstractPlayer):
def __init__(self, env: GameEnv,
robot: Robot,
key_left=pygame.K_a,
key_right=pygame.K_d,
key_forwards=pygame.K_w,
key_backwards=pygame.K_s):
super(Human, self).__init__(env, robot)
self.key_left = key_left
self.key_right = key_right
self.key_forwards = key_forwards
self.key_backwards = key_backwards
def get_action(self) -> Tuple[float, float]:
        pygame.event.get()  # pump the event queue first; key.get_pressed() only reflects events that have been processed
# Process player input
dctKeyDown = pygame.key.get_pressed()
lngLThrust = 0
lngRThrust = 0
if dctKeyDown[self.key_forwards]:
lngLThrust += 1
lngRThrust += 1
if dctKeyDown[self.key_backwards]:
lngLThrust -= 1
lngRThrust -= 1
if dctKeyDown[self.key_left]:
lngLThrust -= 1
lngRThrust += 1
if dctKeyDown[self.key_right]:
lngLThrust += 1
lngRThrust -= 1
return (lngLThrust, lngRThrust)
class DistantHuman(Human):
def __init__(self, env: GameEnv, robot: Robot):
        super(DistantHuman, self).__init__(env, robot)
raise NotImplementedError("SOMEBODY SHOULD TOTALLY MAKE A CLIENT/SERVER PLAYER THO")
|
python
|
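# ListNode is normally provided by the LeetCode judge; this minimal stub (an
# assumption, matching the attributes used below) makes the snippet runnable standalone.
class ListNode:
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next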
class Solution:
    def XXX(self, head: ListNode) -> ListNode:
        # Deduplicate a sorted linked list by copying the first node of each
        # run of equal values into a new list.
        if head is None:
            return head
        new_head = new_tail = ListNode(head.val)
        p = head.next
        while p:
            if new_tail.val != p.val:  # a new value starts here
                node = ListNode(p.val)
                new_tail.next = node
                new_tail = node
            p = p.next
        return new_head
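# Usage sketch: the sorted list 1 -> 1 -> 2 dedupes to 1 -> 2.
#   head = ListNode(1, ListNode(1, ListNode(2)))
#   deduped = Solution().XXX(head)  # deduped.val == 1, deduped.next.val == 2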
|
python
|
from PIL import Image
# Load the image
img = Image.open('/home/popschool/Documents/GitHub/projet_recoplante/Images_test/bruyere_des_marais_NB.jpg')
# Display the loaded image
img.show()
# Get and print the image size (in pixels)
w, h = img.size
print("Width: {} px, height: {} px".format(w, h))
# Print its pixel quantization mode
print("Pixel format: {}".format(img.mode))
# Get and print the value of the pixel at a given position
px_value = img.getpixel((20, 100))
print("Value of the pixel at (20,100): {}".format(px_value))
import numpy as np
# Get the values of all the pixels as a matrix
mat = np.array(img)
print(mat)
# Print the size of the pixel matrix
print("Size of the pixel matrix: {}".format(mat.shape))
|
python
|
import unittest
from Config import Config
from MossResultsRetriever import MossResultsRetriever
from Result import Result
class MossURLsTests(unittest.TestCase):
def setUp(self):
self.config = Config()
self.validUrl = self.config.getMagicsquare()
self.retriever = MossResultsRetriever()
self.results = Result(1, "f1", "f2", "url", 40, 50, 60)
#
# isValidUrl()
#
# Test a valid URL
def test_validUrl(self):
url = self.validUrl
self.assertTrue(self.retriever.isValidUrl(url))
# Test the same URL twice, which is considered a valid submission
def test_validSameUrl(self):
url = self.validUrl
self.assertTrue(self.retriever.isValidUrl(url))
self.assertTrue(self.retriever.isValidUrl(url))
# Test an invalid String
def test_invalidUrlString(self):
url = "notURL"
self.assertFalse(self.retriever.isValidUrl(url))
# Test an int
def test_invalidUrlInt(self):
url = 1
self.assertFalse(self.retriever.isValidUrl(url))
# Test a double
def test_invalidUrlDouble(self):
url = 0.5
self.assertFalse(self.retriever.isValidUrl(url))
# Test None
def test_invalidUrlNone(self):
url = None
self.assertFalse(self.retriever.isValidUrl(url))
# Test empty list
def test_invalidUrlListEmpty(self):
url = []
self.assertFalse(self.retriever.isValidUrl(url))
# Test a list with valid URLs as entries
def test_invalidUrlListOfUrls(self):
url = [self.validUrl, self.validUrl, self.validUrl]
self.assertFalse(self.retriever.isValidUrl(url))
    # Test an invalid URL that looks like a MOSS results URL
def test_invalidUrlLikeMoss(self):
url = "http://moss.stanford.edu/results/12121212121212/"
self.assertFalse(self.retriever.isValidUrl(url))
# Test a URL that's two valid URLs appended together
def test_invalidUrlTwoAppended(self):
url = self.validUrl + self.validUrl
self.assertFalse(self.retriever.isValidUrl(url))
# Test a valid URL that isn't MOSS
def test_validUrlNotMoss(self):
url = "https://google.com"
self.assertFalse(self.retriever.isValidUrl(url))
# Test a valid URL with space
def test_validUrlWithSpace(self):
url = " " + self.validUrl + " "
self.assertFalse(self.retriever.isValidUrl(url))
# Test a valid URL with new line
def test_validUrlWithNewLine(self):
url = "\n" + self.validUrl + "\n"
self.assertFalse(self.retriever.isValidUrl(url))
#
# isValidUrlList()
#
# Test int
def test_isValidUrlListInt(self):
urls = 1
isValid, url = self.retriever.isValidUrlList(urls)
self.assertFalse(isValid)
self.assertEqual(url, "argument " + str(urls) + " is not a valid list")
# Test double
def test_isValidUrlListDouble(self):
urls = 0.5
isValid, url = self.retriever.isValidUrlList(urls)
self.assertFalse(isValid)
self.assertEqual(url, "argument " + str(urls) + " is not a valid list")
# Test empty string
def test_isValidUrlListString(self):
urls = " "
isValid, url = self.retriever.isValidUrlList(urls)
self.assertFalse(isValid)
self.assertEqual(url, "argument " + str(urls) + " is not a valid list")
# Test single, valid url string
def test_isValidUrlListValidUrl(self):
urls = self.validUrl
isValid, url = self.retriever.isValidUrlList(urls)
self.assertFalse(isValid)
self.assertEqual(url, "argument " + str(urls) + " is not a valid list")
# Test None
def test_isValidUrlListNone(self):
urls = None
isValid, url = self.retriever.isValidUrlList(urls)
self.assertFalse(isValid)
self.assertEqual(url, "argument " + str(urls) + " is not a valid list")
# Test empty list
def test_isValidUrlListEmptyList(self):
urls = []
isValid, url = self.retriever.isValidUrlList(urls)
self.assertFalse(isValid)
self.assertEqual(url, "argument " + str(urls) + " is not a valid list")
# Test list of ints
def test_isValidUrlListIntList(self):
urls = [1, 1, 1]
isValid, url = self.retriever.isValidUrlList(urls)
self.assertFalse(isValid)
self.assertEqual(url, 1)
# Test list of doubles
def test_isValidUrlListDoublesList(self):
urls = [0.5, 0.5, 0.5]
isValid, url = self.retriever.isValidUrlList(urls)
self.assertFalse(isValid)
self.assertEqual(url, 0.5)
# Test list of Nones
def test_isValidUrlListNoneList(self):
urls = [None, None, None]
isValid, url = self.retriever.isValidUrlList(urls)
self.assertFalse(isValid)
self.assertEqual(url, None)
# Test list of lists
def test_isValidUrlListOfLists(self):
urls = [[], [], []]
isValid, url = self.retriever.isValidUrlList(urls)
self.assertFalse(isValid)
self.assertEqual(url, [])
# Test mixed list
def test_isValidUrlListMixed(self):
urls = [" ", 1, None, 0.5]
isValid, url = self.retriever.isValidUrlList(urls)
self.assertFalse(isValid)
self.assertEqual(url, " ")
# Test mixed list with valid url
def test_isValidUrlListMixedWithValid(self):
urls = [self.validUrl, " ", 1, None, 0.5]
isValid, url = self.retriever.isValidUrlList(urls)
self.assertFalse(isValid)
self.assertEqual(url, " ")
# Test single valid
def test_isValidUrlListSingleValid(self):
urls = [self.validUrl]
isValid, url = self.retriever.isValidUrlList(urls)
self.assertTrue(isValid)
self.assertEqual(url, "success")
# Test multiple valid
def test_isValidUrlListMultipleValid(self):
urls = [self.config.getMagicsquare(), self.config.getTwentyone(), self.config.getTwentyone()]
isValid, url = self.retriever.isValidUrlList(urls)
self.assertTrue(isValid)
self.assertEqual(url, "success")
# Test multiple valid with duplicates
def test_isValidUrlListMultipleValidDuplicates(self):
urls = [self.config.getMagicsquare(), self.config.getTwentyone(), self.config.getTwentyone(),
self.config.getMagicsquare(), self.config.getTwentyone(), self.config.getTwentyone()]
isValid, url = self.retriever.isValidUrlList(urls)
self.assertTrue(isValid)
self.assertEqual(url, "success")
#
# appendUrl()
#
# Test a valid URL
def test_appendValidUrl(self):
url = self.validUrl
self.retriever.appendUrl(url)
self.assertTrue(url in self.retriever.urls)
# Test the same URL twice, which is considered a valid submission
def test_appendValidSameUrl(self):
url = self.validUrl
self.retriever.appendUrl(url)
self.retriever.appendUrl(url)
        self.assertTrue(url in self.retriever.urls)
        # duplicates are ignored, so the URL is stored exactly once
        self.assertEqual(self.retriever.urls.count(url), 1)
# Test an invalid String
def test_appendInvalidUrlString(self):
url = "notURL"
self.retriever.appendUrl(url)
self.assertFalse(url in self.retriever.urls)
# Test an int
def test_appendInvalidUrlInt(self):
url = 1
self.retriever.appendUrl(url)
self.assertFalse(url in self.retriever.urls)
# Test a double
def test_appendInvalidUrlDouble(self):
url = 0.5
self.retriever.appendUrl(url)
self.assertFalse(url in self.retriever.urls)
# Test None
def test_appendInvalidUrlNone(self):
url = None
self.retriever.appendUrl(url)
self.assertFalse(url in self.retriever.urls)
# Test empty list
def test_appendInvalidUrlEmptyList(self):
url = []
self.retriever.appendUrl(url)
self.assertFalse(url in self.retriever.urls)
# Test a list with valid URLs as entries
def test_appendInvalidUrlListOfUrls(self):
url = [self.validUrl, self.validUrl, self.validUrl]
self.retriever.appendUrl(url)
self.assertFalse(url in self.retriever.urls)
    # Test a MOSS-like URL with an invalid results ID
def test_appendInvalidUrlLikeMoss(self):
url = "http://moss.stanford.edu/results/12121212121212/"
self.retriever.appendUrl(url)
self.assertFalse(url in self.retriever.urls)
# Test a URL that's two valid URLs appended together
def test_appendInvalidUrlTwoAppended(self):
url = self.validUrl + self.validUrl
self.retriever.appendUrl(url)
self.assertFalse(url in self.retriever.urls)
    # Test a well-formed URL that is not a MOSS results URL (should be rejected)
def test_appendValidUrlNotMoss(self):
url = "https://google.com"
self.retriever.appendUrl(url)
self.assertFalse(url in self.retriever.urls)
    # Test a valid URL padded with spaces (should be rejected)
def test_appendValidUrlWithSpace(self):
url = " " + self.validUrl + " "
self.retriever.appendUrl(url)
self.assertFalse(url in self.retriever.urls)
    # Test a valid URL padded with newlines (should be rejected)
def test_appendValidUrlWithNewLine(self):
url = "\n" + self.validUrl + "\n"
self.retriever.appendUrl(url)
self.assertFalse(url in self.retriever.urls)
#
# populateResults()
#
def test_populateResultsOneUrl(self):
self.retriever.urls = [self.config.getTwentyone()]
self.retriever.populateResults()
self.assertNotEqual(len(self.retriever.results), 0)
def test_populateResultsMultipleUrls(self):
self.retriever.urls = [self.config.getTwentyone(), self.config.getMagicsquare(), self.config.getPalindrome()]
self.retriever.populateResults()
self.assertGreater(len(self.retriever.results), 3)
#
# getDuplicateUrls()
#
# Test int
def test_getDuplicateUrlsInt(self):
duplicates, nonDuplicates = self.retriever.getDuplicateUrls(1)
self.assertListEqual(duplicates, [])
self.assertListEqual(nonDuplicates, [])
# Test double
def test_getDuplicateUrlsDouble(self):
duplicates, nonDuplicates = self.retriever.getDuplicateUrls(0.5)
self.assertListEqual(duplicates, [])
self.assertListEqual(nonDuplicates, [])
    # Test whitespace-only string
def test_getDuplicateUrlsString(self):
duplicates, nonDuplicates = self.retriever.getDuplicateUrls(" ")
self.assertListEqual(duplicates, [])
self.assertListEqual(nonDuplicates, [])
# Test single, valid url string
def test_getDuplicateUrlsValidUrl(self):
duplicates, nonDuplicates = self.retriever.getDuplicateUrls(self.validUrl)
self.assertListEqual(duplicates, [])
self.assertListEqual(nonDuplicates, [])
# Test None
def test_getDuplicateUrlsNone(self):
duplicates, nonDuplicates = self.retriever.getDuplicateUrls(None)
self.assertListEqual(duplicates, [])
self.assertListEqual(nonDuplicates, [])
# Test empty list
def test_getDuplicateUrlsEmptyList(self):
duplicates, nonDuplicates = self.retriever.getDuplicateUrls([])
self.assertListEqual(duplicates, [])
self.assertListEqual(nonDuplicates, [])
# Test list of ints
def test_getDuplicateUrlsIntList(self):
duplicates, nonDuplicates = self.retriever.getDuplicateUrls([1, 1, 1])
self.assertListEqual(duplicates, [])
self.assertListEqual(nonDuplicates, [])
# Test list of doubles
def test_getDuplicateUrlsDoubleList(self):
duplicates, nonDuplicates = self.retriever.getDuplicateUrls([0.5, 0.5, 0.5])
self.assertListEqual(duplicates, [])
self.assertListEqual(nonDuplicates, [])
# Test list of Nones
def test_getDuplicateUrlsNoneList(self):
duplicates, nonDuplicates = self.retriever.getDuplicateUrls([None, None, None])
self.assertListEqual(duplicates, [])
self.assertListEqual(nonDuplicates, [])
# Test list of lists
def test_getDuplicateUrlsListOfLists(self):
duplicates, nonDuplicates = self.retriever.getDuplicateUrls([[], [], []])
self.assertListEqual(duplicates, [])
self.assertListEqual(nonDuplicates, [])
# Test mixed list
def test_getDuplicateUrlsMixedList(self):
duplicates, nonDuplicates = self.retriever.getDuplicateUrls([1, " ", 0.5, None])
self.assertListEqual(duplicates, [])
self.assertListEqual(nonDuplicates, [])
# Test mixed list with valid url
def test_getDuplicateUrlsMixedListWithValidUrl(self):
duplicates, nonDuplicates = self.retriever.getDuplicateUrls([self.validUrl, " ", 1])
self.assertListEqual(duplicates, [])
self.assertListEqual(nonDuplicates, [])
# Test no duplicates
def test_getDuplicateUrlsNoDuplicates(self):
urls = [self.config.getMagicsquare(), self.config.getPalindrome(), self.config.getTwentyone()]
duplicates, nonDuplicates = self.retriever.getDuplicateUrls(urls)
self.assertListEqual(duplicates, [])
self.assertListEqual(nonDuplicates, [self.config.getMagicsquare(), self.config.getPalindrome(), self.config.getTwentyone()])
# Test one duplicate
def test_getDuplicateUrlsOneDuplicate(self):
urls = [self.config.getMagicsquare(), self.config.getPalindrome(), self.config.getTwentyone(), self.config.getMagicsquare()]
duplicates, nonDuplicates = self.retriever.getDuplicateUrls(urls)
self.assertListEqual(duplicates, [self.config.getMagicsquare()])
self.assertListEqual(nonDuplicates, [self.config.getMagicsquare(), self.config.getPalindrome(), self.config.getTwentyone()])
# Test all duplicates
def test_getDuplicateUrlsAllDuplicate(self):
urls = [self.config.getMagicsquare(), self.config.getPalindrome(), self.config.getTwentyone(),
self.config.getMagicsquare(), self.config.getPalindrome(), self.config.getTwentyone()]
duplicates, nonDuplicates = self.retriever.getDuplicateUrls(urls)
self.assertListEqual(duplicates, [self.config.getMagicsquare(), self.config.getPalindrome(), self.config.getTwentyone()])
self.assertListEqual(nonDuplicates, [self.config.getMagicsquare(), self.config.getPalindrome(), self.config.getTwentyone()])
#
# resultsAreValid()
#
# Tests all the correct types for Result object
def test_validData(self):
        self.retriever.results = [self.results, self.results]
self.assertTrue(self.retriever.resultsAreValid())
# Tests all the incorrect types for Result object
def test_invalidData(self):
self.results.fileOne = 1
self.results.fileTwo = 2
self.results.fileOnePercent = "52"
self.results.fileTwoPercent = "58"
self.results.url = 51
self.retriever.results = [self.results, self.results]
self.assertFalse(self.retriever.resultsAreValid())
def tearDown(self):
self.retriever = None
self.results = None
if __name__ == '__main__':
unittest.main()
|
python
|
import io
import os.path
import shutil
import sys
import tempfile
import re
import unittest
from types import ModuleType
from typing import Any, List, Tuple, Optional
from mypy.test.helpers import (
assert_equal, assert_string_arrays_equal, local_sys_path_set
)
from mypy.test.data import DataSuite, DataDrivenTestCase
from mypy.errors import CompileError
from mypy.stubgen import (
generate_stubs, parse_options, Options, collect_build_targets,
mypy_options, is_blacklisted_path, is_non_library_module
)
from mypy.stubutil import walk_packages, remove_misplaced_type_comments, common_dir_prefix
from mypy.stubgenc import generate_c_type_stub, infer_method_sig, generate_c_function_stub
from mypy.stubdoc import (
parse_signature, parse_all_signatures, build_signature, find_unique_signatures,
infer_sig_from_docstring, infer_prop_type_from_docstring, FunctionSig, ArgSig,
infer_arg_sig_from_docstring, is_valid_type
)
from mypy.moduleinspect import ModuleInspect, InspectError
class StubgenCmdLineSuite(unittest.TestCase):
"""Test cases for processing command-line options and finding files."""
@unittest.skipIf(sys.platform == 'win32', "clean up fails on Windows")
def test_files_found(self) -> None:
current = os.getcwd()
with tempfile.TemporaryDirectory() as tmp:
try:
os.chdir(tmp)
os.mkdir('subdir')
self.make_file('subdir', 'a.py')
self.make_file('subdir', 'b.py')
os.mkdir(os.path.join('subdir', 'pack'))
self.make_file('subdir', 'pack', '__init__.py')
opts = parse_options(['subdir'])
py_mods, c_mods = collect_build_targets(opts, mypy_options(opts))
assert_equal(c_mods, [])
files = {mod.path for mod in py_mods}
assert_equal(files, {os.path.join('subdir', 'pack', '__init__.py'),
os.path.join('subdir', 'a.py'),
os.path.join('subdir', 'b.py')})
finally:
os.chdir(current)
@unittest.skipIf(sys.platform == 'win32', "clean up fails on Windows")
def test_packages_found(self) -> None:
current = os.getcwd()
with tempfile.TemporaryDirectory() as tmp:
try:
os.chdir(tmp)
os.mkdir('pack')
self.make_file('pack', '__init__.py', content='from . import a, b')
self.make_file('pack', 'a.py')
self.make_file('pack', 'b.py')
opts = parse_options(['-p', 'pack'])
py_mods, c_mods = collect_build_targets(opts, mypy_options(opts))
assert_equal(c_mods, [])
files = {os.path.relpath(mod.path or 'FAIL') for mod in py_mods}
assert_equal(files, {os.path.join('pack', '__init__.py'),
os.path.join('pack', 'a.py'),
os.path.join('pack', 'b.py')})
finally:
os.chdir(current)
@unittest.skipIf(sys.platform == 'win32', "clean up fails on Windows")
def test_module_not_found(self) -> None:
current = os.getcwd()
captured_output = io.StringIO()
sys.stdout = captured_output
with tempfile.TemporaryDirectory() as tmp:
try:
os.chdir(tmp)
self.make_file(tmp, 'mymodule.py', content='import a')
opts = parse_options(['-m', 'mymodule'])
py_mods, c_mods = collect_build_targets(opts, mypy_options(opts))
assert captured_output.getvalue() == ''
finally:
sys.stdout = sys.__stdout__
os.chdir(current)
def make_file(self, *path: str, content: str = '') -> None:
file = os.path.join(*path)
with open(file, 'w') as f:
f.write(content)
def run(self, result: Optional[Any] = None) -> Optional[Any]:
with local_sys_path_set():
return super().run(result)
class StubgenCliParseSuite(unittest.TestCase):
def test_walk_packages(self) -> None:
with ModuleInspect() as m:
assert_equal(
set(walk_packages(m, ["mypy.errors"])),
{"mypy.errors"})
assert_equal(
set(walk_packages(m, ["mypy.errors", "mypy.stubgen"])),
{"mypy.errors", "mypy.stubgen"})
all_mypy_packages = set(walk_packages(m, ["mypy"]))
self.assertTrue(all_mypy_packages.issuperset({
"mypy",
"mypy.errors",
"mypy.stubgen",
"mypy.test",
"mypy.test.helpers",
}))
class StubgenUtilSuite(unittest.TestCase):
"""Unit tests for stubgen utility functions."""
def test_parse_signature(self) -> None:
self.assert_parse_signature('func()', ('func', [], []))
def test_parse_signature_with_args(self) -> None:
self.assert_parse_signature('func(arg)', ('func', ['arg'], []))
self.assert_parse_signature('do(arg, arg2)', ('do', ['arg', 'arg2'], []))
def test_parse_signature_with_optional_args(self) -> None:
self.assert_parse_signature('func([arg])', ('func', [], ['arg']))
self.assert_parse_signature('func(arg[, arg2])', ('func', ['arg'], ['arg2']))
self.assert_parse_signature('func([arg[, arg2]])', ('func', [], ['arg', 'arg2']))
def test_parse_signature_with_default_arg(self) -> None:
self.assert_parse_signature('func(arg=None)', ('func', [], ['arg']))
self.assert_parse_signature('func(arg, arg2=None)', ('func', ['arg'], ['arg2']))
self.assert_parse_signature('func(arg=1, arg2="")', ('func', [], ['arg', 'arg2']))
def test_parse_signature_with_qualified_function(self) -> None:
self.assert_parse_signature('ClassName.func(arg)', ('func', ['arg'], []))
def test_parse_signature_with_kw_only_arg(self) -> None:
self.assert_parse_signature('ClassName.func(arg, *, arg2=1)',
('func', ['arg', '*'], ['arg2']))
def test_parse_signature_with_star_arg(self) -> None:
self.assert_parse_signature('ClassName.func(arg, *args)',
('func', ['arg', '*args'], []))
def test_parse_signature_with_star_star_arg(self) -> None:
self.assert_parse_signature('ClassName.func(arg, **args)',
('func', ['arg', '**args'], []))
def assert_parse_signature(self, sig: str, result: Tuple[str, List[str], List[str]]) -> None:
assert_equal(parse_signature(sig), result)
def test_build_signature(self) -> None:
assert_equal(build_signature([], []), '()')
assert_equal(build_signature(['arg'], []), '(arg)')
assert_equal(build_signature(['arg', 'arg2'], []), '(arg, arg2)')
assert_equal(build_signature(['arg'], ['arg2']), '(arg, arg2=...)')
assert_equal(build_signature(['arg'], ['arg2', '**x']), '(arg, arg2=..., **x)')
def test_parse_all_signatures(self) -> None:
assert_equal(parse_all_signatures(['random text',
'.. function:: fn(arg',
'.. function:: fn()',
' .. method:: fn2(arg)']),
([('fn', '()'),
('fn2', '(arg)')], []))
def test_find_unique_signatures(self) -> None:
assert_equal(find_unique_signatures(
[('func', '()'),
('func', '()'),
('func2', '()'),
('func2', '(arg)'),
('func3', '(arg, arg2)')]),
[('func', '()'),
('func3', '(arg, arg2)')])
def test_infer_sig_from_docstring(self) -> None:
assert_equal(infer_sig_from_docstring('\nfunc(x) - y', 'func'),
[FunctionSig(name='func', args=[ArgSig(name='x')], ret_type='Any')])
assert_equal(infer_sig_from_docstring('\nfunc(x, Y_a=None)', 'func'),
[FunctionSig(name='func',
args=[ArgSig(name='x'), ArgSig(name='Y_a', default=True)],
ret_type='Any')])
assert_equal(infer_sig_from_docstring('\nfunc(x, Y_a=3)', 'func'),
[FunctionSig(name='func',
args=[ArgSig(name='x'), ArgSig(name='Y_a', default=True)],
ret_type='Any')])
assert_equal(infer_sig_from_docstring('\nfunc(x, Y_a=[1, 2, 3])', 'func'),
[FunctionSig(name='func',
args=[ArgSig(name='x'), ArgSig(name='Y_a', default=True)],
ret_type='Any')])
assert_equal(infer_sig_from_docstring('\nafunc(x) - y', 'func'), [])
assert_equal(infer_sig_from_docstring('\nfunc(x, y', 'func'), [])
assert_equal(infer_sig_from_docstring('\nfunc(x=z(y))', 'func'),
[FunctionSig(name='func', args=[ArgSig(name='x', default=True)],
ret_type='Any')])
assert_equal(infer_sig_from_docstring('\nfunc x', 'func'), [])
# Try to infer signature from type annotation.
assert_equal(infer_sig_from_docstring('\nfunc(x: int)', 'func'),
[FunctionSig(name='func', args=[ArgSig(name='x', type='int')],
ret_type='Any')])
assert_equal(infer_sig_from_docstring('\nfunc(x: int=3)', 'func'),
[FunctionSig(name='func', args=[ArgSig(name='x', type='int', default=True)],
ret_type='Any')])
assert_equal(infer_sig_from_docstring('\nfunc(x: int=3) -> int', 'func'),
[FunctionSig(name='func', args=[ArgSig(name='x', type='int', default=True)],
ret_type='int')])
assert_equal(infer_sig_from_docstring('\nfunc(x: int=3) -> int \n', 'func'),
[FunctionSig(name='func', args=[ArgSig(name='x', type='int', default=True)],
ret_type='int')])
assert_equal(infer_sig_from_docstring('\nfunc(x: Tuple[int, str]) -> str', 'func'),
[FunctionSig(name='func', args=[ArgSig(name='x', type='Tuple[int,str]')],
ret_type='str')])
assert_equal(
infer_sig_from_docstring('\nfunc(x: Tuple[int, Tuple[str, int], str], y: int) -> str',
'func'),
[FunctionSig(name='func',
args=[ArgSig(name='x', type='Tuple[int,Tuple[str,int],str]'),
ArgSig(name='y', type='int')],
ret_type='str')])
assert_equal(infer_sig_from_docstring('\nfunc(x: foo.bar)', 'func'),
[FunctionSig(name='func', args=[ArgSig(name='x', type='foo.bar')],
ret_type='Any')])
assert_equal(infer_sig_from_docstring('\nfunc(x: list=[1,2,[3,4]])', 'func'),
[FunctionSig(name='func', args=[ArgSig(name='x', type='list', default=True)],
ret_type='Any')])
assert_equal(infer_sig_from_docstring('\nfunc(x: str="nasty[")', 'func'),
[FunctionSig(name='func', args=[ArgSig(name='x', type='str', default=True)],
ret_type='Any')])
assert_equal(infer_sig_from_docstring('\nfunc[(x: foo.bar, invalid]', 'func'), [])
assert_equal(infer_sig_from_docstring('\nfunc(x: invalid::type<with_template>)', 'func'),
[FunctionSig(name='func', args=[ArgSig(name='x', type=None)],
ret_type='Any')])
assert_equal(infer_sig_from_docstring('\nfunc(x: str="")', 'func'),
[FunctionSig(name='func', args=[ArgSig(name='x', type='str', default=True)],
ret_type='Any')])
def test_infer_sig_from_docstring_duplicate_args(self) -> None:
assert_equal(infer_sig_from_docstring('\nfunc(x, x) -> str\nfunc(x, y) -> int', 'func'),
[FunctionSig(name='func', args=[ArgSig(name='x'), ArgSig(name='y')],
ret_type='int')])
def test_infer_sig_from_docstring_bad_indentation(self) -> None:
assert_equal(infer_sig_from_docstring("""
x
x
x
""", 'func'), None)
def test_infer_arg_sig_from_docstring(self) -> None:
assert_equal(infer_arg_sig_from_docstring("(*args, **kwargs)"),
[ArgSig(name='*args'), ArgSig(name='**kwargs')])
assert_equal(
infer_arg_sig_from_docstring(
"(x: Tuple[int, Tuple[str, int], str]=(1, ('a', 2), 'y'), y: int=4)"),
[ArgSig(name='x', type='Tuple[int,Tuple[str,int],str]', default=True),
ArgSig(name='y', type='int', default=True)])
def test_infer_prop_type_from_docstring(self) -> None:
assert_equal(infer_prop_type_from_docstring('str: A string.'), 'str')
assert_equal(infer_prop_type_from_docstring('Optional[int]: An int.'), 'Optional[int]')
assert_equal(infer_prop_type_from_docstring('Tuple[int, int]: A tuple.'),
'Tuple[int, int]')
assert_equal(infer_prop_type_from_docstring('\nstr: A string.'), None)
def test_infer_sig_from_docstring_square_brackets(self) -> None:
assert infer_sig_from_docstring(
'fetch_row([maxrows, how]) -- Fetches stuff',
'fetch_row',
) == []
def test_remove_misplaced_type_comments_1(self) -> None:
good = """
\u1234
def f(x): # type: (int) -> int
def g(x):
# type: (int) -> int
def h():
# type: () int
x = 1 # type: int
"""
assert_equal(remove_misplaced_type_comments(good), good)
def test_remove_misplaced_type_comments_2(self) -> None:
bad = """
def f(x):
# type: Callable[[int], int]
pass
# type: "foo"
# type: 'bar'
x = 1
# type: int
"""
bad_fixed = """
def f(x):
pass
x = 1
"""
assert_equal(remove_misplaced_type_comments(bad), bad_fixed)
def test_remove_misplaced_type_comments_3(self) -> None:
bad = '''
def f(x):
"""docstring"""
# type: (int) -> int
pass
def g(x):
"""docstring
"""
# type: (int) -> int
pass
'''
bad_fixed = '''
def f(x):
"""docstring"""
pass
def g(x):
"""docstring
"""
pass
'''
assert_equal(remove_misplaced_type_comments(bad), bad_fixed)
def test_remove_misplaced_type_comments_4(self) -> None:
bad = """
def f(x):
'''docstring'''
# type: (int) -> int
pass
def g(x):
'''docstring
'''
# type: (int) -> int
pass
"""
bad_fixed = """
def f(x):
'''docstring'''
pass
def g(x):
'''docstring
'''
pass
"""
assert_equal(remove_misplaced_type_comments(bad), bad_fixed)
def test_remove_misplaced_type_comments_5(self) -> None:
bad = """
def f(x):
# type: (int, List[Any],
# float, bool) -> int
pass
def g(x):
# type: (int, List[Any])
pass
"""
bad_fixed = """
def f(x):
# float, bool) -> int
pass
def g(x):
pass
"""
assert_equal(remove_misplaced_type_comments(bad), bad_fixed)
def test_remove_misplaced_type_comments_bytes(self) -> None:
original = b"""
\xbf
def f(x): # type: (int) -> int
def g(x):
# type: (int) -> int
pass
def h():
# type: int
pass
x = 1 # type: int
"""
dest = b"""
\xbf
def f(x): # type: (int) -> int
def g(x):
# type: (int) -> int
pass
def h():
pass
x = 1 # type: int
"""
assert_equal(remove_misplaced_type_comments(original), dest)
def test_common_dir_prefix(self) -> None:
assert common_dir_prefix([]) == '.'
assert common_dir_prefix(['x.pyi']) == '.'
assert common_dir_prefix(['./x.pyi']) == '.'
assert common_dir_prefix(['foo/bar/x.pyi']) == 'foo/bar'
assert common_dir_prefix(['foo/bar/x.pyi',
'foo/bar/y.pyi']) == 'foo/bar'
assert common_dir_prefix(['foo/bar/x.pyi', 'foo/y.pyi']) == 'foo'
assert common_dir_prefix(['foo/x.pyi', 'foo/bar/y.pyi']) == 'foo'
assert common_dir_prefix(['foo/bar/zar/x.pyi', 'foo/y.pyi']) == 'foo'
assert common_dir_prefix(['foo/x.pyi', 'foo/bar/zar/y.pyi']) == 'foo'
assert common_dir_prefix(['foo/bar/zar/x.pyi', 'foo/bar/y.pyi']) == 'foo/bar'
assert common_dir_prefix(['foo/bar/x.pyi', 'foo/bar/zar/y.pyi']) == 'foo/bar'
class StubgenHelpersSuite(unittest.TestCase):
def test_is_blacklisted_path(self) -> None:
assert not is_blacklisted_path('foo/bar.py')
assert not is_blacklisted_path('foo.py')
assert not is_blacklisted_path('foo/xvendor/bar.py')
assert not is_blacklisted_path('foo/vendorx/bar.py')
assert is_blacklisted_path('foo/vendor/bar.py')
assert is_blacklisted_path('foo/vendored/bar.py')
assert is_blacklisted_path('foo/vendored/bar/thing.py')
assert is_blacklisted_path('foo/six.py')
def test_is_non_library_module(self) -> None:
assert not is_non_library_module('foo')
assert not is_non_library_module('foo.bar')
# The following could be test modules, but we are very conservative and
# don't treat them as such since they could plausibly be real modules.
assert not is_non_library_module('foo.bartest')
assert not is_non_library_module('foo.bartests')
assert not is_non_library_module('foo.testbar')
assert is_non_library_module('foo.test')
assert is_non_library_module('foo.test.foo')
assert is_non_library_module('foo.tests')
assert is_non_library_module('foo.tests.foo')
assert is_non_library_module('foo.testing.foo')
assert is_non_library_module('foo.SelfTest.foo')
assert is_non_library_module('foo.test_bar')
assert is_non_library_module('foo.bar_tests')
assert is_non_library_module('foo.testing')
assert is_non_library_module('foo.conftest')
assert is_non_library_module('foo.bar_test_util')
assert is_non_library_module('foo.bar_test_utils')
assert is_non_library_module('foo.bar_test_base')
assert is_non_library_module('foo.setup')
assert is_non_library_module('foo.__main__')
class StubgenPythonSuite(DataSuite):
"""Data-driven end-to-end test cases that generate stub files.
You can use these magic test case name suffixes:
*_semanal
Run semantic analysis (slow as this uses real stubs -- only use
when necessary)
*_import
Import module and perform runtime introspection (in the current
process!)
You can use these magic comments:
# flags: --some-stubgen-option ...
Specify custom stubgen options
# modules: module1 module2 ...
Specify which modules to output (by default only 'main')
"""
required_out_section = True
base_path = '.'
files = ['stubgen.test']
def run_case(self, testcase: DataDrivenTestCase) -> None:
with local_sys_path_set():
self.run_case_inner(testcase)
def run_case_inner(self, testcase: DataDrivenTestCase) -> None:
extra = [] # Extra command-line args
mods = [] # Module names to process
source = '\n'.join(testcase.input)
for file, content in testcase.files + [('./main.py', source)]:
# Strip ./ prefix and .py suffix.
mod = file[2:-3].replace('/', '.')
if mod.endswith('.__init__'):
mod, _, _ = mod.rpartition('.')
mods.append(mod)
if '-p ' not in source:
extra.extend(['-m', mod])
with open(file, 'w') as f:
f.write(content)
options = self.parse_flags(source, extra)
modules = self.parse_modules(source)
out_dir = 'out'
try:
try:
if not testcase.name.endswith('_import'):
options.no_import = True
if not testcase.name.endswith('_semanal'):
options.parse_only = True
generate_stubs(options)
a = [] # type: List[str]
for module in modules:
fnam = module_to_path(out_dir, module)
self.add_file(fnam, a, header=len(modules) > 1)
except CompileError as e:
a = e.messages
assert_string_arrays_equal(testcase.output, a,
'Invalid output ({}, line {})'.format(
testcase.file, testcase.line))
finally:
for mod in mods:
if mod in sys.modules:
del sys.modules[mod]
shutil.rmtree(out_dir)
def parse_flags(self, program_text: str, extra: List[str]) -> Options:
flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE)
if flags:
flag_list = flags.group(1).split()
else:
flag_list = []
options = parse_options(flag_list + extra)
if '--verbose' not in flag_list:
options.quiet = True
else:
options.verbose = True
return options
def parse_modules(self, program_text: str) -> List[str]:
modules = re.search('# modules: (.*)$', program_text, flags=re.MULTILINE)
if modules:
return modules.group(1).split()
else:
return ['main']
def add_file(self, path: str, result: List[str], header: bool) -> None:
if not os.path.exists(path):
result.append('<%s was not generated>' % path.replace('\\', '/'))
return
if header:
result.append('# {}'.format(path[4:]))
with open(path, encoding='utf8') as file:
result.extend(file.read().splitlines())
self_arg = ArgSig(name='self')
class StubgencSuite(unittest.TestCase):
"""Unit tests for stub generation from C modules using introspection.
Note that these don't cover a lot!
"""
def test_infer_hash_sig(self) -> None:
assert_equal(infer_method_sig('__hash__'), [self_arg])
def test_infer_getitem_sig(self) -> None:
assert_equal(infer_method_sig('__getitem__'), [self_arg, ArgSig(name='index')])
def test_infer_setitem_sig(self) -> None:
assert_equal(infer_method_sig('__setitem__'),
[self_arg, ArgSig(name='index'), ArgSig(name='object')])
def test_infer_binary_op_sig(self) -> None:
for op in ('eq', 'ne', 'lt', 'le', 'gt', 'ge',
'add', 'radd', 'sub', 'rsub', 'mul', 'rmul'):
assert_equal(infer_method_sig('__%s__' % op), [self_arg, ArgSig(name='other')])
def test_infer_unary_op_sig(self) -> None:
for op in ('neg', 'pos'):
assert_equal(infer_method_sig('__%s__' % op), [self_arg])
def test_generate_c_type_stub_no_crash_for_object(self) -> None:
output = [] # type: List[str]
mod = ModuleType('module', '') # any module is fine
imports = [] # type: List[str]
generate_c_type_stub(mod, 'alias', object, output, imports)
assert_equal(imports, [])
assert_equal(output[0], 'class alias:')
def test_generate_c_type_stub_variable_type_annotation(self) -> None:
# This class mimics the stubgen unit test 'testClassVariable'
class TestClassVariableCls:
x = 1
output = [] # type: List[str]
imports = [] # type: List[str]
mod = ModuleType('module', '') # any module is fine
generate_c_type_stub(mod, 'C', TestClassVariableCls, output, imports)
assert_equal(imports, [])
assert_equal(output, ['class C:', ' x: Any = ...'])
def test_generate_c_type_inheritance(self) -> None:
class TestClass(KeyError):
pass
output = [] # type: List[str]
imports = [] # type: List[str]
        mod = ModuleType('module', '')
generate_c_type_stub(mod, 'C', TestClass, output, imports)
assert_equal(output, ['class C(KeyError): ...', ])
assert_equal(imports, [])
def test_generate_c_type_inheritance_same_module(self) -> None:
class TestBaseClass:
pass
class TestClass(TestBaseClass):
pass
output = [] # type: List[str]
imports = [] # type: List[str]
mod = ModuleType(TestBaseClass.__module__, '')
generate_c_type_stub(mod, 'C', TestClass, output, imports)
assert_equal(output, ['class C(TestBaseClass): ...', ])
assert_equal(imports, [])
def test_generate_c_type_inheritance_other_module(self) -> None:
import argparse
class TestClass(argparse.Action):
pass
output = [] # type: List[str]
imports = [] # type: List[str]
mod = ModuleType('module', '')
generate_c_type_stub(mod, 'C', TestClass, output, imports)
assert_equal(output, ['class C(argparse.Action): ...', ])
assert_equal(imports, ['import argparse'])
def test_generate_c_type_with_docstring(self) -> None:
class TestClass:
def test(self, arg0: str) -> None:
"""
test(self: TestClass, arg0: int)
"""
pass
output = [] # type: List[str]
imports = [] # type: List[str]
mod = ModuleType(TestClass.__module__, '')
generate_c_function_stub(mod, 'test', TestClass.test, output, imports,
self_var='self', class_name='TestClass')
assert_equal(output, ['def test(self, arg0: int) -> Any: ...'])
assert_equal(imports, [])
def test_generate_c_type_with_docstring_empty_default(self) -> None:
class TestClass:
def test(self, arg0: str = "") -> None:
"""
test(self: TestClass, arg0: str = "")
"""
pass
output = [] # type: List[str]
imports = [] # type: List[str]
mod = ModuleType(TestClass.__module__, '')
generate_c_function_stub(mod, 'test', TestClass.test, output, imports,
self_var='self', class_name='TestClass')
assert_equal(output, ['def test(self, arg0: str = ...) -> Any: ...'])
assert_equal(imports, [])
def test_generate_c_function_other_module_arg(self) -> None:
"""Test that if argument references type from other module, module will be imported."""
# Provide different type in python spec than in docstring to make sure, that docstring
# information is used.
def test(arg0: str) -> None:
"""
test(arg0: argparse.Action)
"""
pass
output = [] # type: List[str]
imports = [] # type: List[str]
mod = ModuleType(self.__module__, '')
generate_c_function_stub(mod, 'test', test, output, imports)
assert_equal(output, ['def test(arg0: argparse.Action) -> Any: ...'])
assert_equal(imports, ['import argparse'])
def test_generate_c_function_same_module_arg(self) -> None:
"""Test that if argument references type from same module but using full path, no module
will be imported, and type specification will be striped to local reference.
"""
# Provide different type in python spec than in docstring to make sure, that docstring
# information is used.
def test(arg0: str) -> None:
"""
test(arg0: argparse.Action)
"""
pass
output = [] # type: List[str]
imports = [] # type: List[str]
mod = ModuleType('argparse', '')
generate_c_function_stub(mod, 'test', test, output, imports)
assert_equal(output, ['def test(arg0: Action) -> Any: ...'])
assert_equal(imports, [])
def test_generate_c_function_other_module_ret(self) -> None:
"""Test that if return type references type from other module, module will be imported."""
def test(arg0: str) -> None:
"""
test(arg0: str) -> argparse.Action
"""
pass
output = [] # type: List[str]
imports = [] # type: List[str]
mod = ModuleType(self.__module__, '')
generate_c_function_stub(mod, 'test', test, output, imports)
assert_equal(output, ['def test(arg0: str) -> argparse.Action: ...'])
assert_equal(imports, ['import argparse'])
def test_generate_c_function_same_module_ret(self) -> None:
"""Test that if return type references type from same module but using full path,
no module will be imported, and type specification will be striped to local reference.
"""
def test(arg0: str) -> None:
"""
test(arg0: str) -> argparse.Action
"""
pass
output = [] # type: List[str]
imports = [] # type: List[str]
mod = ModuleType('argparse', '')
generate_c_function_stub(mod, 'test', test, output, imports)
assert_equal(output, ['def test(arg0: str) -> Action: ...'])
assert_equal(imports, [])
def test_generate_c_type_with_overload_pybind11(self) -> None:
class TestClass:
def __init__(self, arg0: str) -> None:
"""
__init__(*args, **kwargs)
Overloaded function.
1. __init__(self: TestClass, arg0: str) -> None
2. __init__(self: TestClass, arg0: str, arg1: str) -> None
"""
pass
output = [] # type: List[str]
imports = [] # type: List[str]
mod = ModuleType(TestClass.__module__, '')
generate_c_function_stub(mod, '__init__', TestClass.__init__, output, imports,
self_var='self', class_name='TestClass')
assert_equal(output, [
'@overload',
'def __init__(self, arg0: str) -> None: ...',
'@overload',
'def __init__(self, arg0: str, arg1: str) -> None: ...',
'@overload',
'def __init__(*args, **kwargs) -> Any: ...'])
assert_equal(set(imports), {'from typing import overload'})
class ArgSigSuite(unittest.TestCase):
def test_repr(self) -> None:
assert_equal(repr(ArgSig(name='asd"dsa')),
"ArgSig(name='asd\"dsa', type=None, default=False)")
assert_equal(repr(ArgSig(name="asd'dsa")),
'ArgSig(name="asd\'dsa", type=None, default=False)')
assert_equal(repr(ArgSig("func", 'str')),
"ArgSig(name='func', type='str', default=False)")
assert_equal(repr(ArgSig("func", 'str', default=True)),
"ArgSig(name='func', type='str', default=True)")
class IsValidTypeSuite(unittest.TestCase):
def test_is_valid_type(self) -> None:
assert is_valid_type('int')
assert is_valid_type('str')
assert is_valid_type('Foo_Bar234')
assert is_valid_type('foo.bar')
assert is_valid_type('List[int]')
assert is_valid_type('Dict[str, int]')
assert is_valid_type('None')
assert not is_valid_type('foo-bar')
assert not is_valid_type('x->y')
assert not is_valid_type('True')
assert not is_valid_type('False')
assert not is_valid_type('x,y')
assert not is_valid_type('x, y')
class ModuleInspectSuite(unittest.TestCase):
def test_python_module(self) -> None:
with ModuleInspect() as m:
p = m.get_package_properties('inspect')
assert p is not None
assert p.name == 'inspect'
assert p.file
assert p.path is None
assert p.is_c_module is False
assert p.subpackages == []
def test_python_package(self) -> None:
with ModuleInspect() as m:
p = m.get_package_properties('unittest')
assert p is not None
assert p.name == 'unittest'
assert p.file
assert p.path
assert p.is_c_module is False
assert p.subpackages
assert all(sub.startswith('unittest.') for sub in p.subpackages)
def test_c_module(self) -> None:
with ModuleInspect() as m:
p = m.get_package_properties('_socket')
assert p is not None
assert p.name == '_socket'
assert p.file
assert p.path is None
assert p.is_c_module is True
assert p.subpackages == []
def test_non_existent(self) -> None:
with ModuleInspect() as m:
with self.assertRaises(InspectError) as e:
m.get_package_properties('foobar-non-existent')
assert str(e.exception) == "No module named 'foobar-non-existent'"
def module_to_path(out_dir: str, module: str) -> str:
fnam = os.path.join(out_dir, '{}.pyi'.format(module.replace('.', '/')))
if not os.path.exists(fnam):
alt_fnam = fnam.replace('.pyi', '/__init__.pyi')
if os.path.exists(alt_fnam):
return alt_fnam
return fnam
|
python
|
# Generated by Django 3.2.12 on 2022-02-16 23:46
import django.core.validators
from django.db import migrations, models
import re
class Migration(migrations.Migration):
dependencies = [
('customer', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='customer',
name='user_id',
),
migrations.AddField(
model_name='customer',
name='phone_number',
field=models.CharField(default='n/a', help_text="Phone number must be entered in the format: '+27815742271'. Up to 11 digits allowed.", max_length=12, unique=True, validators=[django.core.validators.RegexValidator(re.compile('^\\+?27?[6-8][0-9]{8}$'), 'Enter a valid phone number', 'Invalid phone number')], verbose_name='phone_number'),
preserve_default=False,
),
migrations.AddField(
model_name='customer',
name='username',
field=models.CharField(db_index=True, default='n/a', help_text='Required. 255 characters or fewer. Letters, numbers and @/./+/-/_ characters', max_length=255, unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[\\w.@+-]+$'), 'Enter a valid username.', 'invalid')], verbose_name='Username'),
preserve_default=False,
),
]
|
python
|
######################################################################
#
# File: b2/download_dest.py
#
# Copyright 2019 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################
from b2sdk.download_dest import * # noqa
import b2._sdk_deprecation
b2._sdk_deprecation.deprecate_module('b2.download_dest')
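# Note: this module is just a compatibility shim; new code should import from
# b2sdk.download_dest directly, as deprecate_module() marks the old
# b2.download_dest import path as deprecated.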
|
python
|
# AUTOGENERATED BY NBDEV! DO NOT EDIT!
__all__ = ["index", "modules", "custom_doc_links", "git_url"]
index = {"index_flow": "00_core.ipynb",
"query_flow": "00_core.ipynb",
"slugify": "01_loader.ipynb",
"get_image_files": "01_loader.ipynb",
"verify_image": "01_loader.ipynb",
"device": "03_encoder.ipynb",
"archive_loader": "01_loader.ipynb",
"db_loader": "01_loader.ipynb",
"treemap_loader": "01_loader.ipynb",
"make_dataset": "02_crafter.ipynb",
"pil_loader": "02_crafter.ipynb",
"DatasetImagePaths": "02_crafter.ipynb",
"clip_transform": "02_crafter.ipynb",
"crafter": "02_crafter.ipynb",
"preproc": "02_crafter.ipynb",
"model": "03_encoder.ipynb",
"image_encoder": "03_encoder.ipynb",
"text_encoder": "03_encoder.ipynb",
"image_query_encoder": "03_encoder.ipynb",
"join_all": "04_indexer.ipynb",
"build_treemap": "04_indexer.ipynb",
"save_archives": "04_indexer.ipynb",
"ranker": "05_ranker.ipynb",
"nns_to_files": "05_ranker.ipynb",
"app": "07_cli.ipynb",
"recall": "07_cli.ipynb",
"serve": "07_cli.ipynb",
"__main__": "07_cli.ipynb",
"get_image": "08_jupyter_gui.ipynb",
"get_grid": "08_jupyter_gui.ipynb",
"update_tabs": "08_jupyter_gui.ipynb",
"appPage": "08_jupyter_gui.ipynb",
"st_redirect": "09_streamlit_app.ipynb",
"st_stdout": "09_streamlit_app.ipynb",
"st_stderr": "09_streamlit_app.ipynb",
"send_image_query": "09_streamlit_app.ipynb",
"send_text_query": "09_streamlit_app.ipynb",
"path": "09_streamlit_app.ipynb",
"text_query": "09_streamlit_app.ipynb",
"image_query": "09_streamlit_app.ipynb",
"im_display_zone": "09_streamlit_app.ipynb",
"logbox": "09_streamlit_app.ipynb",
"sizes": "09_streamlit_app.ipynb"}
modules = ["core.py",
"loader.py",
"crafter.py",
"encoder.py",
"indexer.py",
"ranker.py",
"cli.py",
"gui.py",
"streamlit_app.py"]
doc_url = "https://deepfates.github.io/memery/"
git_url = "https://github.com/deepfates/memery/tree/main/"
def custom_doc_links(name): return None
|
python
|
from checkov.common.models.enums import CheckCategories
from checkov.terraform.checks.resource.base_resource_negative_value_check import BaseResourceNegativeValueCheck
class VMDisablePasswordAuthentication(BaseResourceNegativeValueCheck):
def __init__(self):
name = "Ensure that Virtual machine does not enable password authentication"
id = "CKV_AZURE_149"
supported_resources = ['azurerm_linux_virtual_machine_scale_set', 'azurerm_linux_virtual_machine']
categories = [CheckCategories.ENCRYPTION]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
    def get_forbidden_values(self) -> list:
        return [False]
def get_inspected_key(self) -> str:
return "disable_password_authentication"
check = VMDisablePasswordAuthentication()
|
python
|
"""Tests for encoder routines to tf.train.Exammple."""
from absl.testing import parameterized
import tensorflow as tf
from tensorflow_gnn.graph import graph_constants as gc
from tensorflow_gnn.graph import graph_tensor as gt
from tensorflow_gnn.graph import graph_tensor_encode as ge
from tensorflow_gnn.graph import graph_tensor_io as io
from tensorflow_gnn.graph import graph_tensor_random as gr
from tensorflow_gnn.graph import schema_utils as su
import tensorflow_gnn.proto.graph_schema_pb2 as schema_pb2
from tensorflow_gnn.utils import test_utils
# TODO(blais): Move this to graph_tensor_test_utils once ported.
def _find_first_available_tensor(gtensor: gt.GraphTensor) -> gc.Field:
for feature in gtensor.context.features.values():
return feature
for node_set in gtensor.node_sets.values():
for feature in node_set.features.values():
return feature
for edge_set in gtensor.edge_sets.values():
for feature in edge_set.features.values():
return feature
TEST_SHAPES = [[4],
[4, 3],
[None, 4],
[None, 4, 3],
[None, None, 4],
[None, None, 4, 3],
[4, None],
[4, 3, None],
[4, None, None],
[4, 3, None, None],
[5, None, 4, None, 3],
[None, 4, None, 3, None]]
class TestWriteExample(tf.test.TestCase, parameterized.TestCase):
# TODO(blais,aferludin): Replace this with graph_tensor_test_utils
def _compare_graph_tensors(self, rfeatures: gc.Field, pfeatures: gc.Field):
self.assertEqual(rfeatures.shape.as_list(), pfeatures.shape.as_list())
if isinstance(rfeatures, tf.RaggedTensor):
self.assertAllEqual(rfeatures.flat_values, pfeatures.flat_values)
rlist = rfeatures.nested_row_lengths()
plist = pfeatures.nested_row_lengths()
self.assertEqual(len(rlist), len(plist))
for rlengths, plengths in zip(rlist, plist):
self.assertAllEqual(rlengths, plengths)
else:
self.assertAllEqual(rfeatures, pfeatures)
@parameterized.parameters((None, True),
(None, False),
('someprefix_', True))
def test_write_random_graph_tensors(self, prefix, validate):
# Produce a stream of random graph tensors with a complex schema and verify
# that they parse back.
schema = test_utils.get_proto_resource(
'testdata/feature_repr.pbtxt', schema_pb2.GraphSchema())
spec = su.create_graph_spec_from_schema_pb(schema)
# TODO(blais): Turn this into a utility.
def random_graph_tensor_generator(spec) -> tf.data.Dataset:
def generator():
while True:
yield gr.random_graph_tensor(spec)
return tf.data.Dataset.from_generator(generator, output_signature=spec)
for rgraph in random_graph_tensor_generator(spec).take(16):
example = ge.write_example(rgraph, prefix=prefix)
serialized = tf.constant(example.SerializeToString())
pgraph = io.parse_single_example(spec, serialized,
prefix=prefix, validate=validate)
# TODO(blais): When graph_tensor_test_utils is ported, compare the entire
# contents.
rfeatures = _find_first_available_tensor(rgraph)
pfeatures = _find_first_available_tensor(pgraph)
self._compare_graph_tensors(rfeatures, pfeatures)
def _roundtrip_test(self, shape, create_tensor):
# Produce random tensors of various shapes, serialize them, and then run
# them back through our parser and finally check that the shapes are
# identical.
dtype = tf.float32
tensor_spec = (tf.TensorSpec(shape, dtype)
if tf.TensorShape(shape).is_fully_defined()
else tf.RaggedTensorSpec(shape, dtype))
spec = create_tensor(tensor_spec)
rgraph = gr.random_graph_tensor(spec, row_splits_dtype=tf.int64)
example = ge.write_example(rgraph)
serialized = tf.constant(example.SerializeToString())
pgraph = io.parse_single_example(spec, serialized, validate=True)
# Find the available tensor.
    # TODO(blais): Replace these with self.assertGraphTensorEq(rgraph, pgraph).
rfeatures = _find_first_available_tensor(rgraph)
pfeatures = _find_first_available_tensor(pgraph)
self._compare_graph_tensors(rfeatures, pfeatures)
@parameterized.parameters((shape,) for shape in TEST_SHAPES)
def test_write_various_shapes_as_context(self, shape):
def create_tensor(tensor_spec):
return gt.GraphTensorSpec.from_piece_specs(
context_spec=gt.ContextSpec.from_field_specs(
features_spec={'wings': tensor_spec}))
self._roundtrip_test(shape, create_tensor)
@parameterized.parameters((shape,) for shape in TEST_SHAPES)
def test_write_various_shapes_as_node_set(self, shape):
def create_tensor(tensor_spec):
return gt.GraphTensorSpec.from_piece_specs(
node_sets_spec={'butterfly': gt.NodeSetSpec.from_field_specs(
sizes_spec=tf.TensorSpec([1], tf.int64),
features_spec={'wings': tensor_spec})})
self._roundtrip_test(shape, create_tensor)
if __name__ == '__main__':
tf.test.main()
|
python
|
from django import forms
from fir_nuggets.models import NuggetForm
from incidents import models as incident_models
class LandingForm(NuggetForm):
new = forms.BooleanField(initial=True, required=False)
event = forms.ModelChoiceField(queryset=incident_models.Incident.objects.exclude(status='C'), required=False)
status = forms.CharField(required=True, widget=forms.HiddenInput, initial='O')
subject = forms.CharField(required=False)
concerned_business_lines = forms.ModelMultipleChoiceField(required=False, queryset=incident_models.BusinessLine.objects.all())
category = forms.ModelChoiceField(queryset=incident_models.IncidentCategory.objects.all(), required=False)
detection = forms.ModelChoiceField(required=False, queryset=incident_models.Label.objects.filter(group__name='detection'))
severity = forms.ChoiceField(required=False, choices=incident_models.SEVERITY_CHOICES)
description = forms.CharField(required=False, widget=forms.Textarea)
is_incident = forms.BooleanField(initial=False, required=False)
confidentiality = forms.ChoiceField(required=False, choices=incident_models.CONFIDENTIALITY_LEVEL, initial='1')
is_major = forms.BooleanField(initial=False, required=False)
actor = forms.ModelChoiceField(required=False, queryset=incident_models.Label.objects.filter(group__name='actor'))
plan = forms.ModelChoiceField(required=False, queryset=incident_models.Label.objects.filter(group__name='plan'))
def __init__(self, *args, **kwargs):
super(LandingForm, self).__init__(*args, **kwargs)
self.fields['raw_data'].widget.attrs['readonly'] = True
|
python
|
#!/usr/bin/env python
#====================================================
import copy
import uuid
import numpy as np
import threading
from Utilities.decorators import thread
#====================================================
class CircuitCritic(object):
def __init__(self, circuit_params):
self.circuit_params = circuit_params
self.CRITICIZED_CIRCUITS = []
self.EXTRA_TASKS = []
self.RECEIVED_EXTRA_EVALUATIONS = {}
import CircuitQuantifier.critics as critics
self.merit_functions = {}
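        # register every merit function exposed by the critics module; a function
        # named, e.g., merit_loss (hypothetical name) is keyed by the token after
        # its last underscore, i.e. 'loss'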
for merit in dir(critics):
if merit.startswith('__'): continue
self.merit_functions[merit.split('_')[-1]] = getattr(critics, merit)
##############################################################
def report_reevaluations(self, circuits):
for circuit in circuits:
self.RECEIVED_EXTRA_EVALUATIONS[circuit['circuit']['circuit_id']] = circuit
def run_merit_evaluation(self, merit_func, circuit_dict, merit_options, task):
merit_eval_dict = merit_func(circuit_dict, merit_options, circuit_params = self.circuit_params)
if len(merit_eval_dict['extra_tasks']) > 0:
# check if the merit evaluation requests new tasks
remaining_extra_circuit_ids = []
received_extra_task_evaluations = {}
for extra_task in merit_eval_dict['extra_tasks']:
# we need to modify the circuit_id of the proposed circuit parameters
new_circuit_id = str(uuid.uuid4())
extra_task['circuit']['circuit_id'] = new_circuit_id
self.EXTRA_TASKS.append(extra_task)
remaining_extra_circuit_ids.append(new_circuit_id)
while len(received_extra_task_evaluations) < len(remaining_extra_circuit_ids):
# check if we have any new evaluated circuits
extra_circuit_ids = list(self.RECEIVED_EXTRA_EVALUATIONS.keys())
for extra_circuit_id in extra_circuit_ids:
# memorize received evaluations
if extra_circuit_id in remaining_extra_circuit_ids:
received_extra_task_evaluations[extra_circuit_id] = self.RECEIVED_EXTRA_EVALUATIONS[extra_circuit_id]
del self.RECEIVED_EXTRA_EVALUATIONS[extra_circuit_id]
# call evaluator again
merit_eval_dict = merit_func(circuit_dict, merit_options,
circuit_params = self.circuit_params,
context_circuits = received_extra_task_evaluations.values())
circuit_dict['loss'] = merit_eval_dict['loss']
circuit_dict['context_circuits'] = list(received_extra_task_evaluations.values())
else:
circuit_dict['loss'] = merit_eval_dict['loss']
circuit_dict['context_circuits'] = None
self.CRITICIZED_CIRCUITS.append([circuit_dict, task])
##############################################################
@thread
def criticize_circuit(self, circuit, task_set, task):
# circuit: dict | information about circuit
merit = task_set.settings['merit']
merit_options = task_set.settings['merit_options']
        # check if the simulation timed out; record an undefined loss so the
        # circuit is still reported back instead of being silently dropped
        if 'PLACEHOLDER' in circuit['measurements']:
            circuit['loss'] = np.nan
            circuit['context_circuits'] = None
            self.CRITICIZED_CIRCUITS.append([circuit, task])
        # use the specified merit function to calculate the loss
        else:
            if merit not in self.merit_functions:
print('# ERROR | ... could not find merit function: %s' % merit)
return None
# merit function needs to be put on a separate thread in case it likes to launch new tasks
merit_func = self.merit_functions[merit]
self.run_merit_evaluation(merit_func, circuit, merit_options, task)
def get_requested_tasks(self):
new_tasks = copy.deepcopy(self.EXTRA_TASKS)
for new_task in new_tasks:
self.EXTRA_TASKS.pop(0)
return new_tasks
def criticize_circuits(self, circuits, task_set, tasks):
for circuit_index, circuit in enumerate(circuits):
self.criticize_circuit(circuit, task_set, tasks[circuit_index])
def get_criticized_circuits(self):
circuits = copy.deepcopy(self.CRITICIZED_CIRCUITS)
for circuit in circuits:
self.CRITICIZED_CIRCUITS.pop(0)
return circuits
def get_extra_tasks(self):
circuits = copy.deepcopy(self.EXTRA_TASKS)
for circuit in circuits:
self.EXTRA_TASKS.pop(0)
return circuits
|
python
|
# -*- coding: utf-8 -*-
"""
easybimehlanding
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
import easybimehlanding.models.travel_insurance_policy_extend
class TravelInsurancePolicyExtendView(object):
"""Implementation of the 'TravelInsurancePolicyExtendView' model.
TODO: type model description here.
Attributes:
travel_insurance_policy_extend (TravelInsurancePolicyExtend): TODO:
type description here.
travel_insurance_policy_extend_ages (list of string): TODO: type
description here.
travel_insurance_policy_extend_passengers (list of string): TODO: type
description here.
"""
# Create a mapping from Model property names to API property names
_names = {
"travel_insurance_policy_extend":'travelInsurancePolicyExtend',
"travel_insurance_policy_extend_ages":'travelInsurancePolicyExtendAges',
"travel_insurance_policy_extend_passengers":'travelInsurancePolicyExtendPassengers'
}
def __init__(self,
travel_insurance_policy_extend=None,
travel_insurance_policy_extend_ages=None,
travel_insurance_policy_extend_passengers=None):
"""Constructor for the TravelInsurancePolicyExtendView class"""
# Initialize members of the class
self.travel_insurance_policy_extend = travel_insurance_policy_extend
self.travel_insurance_policy_extend_ages = travel_insurance_policy_extend_ages
self.travel_insurance_policy_extend_passengers = travel_insurance_policy_extend_passengers
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
travel_insurance_policy_extend = easybimehlanding.models.travel_insurance_policy_extend.TravelInsurancePolicyExtend.from_dictionary(dictionary.get('travelInsurancePolicyExtend')) if dictionary.get('travelInsurancePolicyExtend') else None
travel_insurance_policy_extend_ages = dictionary.get('travelInsurancePolicyExtendAges')
travel_insurance_policy_extend_passengers = dictionary.get('travelInsurancePolicyExtendPassengers')
# Return an object of this model
return cls(travel_insurance_policy_extend,
travel_insurance_policy_extend_ages,
travel_insurance_policy_extend_passengers)
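# Example round-trip (hypothetical payload; the keys follow the _names mapping
# defined above):
#   view = TravelInsurancePolicyExtendView.from_dictionary({
#       'travelInsurancePolicyExtendAges': ['0-12', '13-64'],
#       'travelInsurancePolicyExtendPassengers': ['2', '1']
#   })
#   assert view.travel_insurance_policy_extend is None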
|
python
|
# Copyright (c) 2016-2020, The Bifrost Authors. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Bifrost Authors nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from bifrost.blocks.copy import copy, CopyBlock
from bifrost.blocks.transpose import transpose, TransposeBlock
from bifrost.blocks.reverse import reverse, ReverseBlock
from bifrost.blocks.fft import fft, FftBlock
from bifrost.blocks.fftshift import fftshift, FftShiftBlock
from bifrost.blocks.fdmt import fdmt, FdmtBlock
from bifrost.blocks.detect import detect, DetectBlock
from bifrost.blocks.guppi_raw import read_guppi_raw, GuppiRawSourceBlock
from bifrost.blocks.print_header import print_header, PrintHeaderBlock
from bifrost.blocks.sigproc import read_sigproc, SigprocSourceBlock
from bifrost.blocks.sigproc import write_sigproc, SigprocSinkBlock
from bifrost.blocks.scrunch import scrunch, ScrunchBlock
from bifrost.blocks.accumulate import accumulate, AccumulateBlock
from bifrost.blocks.binary_io import BinaryFileReadBlock, BinaryFileWriteBlock
from bifrost.blocks.binary_io import binary_read, binary_write
from bifrost.blocks.unpack import unpack, UnpackBlock
from bifrost.blocks.quantize import quantize, QuantizeBlock
from bifrost.blocks.wav import read_wav, WavSourceBlock
from bifrost.blocks.wav import write_wav, WavSinkBlock
from bifrost.blocks.serialize import serialize, SerializeBlock, deserialize, DeserializeBlock
from bifrost.blocks.reduce import reduce, ReduceBlock
from bifrost.blocks.correlate import correlate, CorrelateBlock
from bifrost.blocks.convert_visibilities import convert_visibilities, ConvertVisibilitiesBlock
try: # Avoid error if portaudio library not installed
    from bifrost.blocks.audio import read_audio, AudioSourceBlock
except ImportError:
    pass
try: # Avoid error if psrdada library not installed
    from bifrost.blocks.psrdada import read_psrdada_buffer, PsrDadaSourceBlock
except ImportError:
    pass
|
python
|
from .clock import Clock
from .identity import Identity
from .license import License
from .note import Note
from .resource import Resource
__all__ = ["Clock", "Identity", "License", "Note", "Resource"]
|
python
|
from behave import *
from src.hamming import distance
from assertpy import assert_that
use_step_matcher("re")
@given("two strands")
def step_impl(context):
context.distance = distance
@when("(?P<strand1>.+) and (?P<strand2>.+) are same length")
def step_impl(context, strand1, strand2):
context.result = context.distance(strand1, strand2)
@then("result should be (?P<result>.+)")
def step_impl(context, result):
assert_that(context.result).is_equal_to(int(result))
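# A feature file driving these steps might read as follows (illustrative; the
# strands and expected distance are the classic Hamming exercise example):
#
#   Scenario: long different strands
#     Given two strands
#     When GAGCCTACTAACGGGAT and CATCGTAATGACGGCCT are same length
#     Then result should be 7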
|
python
|
#
# This file is an example to set the environment.
# The configs will be used in dmrgci.py and chemps2.py
#
import os
from pyscf import lib
# To install Block as the FCI solver for CASSCF, see
# http://sunqm.github.io/Block/build.html
# https://github.com/sanshar/Block
BLOCKEXE = '/path/to/Block/block.spin_adapted'
BLOCKEXE_COMPRESS_NEVPT = '/path/to/serially/compiled/Block/block.spin_adapted'
#BLOCKSCRATCHDIR = os.path.join('./scratch', str(os.getpid()))
BLOCKSCRATCHDIR = os.path.join(lib.param.TMPDIR, str(os.getpid()))
#BLOCKRUNTIMEDIR = '.'
BLOCKRUNTIMEDIR = str(os.getpid())
MPIPREFIX = 'mpirun' # change to srun for SLURM job system
# Use ChemPS2 as the FCI solver for CASSCF
# building PyChemPS2, a python module will be generated in
# /path/to/ChemPS2/build/PyChemPS2
# see more details in the ChemPS2 document
# https://github.com/SebWouters/CheMPS2
PYCHEMPS2BIN = '/path/to/CheMPS2/build/PyCheMPS2/PyCheMPS2.so'
|
python
|
from nipype.interfaces.base import BaseInterface, \
BaseInterfaceInputSpec, traits, File, TraitedSpec, InputMultiPath, Directory
from nipype.utils.filemanip import split_filename
import nibabel as nb
import numpy as np
import os
class ConsensusInputSpec(BaseInterfaceInputSpec):
in_Files = traits.Either(InputMultiPath(File(exists=True)),
Directory(exists=True),
traits.Str(),
traits.List(),
mandatory=True)
maskfile = File(exists=True, desc='total target mask', mandatory=True)
class ConsensusOutputSpec(TraitedSpec):
#out_File = File(exists=True, desc="out_File")
variation_mat = File(exists=True, desc="variation_mat")
consensus_mat = File(exists=True, desc="consensus_mat")
class Consensus(BaseInterface):
input_spec = ConsensusInputSpec
output_spec = ConsensusOutputSpec
def _get_filelist(self, trait_input):
if os.path.isdir(trait_input[0]):
filelist = []
for directory in trait_input:
for root, dirnames, fnames in os.walk(directory):
for f in fnames:
if f.endswith('.nii'):
filelist.append(os.path.join(root,f))
return filelist
if os.path.isfile(trait_input[0]):
return trait_input
def makeConsensus(self, eachFile, mask):
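        # build a co-assignment (consensus) matrix over the in-mask voxels:
        # entry (j, k) is 1 when voxels j and k share the same cluster label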
clustermap=nb.load(eachFile).get_data()
maskedmap = clustermap[np.where(np.squeeze(mask))]
consensus = np.zeros((len(maskedmap),len(maskedmap)))
for j in range(len(maskedmap)):
consensus[j] = maskedmap == maskedmap[j]
return consensus
def _run_interface(self, runtime):
mask = nb.load(self.inputs.maskfile).get_data()
src_paths = self._get_filelist(self.inputs.in_Files)
_, base, _ = split_filename(self.inputs.in_Files[0])
        # size the accumulator by the number of in-mask voxels, which is the
        # dimension of the matrices returned by makeConsensus()
        cons_dim = np.count_nonzero(np.squeeze(mask))
        totalConsensus = np.zeros((cons_dim, cons_dim), dtype=np.float64)
        for src_path in src_paths:
            totalConsensus += self.makeConsensus(src_path, mask)
##average across all consensus instances and output##
totalConsensus = totalConsensus/len(src_paths)
cImg = nb.Nifti1Image(totalConsensus, None)
nb.save(cImg, os.path.abspath(base+'_ConsensusMat.nii'))
##make consensus into stability measure## remove stability measure for now...
#likeness = abs(totalConsensus-0.5)
#stability = np.mean(likeness,axis=0)
##make into NiftiImage##
#nImg = nb.Nifti1Image(stability, None)
#nb.save(nImg, os.path.abspath(base+'_Stability.nii'))
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
_, base, _ = split_filename(self.inputs.in_Files[0])
#outputs["out_File"] = os.path.abspath(base+'_Stability.nii')
outputs["variation_mat"] = os.path.abspath(base+'_VariationMat.nii')
outputs["consensus_mat"] = os.path.abspath(base+'_ConsensusMat.nii')
return outputs
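# A minimal usage sketch, assuming a directory of .nii cluster maps and a
# mask file (placeholder paths, not part of the source). Note that
# variation_mat is declared as an output but never written by
# _run_interface, so collecting outputs may fail until that is implemented:
if __name__ == '__main__':
    consensus = Consensus()
    consensus.inputs.in_Files = ['/path/to/clustermap_dir']
    consensus.inputs.maskfile = '/path/to/mask.nii'
    consensus.run()  # writes <base>_ConsensusMat.nii to the working directory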
|
python
|
#!/usr/bin/env python
## Program: VMTK
## Module: $RCSfile: vmtksurfacefeatureedges.py,v $
## Language: Python
## Date: $Date: 2005/09/14 09:49:59 $
## Version: $Revision: 1.6 $
## Copyright (c) Luca Antiga, David Steinman. All rights reserved.
## See LICENSE file for details.
## This software is distributed WITHOUT ANY WARRANTY; without even
## the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
## PURPOSE. See the above copyright notices for more information.
from __future__ import absolute_import #NEEDS TO STAY AS TOP LEVEL MODULE FOR Py2-3 COMPATIBILITY
import vtk
from vmtk import vtkvmtk
import sys
from vmtk import pypes
class vmtkSurfaceFeatureEdges(pypes.pypeScript):
def __init__(self):
pypes.pypeScript.__init__(self)
self.Surface = None
self.BoundaryEdges = 1
self.FeatureEdges = 1
self.FeatureAngle = 30
self.NonManifoldEdges = 0
self.Coloring = 0
self.SetScriptName('vmtksurfacefeatureedges')
self.SetScriptDoc('extract feature edges from a surface')
self.SetInputMembers([
['Surface','i','vtkPolyData',1,'','the input surface','vmtksurfacereader'],
['BoundaryEdges','boundaryedges','bool',1,'',''],
['FeatureEdges','featureedges','bool',1,'',''],
['FeatureAngle','featureangle','float',1,'(0,)',''],
['NonManifoldEdges','nonmanifoldedges','bool',1,'',''],
['Coloring','coloring','bool',1,'',''],
])
self.SetOutputMembers([
['Surface','o','vtkPolyData',1,'','the output feature edges','vmtksurfacewriter']
])
def Execute(self):
        if self.Surface is None:
self.PrintError('Error: No Surface.')
extractor = vtk.vtkFeatureEdges()
extractor.SetInputData(self.Surface)
extractor.SetBoundaryEdges(self.BoundaryEdges)
extractor.SetFeatureEdges(self.FeatureEdges)
extractor.SetFeatureAngle(self.FeatureAngle)
extractor.SetNonManifoldEdges(self.NonManifoldEdges)
extractor.SetColoring(self.Coloring)
extractor.CreateDefaultLocator()
extractor.Update()
self.Surface = extractor.GetOutput()
if __name__=='__main__':
main = pypes.pypeMain()
main.Arguments = sys.argv
main.Execute()
|
python
|
# -*- coding: utf-8 -*-
#______________________________________________________________________________
#______________________________________________________________________________
#
# Coded by Daniel González Duque
#______________________________________________________________________________
#______________________________________________________________________________
'''
This package uses functions from Matlab to run models made in COMSOL; it is
necessary to have access to the main COMSOL folder to run the algorithms
in Matlab.
This package can also open the information from exported files and use it
to generate new data. Although this package is focused on flow through
porous media in 2D right now, it can be used widely for other applications.
____________________________________________________________________________
This package is free to use and can be modified; if you have any
problem, please contact the programmer at the following e-mails:
- [email protected]
- [email protected]
____________________________________________________________________________
'''
from setuptools import setup
setup(
name="pyDGDutil",
version="1.0.1",
author="Daniel González Duque",
description="Complementary scripts of other codes",
license="MIT",
packages=["pyDGDutil"],
    python_requires='>=3.6'
)
|
python
|
from app import db
import json
import os
import requests
class Movies(db.Model):
"""
Models the data of movies related to a given location.
"""
id = db.Column(db.Integer, primary_key=True)
movies = db.Column(db.Text)
@staticmethod
def create_entry(query):
"""
Takes in a search query.
Retrieves MovieDB API movie data.
        Returns a Movies instance.
"""
MOVIE_API_KEY = os.getenv('MOVIE_API_KEY')
        url = 'https://api.themoviedb.org/3/search/movie'
        url += f'?api_key={MOVIE_API_KEY}&language=en-US&page=1&query={query}'
api_data = requests.get(url).json()
return Movies.instantiate_movies(api_data)
@staticmethod
def instantiate_movies(api_data):
"""
Takes in MovieDB API data.
Returns a Movies object.
"""
movies = []
for movie in api_data['results'][:5]:
title = movie['title']
overview = movie['overview']
average_votes = movie['vote_average']
total_votes = movie['vote_count']
image_url = 'https://image.tmdb.org/t/p/w500' + movie['poster_path']
popularity = movie['popularity']
released_on = movie['release_date']
movies.append({
'title': title,
'overview': overview,
'average_votes': average_votes,
'total_votes': total_votes,
'image_url': image_url,
'popularity': popularity,
'released_on': released_on
})
        # The movies column is db.Text, so serialize the list to JSON before storing.
        return Movies(movies=json.dumps(movies))
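# A minimal usage sketch (assumes MOVIE_API_KEY is set and a Flask app
# context with a configured database; 'Seattle' is an arbitrary query):
#
#   with app.app_context():
#       entry = Movies.create_entry('Seattle')
#       db.session.add(entry)
#       db.session.commit()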
|
python
|
#
# Copyright (c) 2015-2021 Thierry Florac <tflorac AT ulthar.net>
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
"""PyAMS_zfiles.zmi module
This module defines base documents container management views.
"""
from zope.interface import Interface
from pyams_form.ajax import ajax_form_config
from pyams_form.field import Fields
from pyams_layer.interfaces import IPyAMSLayer
from pyams_security.interfaces.base import VIEW_SYSTEM_PERMISSION
from pyams_site.interfaces import ISiteRoot
from pyams_skin.interfaces.viewlet import IBreadcrumbItem
from pyams_utils.adapter import adapter_config
from pyams_utils.registry import get_utility, query_utility
from pyams_utils.url import absolute_url
from pyams_viewlet.viewlet import viewlet_config
from pyams_zfiles.interfaces import IDocumentContainer, MANAGE_APPLICATION_PERMISSION, \
MANAGE_DOCUMENT_PERMISSION
from pyams_zmi.form import AdminEditForm
from pyams_zmi.interfaces import IAdminLayer
from pyams_zmi.interfaces.table import ITableElementEditor
from pyams_zmi.interfaces.viewlet import IControlPanelMenu, IMenuHeader, IPropertiesMenu, \
ISiteManagementMenu
from pyams_zmi.table import TableElementEditor
from pyams_zmi.zmi.viewlet.breadcrumb import AdminLayerBreadcrumbItem
from pyams_zmi.zmi.viewlet.menu import NavigationMenuItem
__docformat__ = 'restructuredtext'
from pyams_zfiles import _ # pylint: disable=ungrouped-imports
@viewlet_config(name='document-container.menu',
context=ISiteRoot, layer=IAdminLayer,
manager=IControlPanelMenu, weight=40,
permission=VIEW_SYSTEM_PERMISSION)
class DocumentContainerMenu(NavigationMenuItem):
"""Document container menu"""
icon_class = 'far fa-file-archive'
    def __new__(cls, context, request, view, manager):  # pylint: disable=unused-argument
container = query_utility(IDocumentContainer)
if (container is None) or not container.show_home_menu:
return None
return NavigationMenuItem.__new__(cls)
def __init__(self, context, request, view, manager):
super().__init__(context, request, view, manager)
self.container = get_utility(IDocumentContainer)
@property
def label(self):
"""Label getter"""
return self.container.__name__
def get_href(self):
"""Menu URL getter"""
return absolute_url(self.container, self.request, 'admin')
@adapter_config(required=(IDocumentContainer, IAdminLayer, Interface, ISiteManagementMenu),
provides=IMenuHeader)
def document_container_menu_header(context, request, view, manager): # pylint: disable=unused-argument
"""Document container menu header"""
return _("Documents container")
@adapter_config(required=(IDocumentContainer, IAdminLayer, Interface),
provides=ITableElementEditor)
class DocumentContainerElementEditor(TableElementEditor):
"""Document container element editor"""
view_name = 'admin'
modal_target = False
def __new__(cls, context, request, view): # pylint: disable=unused-argument
if not request.has_permission(MANAGE_APPLICATION_PERMISSION, context=context) and \
not request.has_permission(MANAGE_DOCUMENT_PERMISSION, context=context):
return None
return TableElementEditor.__new__(cls)
@adapter_config(required=(IDocumentContainer, IAdminLayer, Interface),
provides=IBreadcrumbItem)
class DocumentContainerBreadcrumbItem(AdminLayerBreadcrumbItem):
"""Document container breadcrumb item"""
label = _("Documents container")
@viewlet_config(name='configuration.menu',
context=IDocumentContainer, layer=IAdminLayer,
manager=ISiteManagementMenu, weight=20,
permission=MANAGE_APPLICATION_PERMISSION,
provides=IPropertiesMenu)
class DocumentContainerPropertiesMenu(NavigationMenuItem):
"""Document container properties menu"""
label = _("Configuration")
icon_class = 'fas fa-sliders-h'
href = '#configuration.html'
@ajax_form_config(name='configuration.html',
context=IDocumentContainer, layer=IPyAMSLayer,
permission=MANAGE_APPLICATION_PERMISSION)
class DocumentContainerConfigurationEditForm(AdminEditForm):
"""Document container properties edit form"""
legend = _("Configuration")
fields = Fields(IDocumentContainer).omit('__parent__', '__name__')
|
python
|
from pydocstyle.checker import check
from pydocstyle.checker import violations
import testing
registry = violations.ErrorRegistry
_disabled_checks = [
'D202', # No blank lines allowed after function docstring
'D205', # 1 blank line required between summary line and description
]
def check_all_files():
for filename in testing.list_all_py_files():
for err in check([filename]):
            if err.code not in _disabled_checks:
yield err
def lookup_error_params(code):
for group in registry.groups:
for error_params in group.errors:
if error_params.code == code:
return error_params
violations = list(check_all_files())
if violations:
counts = dict()
for e in violations:
print(e)
counts[e.code] = counts.get(e.code, 0) + 1
for n, code in sorted([(n, code) for code, n in counts.items()], reverse=True):
p = lookup_error_params(code)
print('%s %8d %s' % (code, n, p.short_desc))
print('%s %8d violations' % ('tot', len(violations)))
# TODO: exit(1)
|
python
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import gsf
def Hex2(val):
return '0x' + ('%02x' % ord(val)).upper()
def Pieces(data, max_size):
"""Yield max_size components from data."""
for i in range(0, len(data), max_size):
yield data[i:i + max_size]
def DumpHex(filename, include_cpp=True):
gsf_file = gsf.GsfFile(filename)
if include_cpp:
print 'c++ setup:'
print
print ' #include <array>'
print ' using std::array;'
print
for record_num, record in enumerate(gsf_file):
if record_num:
print
header_data = record['header_data']
data = record['data']
type_str = record['record_type_str']
header_hex = [Hex2(v) for v in header_data]
data_hex = [Hex2(v) for v in data]
print 'record: ', record_num, type_str
print 'sizes = (%d, %d, %d)' % (record['size_total'],
len(header_hex),
len(data_hex))
print 'header = (', ', '.join(header_hex), ')'
print 'data = (', ', '.join(data_hex), ')'
if not include_cpp:
continue
print 'c++ data:'
print
print ' // Record type:', type_str
        print ' const uint32_t size_%d = %d;' % (record_num, len(data))
print ' array<uint8_t, size_%d> data_%d = {{' % (record_num, record_num)
for piece in Pieces(data, 11):
print ' ' + ', '.join([Hex2(v) for v in piece]) + ','
print ' }};'
def main():
parser = argparse.ArgumentParser()
parser.add_argument('filenames', metavar='N', type=str, nargs='+',
help='Files to get info about.')
args = parser.parse_args()
    for filename in args.filenames:
        DumpHex(filename)
if __name__ == '__main__':
    main()
|
python
|
# Generated by Django 3.0.2 on 2020-10-13 07:23
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0005_thirdpartycreds'),
]
operations = [
migrations.AlterModelOptions(
name='thirdpartycreds',
options={'verbose_name': 'Third Party Credentials', 'verbose_name_plural': 'Third Party Credentials'},
),
]
|
python
|
from skynet.common.base_daos import BaseDao
class BaseModel(object):
DEFAULT_DAO = BaseDao
def __init__(self, dao=None):
if dao is None:
dao = self.DEFAULT_DAO()
self.dao = dao
def populate(self, data):
        for k, v in data.items():  # .items() works on both Python 2 and 3
k_translated = self.translate(k)
if k_translated and hasattr(self, k_translated):
setattr(self, k_translated, v)
def translate(self, key):
return {}.get(key, key)
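# A minimal subclass sketch (the fields and key mapping are hypothetical,
# and the usage line assumes BaseDao() takes no arguments):
class HostModel(BaseModel):
    def __init__(self, dao=None):
        super(HostModel, self).__init__(dao)
        self.hostname = None
        self.ip = None
    def translate(self, key):
        # Map external keys onto attribute names.
        return {'host_name': 'hostname'}.get(key, key)
# host = HostModel(); host.populate({'host_name': 'web-1', 'ip': '10.0.0.5'})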
|
python
|
import os
import json
import html
from datetime import datetime, timedelta
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse
from auth import LEADERBOARD_API_TOKEN
app = FastAPI(redoc_url=None, docs_url=None)
app.token = None
LEADERBOARD = 'leaderboard/leaderboard.json'
app.add_middleware(
CORSMiddleware,
allow_origins="*",
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
class LeaderboardCache:
last_updated = None
data = None
@classmethod
def ensure_file_exists(cls):
if not os.path.exists(LEADERBOARD):
with open(LEADERBOARD, 'w') as fp:
fp.write('{}')
@classmethod
def load(cls):
with open(LEADERBOARD, "r") as fp:
cls.data = json.loads(fp.read())
cls.last_updated = datetime.now()
@classmethod
def dump(cls, data: dict):
with open(LEADERBOARD, "w") as fp:
json.dump(data, fp)
@classmethod
def get(cls):
if not cls.data:
raise HTTPException(500, "Missing data.")
return cls.data
@classmethod
def update(cls, data: str):
data = json.loads(data)
for _, user in data.items():
user['username'] = html.escape(user['username'])
cls.dump(data)
cls.data = data
LeaderboardCache.ensure_file_exists()
LeaderboardCache.load()
@app.post('/leaderboard')
async def post_leaderboard(request: Request):
if request.headers.get("X-Authorization") != LEADERBOARD_API_TOKEN:
raise HTTPException(401)
body = (await request.body()).decode()
LeaderboardCache.update(body)
return "OK"
@app.get('/leaderboard')
async def get_leaderboard():
return JSONResponse(LeaderboardCache.get())
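# A minimal client sketch using requests (hypothetical host and token; run
# the app first, e.g. with `uvicorn main:app`):
#
#   import requests
#   payload = {"1": {"username": "alice", "score": 42}}
#   requests.post("http://localhost:8000/leaderboard",
#                 data=json.dumps(payload),
#                 headers={"X-Authorization": "<LEADERBOARD_API_TOKEN>"})
#   print(requests.get("http://localhost:8000/leaderboard").json())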
|
python
|
# -*- encoding: utf-8 -*-
"""Handle root-services sessions endpoints."""
from .base import RootService
from ..decorators import dyndoc_insert, endpoint
from .responses.sessions import responses
@endpoint("openapi/root/v1/sessions/capabilities/")
class GetSessionCapabilities(RootService):
"""Get the sessions capabilities."""
@dyndoc_insert(responses)
def __init__(self):
"""Instantiate a GetSessionCapabilities request.
>>> import saxo_openapi
>>> import saxo_openapi.endpoints.rootservices as rs
>>> import json
>>> client = saxo_openapi.API(access_token=...)
>>> r = rs.sessions.GetSessionCapabilities()
>>> rv = client.request(r)
>>> print(rv)
Output::
{_v3_GetSessionCapabilities_resp}
"""
super(GetSessionCapabilities, self).__init__()
@endpoint("openapi/root/v1/sessions/capabilities/", "PUT", 202)
class ChangeSessionCapabilities(RootService):
"""Change sessions capabilities."""
RESPONSE_DATA = None
@dyndoc_insert(responses)
def __init__(self, data):
"""Instantiate a ChangeSessionCapabilities request.
>>> import saxo_openapi
>>> import saxo_openapi.endpoints.rootservices as rs
>>> import json
>>> client = saxo_openapi.API(access_token=...)
>>> data = {_v3_ChangeSessionCapabilities_body}
>>> r = rs.sessions.ChangeSessionCapabilities(data=data)
>>> rv = client.request(r)
>>> assert r.status_code == r.expected_status
No data is returned
"""
super(ChangeSessionCapabilities, self).__init__()
self.data = data
@endpoint("openapi/root/v1/sessions/events/subscriptions/", "POST", 201)
class CreateSessionCapabilitiesSubscription(RootService):
"""Set up a new session capabilities subscription. The data stream will
deliver updates from this point."""
@dyndoc_insert(responses)
def __init__(self, data):
"""Instantiate a ChangeSessionCapabilitiesSubscription request.
>>> import saxo_openapi
>>> import saxo_openapi.endpoints.rootservices as rs
>>> import json
>>> client = saxo_openapi.API(access_token=...)
>>> data = {_v3_CreateSessionCapabilitiesSubscription_body}
        >>> r = rs.sessions.CreateSessionCapabilitiesSubscription(data=data)
>>> rv = client.request(r)
>>> print(rv)
Output::
{_v3_CreateSessionCapabilitiesSubscription_resp}
"""
super(CreateSessionCapabilitiesSubscription, self).__init__()
self.data = data
@endpoint("openapi/root/v1/sessions/events/subscriptions/"
"{ContextId}/{ReferenceId}",
"DELETE", 202)
class RemoveSessionCapabilitiesSubscription(RootService):
"""Removes the subscription identified by the specified reference id.
(and streaming context id)."""
RESPONSE_DATA = None
@dyndoc_insert(responses)
def __init__(self, ContextId, ReferenceId):
"""Instantiate a RemoveSessionCapabilitiesSubscription request.
>>> import saxo_openapi
>>> import saxo_openapi.endpoints.rootservices as rs
>>> import json
>>> client = saxo_openapi.API(access_token=...)
        >>> r = rs.sessions.RemoveSessionCapabilitiesSubscription(
... ContextId=ContextId,
... ReferenceId=ReferenceId)
>>> rv = client.request(r)
        >>> assert r.status_code == r.expected_status
No data is returned.
"""
super(RemoveSessionCapabilitiesSubscription, self).__init__(
ContextId=ContextId,
ReferenceId=ReferenceId)
|
python
|
from __future__ import unicode_literals
from . import model
from . import collection
from . import fields
from . import related
|
python
|
from collection.property_dictionary import PropertyDict
from collection.xml_interface import XMLError
from collection.xml_interface import XMLInterface
from metadata.metadata_api import MetadataError
from metadata.metadata_api import Metadata
from image.envi import ENVIHeader
|
python
|
import json
import logging
import re
from datetime import datetime
from decimal import Decimal
from enum import Enum
from functools import singledispatch
from sys import version_info
from typing import Any, Optional, Tuple, Union
from urllib.parse import urlsplit
PY37 = version_info >= (3, 7)
class JSONEncoder(json.JSONEncoder):
def default(self, obj: Any) -> str:
try:
return convert_to_str(obj)
except TypeError:
return json.JSONEncoder.default(self, obj)
def get_host_port(uri: str) -> Tuple[Optional[str], Optional[int]]:
"""Get host and port from provided URI."""
split_uri = urlsplit(uri)
return split_uri.hostname, split_uri.port
def validate_topic_channel_name(name: str) -> None:
"""Validate topic/channel names.
    The regex is ``^[.a-zA-Z0-9_-]{2,64}(#ephemeral)?$``
    :raises AssertionError: Value does not match the regex.
    """
    assert re.match(
        r"^[.a-zA-Z0-9_\-]{2,64}(#ephemeral)?$", name,
    ), "Topic name must match the ^[.a-zA-Z0-9_-]{2,64}(#ephemeral)?$ regex"
@singledispatch
def convert_to_bytes(value: Any) -> bytes:
"""Dispatch for convertible types.
Allowed types: ``bytes``, ``bytearray``, ``str``, ``int``, ``float``,
``dict``, ``Decimal``, ``dataclass``.
:raises TypeError:
"""
if PY37:
from dataclasses import asdict, is_dataclass
if is_dataclass(value) and not isinstance(value, type):
return convert_to_bytes(asdict(value))
raise TypeError(
"Argument {} expected to be type of "
"bytes, bytearray, str, int, float, dict, Decimal, datetime "
"or dataclass".format(value),
)
@convert_to_bytes.register(bytes)
@convert_to_bytes.register(bytearray)
def _(value: Union[bytes, bytearray]) -> bytes:
"""Convert ``bytes`` or ``bytearray`` to bytes"""
return value
@convert_to_bytes.register(str)
def _str_to_bytes(value: str) -> bytes:
"""Convert ``str`` to bytes"""
return value.encode("utf-8")
@convert_to_bytes.register(int)
@convert_to_bytes.register(float)
@convert_to_bytes.register(Decimal)
def _numbers_to_bytes(value: Union[int, float, Decimal]) -> bytes:
"""Convert ``int``, ``float`` or ``Decimal`` to bytes"""
return str(value).encode("utf-8")
@convert_to_bytes.register(dict)
def _dict_to_bytes(value: dict) -> bytes:
"""Convert ``dict`` to bytes"""
return json.dumps(value, cls=JSONEncoder, separators=(",", ":")).encode("utf-8")
@convert_to_bytes.register(Enum)
def _enum_to_bytes(value: Enum) -> bytes:
"""Convert ``enum`` to bytes"""
return convert_to_bytes(value.name)
@convert_to_bytes.register(datetime)
def _datetime_to_bytes(value: datetime) -> bytes:
"""Convert ``datetime`` to bytes"""
return value.isoformat().encode("utf-8")
@singledispatch
def convert_to_str(value: Any) -> str:
"""Dispatch for convertible types.
Allowed types: ``bytes``, ``bytearray``, ``str``, ``int``, ``float``,
``dict``, ``Decimal``, ``dataclass``.
:raises TypeError:
"""
if PY37:
from dataclasses import asdict, is_dataclass
if is_dataclass(value) and not isinstance(value, type):
return convert_to_str(asdict(value))
raise TypeError(
"Argument {} expected to be type of "
"bytes, bytearray, str, int, float, dict, Decimal, datetime "
"or dataclass".format(value),
)
@convert_to_str.register(str)
def _str_to_str(value: str) -> str:
"""Convert ``str`` to ``str``"""
return value
@convert_to_str.register(bytes)
def _bytes_to_str(value: bytes) -> str:
"""Convert ``bytes`` to ``str``"""
return value.decode("utf-8")
@convert_to_str.register(bytearray)
def _bytearray_to_str(value: bytearray) -> str:
"""Convert ``bytearray`` to ``str``"""
return bytes(value).decode("utf-8")
@convert_to_str.register(int)
@convert_to_str.register(float)
@convert_to_str.register(Decimal)
def _numbers_to_str(value: Union[int, float, Decimal]) -> str:
"""Convert ``int``, ``float`` or ``Decimal`` to ``str``"""
return str(value)
@convert_to_str.register(dict)
def _dict_to_str(value: dict) -> str:
"""Convert ``dict`` to JSON string"""
return json.dumps(value)
@convert_to_str.register(Enum)
def _enum_to_str(value: Enum) -> str:
"""Convert ``enum`` to str"""
return convert_to_str(value.name)
@convert_to_str.register(datetime)
def _datetime_to_str(value: datetime) -> str:
"""Convert ``datetime`` to bytes"""
return value.isoformat()
def get_logger(
debug: bool = False, unique_name: Optional[str] = None,
) -> logging.Logger:
"""Get the ansq logger.
:params debug: Set up debug level.
:type debug: :class:`bool`
:params unique_name: Used to make all loggers unique.
:type unique_name: :class:`str`
"""
logger = logging.getLogger(f"ansq {unique_name}" if unique_name else "ansq")
log_format = "%(asctime)s - %(levelname)s - %(name)s: %(message)s"
logging.basicConfig(format=log_format)
logger.setLevel(logging.DEBUG if debug else logging.INFO)
return logger
def truncate_text(text: str, limit: int = 256) -> str:
"""Truncate a given `text` if the `limit` is reached"""
if limit <= 0:
raise ValueError("limit must be greater than 0")
return text[:limit] + "..." if len(text) > limit else text
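# A quick self-check of the conversion helpers (illustrative values only):
if __name__ == "__main__":
    assert convert_to_bytes("abc") == b"abc"
    assert convert_to_bytes({"a": 1}) == b'{"a":1}'
    assert convert_to_str(Decimal("1.5")) == "1.5"
    validate_topic_channel_name("my_topic#ephemeral")  # passes silently
    print(truncate_text("x" * 300))  # prints 256 "x"s followed by "..."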
|
python
|
"""
Adapted from https://github.com/kirubarajan/roft/blob/master/generation/interactive_test.py to
process a batch of inputs.
"""
import argparse
import json
import numpy as np
import os
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
def main(args):
np.random.seed(args.random_seed)
torch.manual_seed(args.random_seed)
if torch.cuda.is_available():
torch.cuda.manual_seed_all(args.random_seed)
tokenizer = AutoTokenizer.from_pretrained(args.model_name)
model = AutoModelForCausalLM.from_pretrained(args.model_name)
if torch.cuda.is_available():
model = model.cuda()
dirname = os.path.dirname(args.output_file)
if dirname:
os.makedirs(dirname, exist_ok=True)
with open(args.output_file, "w") as out:
with open(args.input_file, "r") as f:
for line in f:
data = json.loads(line)
name = data["name"]
ingredients = "\n".join(data["ingredients"])
input_text = f"HOW TO MAKE: {name}\nIngredients:\n{ingredients}."
input_tensor = tokenizer.encode(input_text, return_tensors="pt").to(
model.device
)
outputs = model.generate(
input_tensor,
do_sample=True,
top_p=args.top_p,
repetition_penalty=args.repetition_penalty,
pad_token_id=tokenizer.eos_token_id,
max_length=args.max_length,
)
recipe = [tokenizer.decode(x) for x in outputs][0]
out.write(json.dumps({"recipe": recipe}) + "\n")
if __name__ == "__main__":
argp = argparse.ArgumentParser()
argp.add_argument("--input-file", required=True)
argp.add_argument("--model-name", required=True)
argp.add_argument("--top-p", type=float, default=0.7)
argp.add_argument("--repetition-penalty", type=float, default=1.2)
argp.add_argument("--max-length", type=int, default=256)
argp.add_argument("--random-seed", type=int, default=4)
argp.add_argument("--output-file", required=True)
args = argp.parse_args()
main(args)
|
python
|
#!/bin/python3
import math
count = 0
def count_inversions(a):
length = len(a)
if (length <= 1):
return a
else:
midP = int(math.floor(length / 2))
left = a[:midP]
right = a[midP:]
return merge(count_inversions(left), count_inversions(right))
def merge(left, right):
global count
result = []
i = 0
j = 0
lenL = len(left)
lenR = len(right)
while(i < lenL and j < lenR):
if (left[i] <= right[j]):
result.append(left[i])
i += 1
else:
result.append(right[j])
count += lenL - i
j += 1
while (i < lenL):
result.append(left[i])
i += 1
while (j < lenR):
result.append(right[j])
j += 1
return result
a = [2, 1, 3, 1, 4, 2]
print(count_inversions(a))
print(count)
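# Worked example: in [2, 1, 3, 1, 4, 2] the out-of-order pairs are
# (2,1), (2,1), (3,1), (3,2) and (4,2), so the script prints the sorted
# list [1, 1, 2, 2, 3, 4] followed by the inversion count 5.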
|
python
|
import sklearn
from sklearn.linear_model import Perceptron
from sklearn.datasets import load_iris
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# load data
iris = load_iris()
df = pd.DataFrame(iris.data, columns=iris.feature_names)
df['label'] = iris.target
df.columns = [
'sepal length', 'sepal width', 'petal length', 'petal width', 'label'
]
sklearn.__version__
data = np.array(df.iloc[:100, [0, 1, -1]])
X, y = data[:,:-1], data[:,-1]
y = np.array([1 if i == 1 else -1 for i in y])
"""
clf = Perceptron(fit_intercept=True,
max_iter=1000,
shuffle=True)
clf.fit(X, y)
"""
clf = Perceptron(fit_intercept=True,
                 max_iter=1000,
                 # tol=None disables the default convergence-based stopping;
                 # compare the iteration counts with and without it
                 tol=None,
                 shuffle=True)
clf.fit(X, y)
# Weights assigned to the features.
print(clf.coef_)
# Intercept: constants in the decision function.
print(clf.intercept_)
# Figure size
plt.figure(figsize=(10, 10))
# Font settings so the Chinese title below renders correctly
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False
plt.title('鸢尾花线性数据示例')  # "Iris linear data example"
plt.scatter(data[:50, 0], data[:50, 1], c='b', label='Iris-setosa')
plt.scatter(data[50:100, 0], data[50:100, 1], c='orange', label='Iris-versicolor')
# Draw the perceptron's decision boundary
x_points = np.arange(4, 8)
y_ = -(clf.coef_[0][0]*x_points + clf.intercept_)/clf.coef_[0][1]
plt.plot(x_points, y_)
# Final touches
plt.legend()  # show the legend
plt.grid(False)  # hide the grid
plt.xlabel('sepal length')
plt.ylabel('sepal width')
plt.show()
|
python
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""A basic unit test for the Python interface of the BMG C++ Graph.infer method"""
import unittest
import beanmachine.ppl as bm
from beanmachine.ppl.inference import BMGInference
from torch import tensor
from torch.distributions import Bernoulli, Dirichlet
@bm.functional
def c():
return tensor(2.5)
@bm.functional
def c2():
return tensor([1.5, -2.5])
@bm.random_variable
def flip():
return Bernoulli(0.5)
@bm.functional
def flip2():
return flip()
@bm.functional
def flip3():
return flip() + 0
@bm.functional
def flip4():
return 0 + flip()
@bm.functional
def always_false_1():
return 1 < flip()
@bm.functional
def always_false_2():
return flip() < 0
@bm.functional
def invalid_tensor_1():
return tensor([])
@bm.functional
def invalid_tensor_2():
return tensor([[[1.0, 2.0], [3.0, 4.0]], [[5.0, 6.0], [7.0, 8.0]]])
class BMGInferInterfaceTest(unittest.TestCase):
def test_infer_interface_constant_functional(self) -> None:
self.maxDiff = None
# First, let's check expected behavior from a regular BM inference method
samples = bm.SingleSiteNewtonianMonteCarlo().infer([c(), c2()], {}, 1, 1)
observed = samples[c()]
expected = "tensor([[2.5000]])"
self.assertEqual(expected.strip(), str(observed).strip())
observed = samples[c2()]
expected = "tensor([[[ 1.5000, -2.5000]]])" # Note, no ", dtype=torch.float64)"
self.assertEqual(expected.strip(), str(observed).strip())
# Now let's do this in BMG Inference
samples = BMGInference().infer([c(), c2()], {}, 1, 1)
observed = samples[c()]
expected = "tensor([[2.5000]])"
self.assertEqual(expected.strip(), str(observed).strip())
observed = samples[c2()]
expected = "tensor([[[ 1.5000, -2.5000]]], dtype=torch.float64)"
self.assertEqual(expected.strip(), str(observed).strip())
# Again, let's check expected behavior from a regular BM inference method
samples = bm.SingleSiteNewtonianMonteCarlo().infer([c(), c2()], {}, 1, 2)
observed = samples[c()]
expected = """
tensor([[2.5000],
[2.5000]])"""
self.assertEqual(expected.strip(), str(observed).strip())
observed = samples[c2()]
expected = """
tensor([[[ 1.5000, -2.5000]],
[[ 1.5000, -2.5000]]])""" # Note, no ", dtype=torch.float64)"
self.assertEqual(expected.strip(), str(observed).strip())
# And again, in BMG inference
samples = BMGInference().infer([c(), c2()], {}, 1, 2)
observed = samples[c()]
expected = """
tensor([[2.5000],
[2.5000]])"""
self.assertEqual(expected.strip(), str(observed).strip())
observed = samples[c2()]
expected = """
tensor([[[ 1.5000, -2.5000]],
[[ 1.5000, -2.5000]]], dtype=torch.float64)"""
self.assertEqual(expected.strip(), str(observed).strip())
def test_infer_interface_redundant_functionals_1(self) -> None:
self.maxDiff = None
samples = BMGInference().infer([flip(), flip2()], {}, 10)
f = samples[flip()]
f2 = samples[flip2()]
self.assertEqual(str(f), str(f2))
samples = BMGInference().infer([always_false_1(), always_false_2()], {}, 2, 1)
af1 = samples[always_false_1()]
af2 = samples[always_false_2()]
expected = "tensor([[False, False]])"
self.assertEqual(expected, str(af1))
self.assertEqual(expected, str(af2))
def test_infer_interface_redundant_functionals_2(self) -> None:
self.maxDiff = None
samples = BMGInference().infer([flip3(), flip4()], {}, 10)
f3 = samples[flip3()]
f4 = samples[flip4()]
self.assertEqual(str(f3), str(f4))
class SampleModel:
@bm.random_variable
def a(self):
return Dirichlet(tensor([0.5, 0.5]))
@bm.functional
def b(self):
return self.a()[2] ## The index 2 is intentionally out of bounds
def test_infer_interface_runtime_error(self) -> None:
model = self.SampleModel()
with self.assertRaisesRegex(RuntimeError, "Error during BMG inference.*"):
BMGInference().infer([model.a(), model.b()], {}, 10, 4)
|
python
|
# This Brainfuck interpreter comes from https://github.com/kgabis/brainfuck-go/blob/master/bf.go;
# karminski ported it to PHP, and it has since been ported again to Python 3.x.
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
import sys
class Brainfuck:
# operators
op_inc_dp = 1
op_dec_dp = 2
op_inc_val = 3
op_dec_val = 4
op_out = 5
op_in = 6
op_jmp_fwd = 7
op_jmp_bck = 8
operator = 0
operand = 1
def compileBf(self, input):
pc = 0
jmpPc = 0
jmpStack = []
program = []
for c in input:
if c == '>':
program.append((self.op_inc_dp, 0))
elif c == '<':
program.append((self.op_dec_dp, 0))
elif c == '+':
program.append((self.op_inc_val, 0))
elif c == '-':
program.append((self.op_dec_val, 0))
elif c == '.':
program.append((self.op_out, 0))
elif c == ',':
program.append((self.op_in, 0))
elif c == '[':
program.append((self.op_jmp_fwd, 0))
jmpStack.append(pc)
elif c == ']':
if not jmpStack:
raise ValueError("Invalid Program")
jmpPc = jmpStack.pop()
program.append((self.op_jmp_bck, jmpPc))
program[jmpPc] = (program[jmpPc][0], pc)
else:
pc -= 1
pc += 1
if jmpStack:
raise ValueError("Invalid Program")
return program
def executeBf(self, program):
data = [0] * 65535
dataPtr = 0
pc = 0
while pc < len(program):
c, val = program[pc]
#print("pc:", pc, "c:", c, "val:", val)
if c == self.op_inc_dp:
dataPtr += 1
elif c == self.op_dec_dp:
dataPtr -= 1
elif c == self.op_inc_val:
data[dataPtr] += 1
elif c == self.op_dec_val:
data[dataPtr] -= 1
elif c == self.op_out:
print(chr(data[dataPtr]), end='')
elif c == self.op_in:
data[dataPtr] = sys.stdin.buffer.read(1)[0]
elif c == self.op_jmp_fwd:
if data[dataPtr] == 0:
pc = val
elif c == self.op_jmp_bck:
if data[dataPtr] > 0:
pc = val
else:
raise ValueError("Unknown operator")
pc += 1
# A mandelbrot set fractal viewer in brainfuck written by Erik Bosman
mandelbrotDotBf = """+++++++++++++[->++>>>+++++>++>+<<<<<<]>>>>>++++++>--->>>>>>>>>>+++++++++++++++[[
>>>>>>>>>]+[<<<<<<<<<]>>>>>>>>>-]+[>>>>>>>>[-]>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>[-]+
<<<<<<<+++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>>>>+>>>>>>>>>>>>>>>>>>>>>>>>>>
>+<<<<<<<<<<<<<<<<<[<<<<<<<<<]>>>[-]+[>>>>>>[>>>>>>>[-]>>]<<<<<<<<<[<<<<<<<<<]>>
>>>>>[-]+<<<<<<++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>>>+<<<<<<+++++++[-[->>>
>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>>>+<<<<<<<<<<<<<<<<[<<<<<<<<<]>>>[[-]>>>>>>[>>>>>
>>[-<<<<<<+>>>>>>]<<<<<<[->>>>>>+<<+<<<+<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>
[>>>>>>>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+<<+<<<+<<]>>>>>>>>]<<<<<<<<<[<<<<<<<
<<]>>>>>>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+<<+<<<<<]>>>>>>>>>+++++++++++++++[[
>>>>>>>>>]+>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+[
>+>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>[-<<<<+>>>>]<<<<[->>>>+<<<<<[->>[
-<<+>>]<<[->>+>>+<<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>>>>>>>>]<<<<<<<
<<[>[->>>>>>>>>+<<<<<<<<<]<<<<<<<<<<]>[->>>>>>>>>+<<<<<<<<<]<+>>>>>>>>]<<<<<<<<<
[>[-]<->>>>[-<<<<+>[<->-<<<<<<+>>>>>>]<[->+<]>>>>]<<<[->>>+<<<]<+<<<<<<<<<]>>>>>
>>>>[>+>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>>[-<<<<<+>>>>>]<<<<<[->>>>>+
<<<<<<[->>>[-<<<+>>>]<<<[->>>+>+<<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>
>>>>>>>]<<<<<<<<<[>>[->>>>>>>>>+<<<<<<<<<]<<<<<<<<<<<]>>[->>>>>>>>>+<<<<<<<<<]<<
+>>>>>>>>]<<<<<<<<<[>[-]<->>>>[-<<<<+>[<->-<<<<<<+>>>>>>]<[->+<]>>>>]<<<[->>>+<<
<]<+<<<<<<<<<]>>>>>>>>>[>>>>[-<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<+>>>>>>>>>>>>>
>>>>>>>>>>>>>>>>>>>>>>>]>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>+++++++++++++++[[>>>>
>>>>>]<<<<<<<<<-<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+>>>>>>>>>>>>>>>>>>>>>+<<<[<<<<<<
<<<]>>>>>>>>>[>>>[-<<<->>>]+<<<[->>>->[-<<<<+>>>>]<<<<[->>>>+<<<<<<<<<<<<<[<<<<<
<<<<]>>>>[-]+>>>>>[>>>>>>>>>]>+<]]+>>>>[-<<<<->>>>]+<<<<[->>>>-<[-<<<+>>>]<<<[->
>>+<<<<<<<<<<<<[<<<<<<<<<]>>>[-]+>>>>>>[>>>>>>>>>]>[-]+<]]+>[-<[>>>>>>>>>]<<<<<<
<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]<<<<<<<[->+>>>-<<<<]>>>>>>>>>+++++++++++++++++++
+++++++>>[-<<<<+>>>>]<<<<[->>>>+<<[-]<<]>>[<<<<<<<+<[-<+>>>>+<<[-]]>[-<<[->+>>>-
<<<<]>>>]>>>>>>>>>>>>>[>>[-]>[-]>[-]>>>>>]<<<<<<<<<[<<<<<<<<<]>>>[-]>>>>>>[>>>>>
[-<<<<+>>>>]<<<<[->>>>+<<<+<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>>[-<<<<<<<<
<+>>>>>>>>>]>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>+++++++++++++++[[>>>>>>>>>]+>[-
]>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+[>+>>>>>>>>]<<<
<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>>[-<<<<<+>>>>>]<<<<<[->>>>>+<<<<<<[->>[-<<+>>]<
<[->>+>+<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>>>>>>>>]<<<<<<<<<[>[->>>>
>>>>>+<<<<<<<<<]<<<<<<<<<<]>[->>>>>>>>>+<<<<<<<<<]<+>>>>>>>>]<<<<<<<<<[>[-]<->>>
[-<<<+>[<->-<<<<<<<+>>>>>>>]<[->+<]>>>]<<[->>+<<]<+<<<<<<<<<]>>>>>>>>>[>>>>>>[-<
<<<<+>>>>>]<<<<<[->>>>>+<<<<+<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>+>>>>>>>>
]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>>[-<<<<<+>>>>>]<<<<<[->>>>>+<<<<<<[->>[-<<+
>>]<<[->>+>>+<<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>>>>>>>>]<<<<<<<<<[>
[->>>>>>>>>+<<<<<<<<<]<<<<<<<<<<]>[->>>>>>>>>+<<<<<<<<<]<+>>>>>>>>]<<<<<<<<<[>[-
]<->>>>[-<<<<+>[<->-<<<<<<+>>>>>>]<[->+<]>>>>]<<<[->>>+<<<]<+<<<<<<<<<]>>>>>>>>>
[>>>>[-<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<+>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
]>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>>>[-<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<+>
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>]>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>++++++++
+++++++[[>>>>>>>>>]<<<<<<<<<-<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+[>>>>>>>>[-<<<<<<<+
>>>>>>>]<<<<<<<[->>>>>>>+<<<<<<+<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>>>>>>[
-]>>>]<<<<<<<<<[<<<<<<<<<]>>>>+>[-<-<<<<+>>>>>]>[-<<<<<<[->>>>>+<++<<<<]>>>>>[-<
<<<<+>>>>>]<->+>]<[->+<]<<<<<[->>>>>+<<<<<]>>>>>>[-]<<<<<<+>>>>[-<<<<->>>>]+<<<<
[->>>>->>>>>[>>[-<<->>]+<<[->>->[-<<<+>>>]<<<[->>>+<<<<<<<<<<<<[<<<<<<<<<]>>>[-]
+>>>>>>[>>>>>>>>>]>+<]]+>>>[-<<<->>>]+<<<[->>>-<[-<<+>>]<<[->>+<<<<<<<<<<<[<<<<<
<<<<]>>>>[-]+>>>>>[>>>>>>>>>]>[-]+<]]+>[-<[>>>>>>>>>]<<<<<<<<]>>>>>>>>]<<<<<<<<<
[<<<<<<<<<]>>>>[-<<<<+>>>>]<<<<[->>>>+>>>>>[>+>>[-<<->>]<<[->>+<<]>>>>>>>>]<<<<<
<<<+<[>[->>>>>+<<<<[->>>>-<<<<<<<<<<<<<<+>>>>>>>>>>>[->>>+<<<]<]>[->>>-<<<<<<<<<
<<<<<+>>>>>>>>>>>]<<]>[->>>>+<<<[->>>-<<<<<<<<<<<<<<+>>>>>>>>>>>]<]>[->>>+<<<]<<
<<<<<<<<<<]>>>>[-]<<<<]>>>[-<<<+>>>]<<<[->>>+>>>>>>[>+>[-<->]<[->+<]>>>>>>>>]<<<
<<<<<+<[>[->>>>>+<<<[->>>-<<<<<<<<<<<<<<+>>>>>>>>>>[->>>>+<<<<]>]<[->>>>-<<<<<<<
<<<<<<<+>>>>>>>>>>]<]>>[->>>+<<<<[->>>>-<<<<<<<<<<<<<<+>>>>>>>>>>]>]<[->>>>+<<<<
]<<<<<<<<<<<]>>>>>>+<<<<<<]]>>>>[-<<<<+>>>>]<<<<[->>>>+>>>>>[>>>>>>>>>]<<<<<<<<<
[>[->>>>>+<<<<[->>>>-<<<<<<<<<<<<<<+>>>>>>>>>>>[->>>+<<<]<]>[->>>-<<<<<<<<<<<<<<
+>>>>>>>>>>>]<<]>[->>>>+<<<[->>>-<<<<<<<<<<<<<<+>>>>>>>>>>>]<]>[->>>+<<<]<<<<<<<
<<<<<]]>[-]>>[-]>[-]>>>>>[>>[-]>[-]>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>>>>>[-<
<<<+>>>>]<<<<[->>>>+<<<+<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>+++++++++++++++[
[>>>>>>>>>]+>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+
[>+>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>[-<<<<+>>>>]<<<<[->>>>+<<<<<[->>
[-<<+>>]<<[->>+>+<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>>>>>>>>]<<<<<<<<
<[>[->>>>>>>>>+<<<<<<<<<]<<<<<<<<<<]>[->>>>>>>>>+<<<<<<<<<]<+>>>>>>>>]<<<<<<<<<[
>[-]<->>>[-<<<+>[<->-<<<<<<<+>>>>>>>]<[->+<]>>>]<<[->>+<<]<+<<<<<<<<<]>>>>>>>>>[
>>>[-<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<+>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>]>
>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>[-]>>>>+++++++++++++++[[>>>>>>>>>]<<<<<<<<<-<<<<<
<<<<[<<<<<<<<<]>>>>>>>>>-]+[>>>[-<<<->>>]+<<<[->>>->[-<<<<+>>>>]<<<<[->>>>+<<<<<
<<<<<<<<[<<<<<<<<<]>>>>[-]+>>>>>[>>>>>>>>>]>+<]]+>>>>[-<<<<->>>>]+<<<<[->>>>-<[-
<<<+>>>]<<<[->>>+<<<<<<<<<<<<[<<<<<<<<<]>>>[-]+>>>>>>[>>>>>>>>>]>[-]+<]]+>[-<[>>
>>>>>>>]<<<<<<<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>[-<<<+>>>]<<<[->>>+>>>>>>[>+>>>
[-<<<->>>]<<<[->>>+<<<]>>>>>>>>]<<<<<<<<+<[>[->+>[-<-<<<<<<<<<<+>>>>>>>>>>>>[-<<
+>>]<]>[-<<-<<<<<<<<<<+>>>>>>>>>>>>]<<<]>>[-<+>>[-<<-<<<<<<<<<<+>>>>>>>>>>>>]<]>
[-<<+>>]<<<<<<<<<<<<<]]>>>>[-<<<<+>>>>]<<<<[->>>>+>>>>>[>+>>[-<<->>]<<[->>+<<]>>
>>>>>>]<<<<<<<<+<[>[->+>>[-<<-<<<<<<<<<<+>>>>>>>>>>>[-<+>]>]<[-<-<<<<<<<<<<+>>>>
>>>>>>>]<<]>>>[-<<+>[-<-<<<<<<<<<<+>>>>>>>>>>>]>]<[-<+>]<<<<<<<<<<<<]>>>>>+<<<<<
]>>>>>>>>>[>>>[-]>[-]>[-]>>>>]<<<<<<<<<[<<<<<<<<<]>>>[-]>[-]>>>>>[>>>>>>>[-<<<<<
<+>>>>>>]<<<<<<[->>>>>>+<<<<+<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>+>[-<-<<<<+>>>>
>]>>[-<<<<<<<[->>>>>+<++<<<<]>>>>>[-<<<<<+>>>>>]<->+>>]<<[->>+<<]<<<<<[->>>>>+<<
<<<]+>>>>[-<<<<->>>>]+<<<<[->>>>->>>>>[>>>[-<<<->>>]+<<<[->>>-<[-<<+>>]<<[->>+<<
<<<<<<<<<[<<<<<<<<<]>>>>[-]+>>>>>[>>>>>>>>>]>+<]]+>>[-<<->>]+<<[->>->[-<<<+>>>]<
<<[->>>+<<<<<<<<<<<<[<<<<<<<<<]>>>[-]+>>>>>>[>>>>>>>>>]>[-]+<]]+>[-<[>>>>>>>>>]<
<<<<<<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>[-<<<+>>>]<<<[->>>+>>>>>>[>+>[-<->]<[->+
<]>>>>>>>>]<<<<<<<<+<[>[->>>>+<<[->>-<<<<<<<<<<<<<+>>>>>>>>>>[->>>+<<<]>]<[->>>-
<<<<<<<<<<<<<+>>>>>>>>>>]<]>>[->>+<<<[->>>-<<<<<<<<<<<<<+>>>>>>>>>>]>]<[->>>+<<<
]<<<<<<<<<<<]>>>>>[-]>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+<<+<<<<<]]>>>>[-<<<<+>
>>>]<<<<[->>>>+>>>>>[>+>>[-<<->>]<<[->>+<<]>>>>>>>>]<<<<<<<<+<[>[->>>>+<<<[->>>-
<<<<<<<<<<<<<+>>>>>>>>>>>[->>+<<]<]>[->>-<<<<<<<<<<<<<+>>>>>>>>>>>]<<]>[->>>+<<[
->>-<<<<<<<<<<<<<+>>>>>>>>>>>]<]>[->>+<<]<<<<<<<<<<<<]]>>>>[-]<<<<]>>>>[-<<<<+>>
>>]<<<<[->>>>+>[-]>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+<<+<<<<<]>>>>>>>>>[>>>>>>
>>>]<<<<<<<<<[>[->>>>+<<<[->>>-<<<<<<<<<<<<<+>>>>>>>>>>>[->>+<<]<]>[->>-<<<<<<<<
<<<<<+>>>>>>>>>>>]<<]>[->>>+<<[->>-<<<<<<<<<<<<<+>>>>>>>>>>>]<]>[->>+<<]<<<<<<<<
<<<<]]>>>>>>>>>[>>[-]>[-]>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>[-]>[-]>>>>>[>>>>>[-<<<<+
>>>>]<<<<[->>>>+<<<+<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>>>>>>[-<<<<<+>>>>>
]<<<<<[->>>>>+<<<+<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>+++++++++++++++[[>>>>
>>>>>]+>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+[>+>>
>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>[-<<<<+>>>>]<<<<[->>>>+<<<<<[->>[-<<+
>>]<<[->>+>>+<<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>>>>>>>>]<<<<<<<<<[>
[->>>>>>>>>+<<<<<<<<<]<<<<<<<<<<]>[->>>>>>>>>+<<<<<<<<<]<+>>>>>>>>]<<<<<<<<<[>[-
]<->>>>[-<<<<+>[<->-<<<<<<+>>>>>>]<[->+<]>>>>]<<<[->>>+<<<]<+<<<<<<<<<]>>>>>>>>>
[>+>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>>[-<<<<<+>>>>>]<<<<<[->>>>>+<<<<
<<[->>>[-<<<+>>>]<<<[->>>+>+<<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>>>>>
>>>]<<<<<<<<<[>>[->>>>>>>>>+<<<<<<<<<]<<<<<<<<<<<]>>[->>>>>>>>>+<<<<<<<<<]<<+>>>
>>>>>]<<<<<<<<<[>[-]<->>>>[-<<<<+>[<->-<<<<<<+>>>>>>]<[->+<]>>>>]<<<[->>>+<<<]<+
<<<<<<<<<]>>>>>>>>>[>>>>[-<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<+>>>>>>>>>>>>>>>>>
>>>>>>>>>>>>>>>>>>>]>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>+++++++++++++++[[>>>>>>>>
>]<<<<<<<<<-<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+>>>>>>>>>>>>>>>>>>>>>+<<<[<<<<<<<<<]
>>>>>>>>>[>>>[-<<<->>>]+<<<[->>>->[-<<<<+>>>>]<<<<[->>>>+<<<<<<<<<<<<<[<<<<<<<<<
]>>>>[-]+>>>>>[>>>>>>>>>]>+<]]+>>>>[-<<<<->>>>]+<<<<[->>>>-<[-<<<+>>>]<<<[->>>+<
<<<<<<<<<<<[<<<<<<<<<]>>>[-]+>>>>>>[>>>>>>>>>]>[-]+<]]+>[-<[>>>>>>>>>]<<<<<<<<]>
>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>->>[-<<<<+>>>>]<<<<[->>>>+<<[-]<<]>>]<<+>>>>[-<<<<
->>>>]+<<<<[->>>>-<<<<<<.>>]>>>>[-<<<<<<<.>>>>>>>]<<<[-]>[-]>[-]>[-]>[-]>[-]>>>[
>[-]>[-]>[-]>[-]>[-]>[-]>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>>>>>[-]>>>>]<<<<<<<<<
[<<<<<<<<<]>+++++++++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>+>>>>>>>>>+<<<<<<<<
<<<<<<[<<<<<<<<<]>>>>>>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+[-]>>[>>>>>>>>>]<<<<<
<<<<[>>>>>>>[-<<<<<<+>>>>>>]<<<<<<[->>>>>>+<<<<<<<[<<<<<<<<<]>>>>>>>[-]+>>>]<<<<
<<<<<<]]>>>>>>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+>>[>+>>>>[-<<<<->>>>]<<<<[->>>
>+<<<<]>>>>>>>>]<<+<<<<<<<[>>>>>[->>+<<]<<<<<<<<<<<<<<]>>>>>>>>>[>>>>>>>>>]<<<<<
<<<<[>[-]<->>>>>>>[-<<<<<<<+>[<->-<<<+>>>]<[->+<]>>>>>>>]<<<<<<[->>>>>>+<<<<<<]<
+<<<<<<<<<]>>>>>>>-<<<<[-]+<<<]+>>>>>>>[-<<<<<<<->>>>>>>]+<<<<<<<[->>>>>>>->>[>>
>>>[->>+<<]>>>>]<<<<<<<<<[>[-]<->>>>>>>[-<<<<<<<+>[<->-<<<+>>>]<[->+<]>>>>>>>]<<
<<<<[->>>>>>+<<<<<<]<+<<<<<<<<<]>+++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>+<<<
<<[<<<<<<<<<]>>>>>>>>>[>>>>>[-<<<<<->>>>>]+<<<<<[->>>>>->>[-<<<<<<<+>>>>>>>]<<<<
<<<[->>>>>>>+<<<<<<<<<<<<<<<<[<<<<<<<<<]>>>>[-]+>>>>>[>>>>>>>>>]>+<]]+>>>>>>>[-<
<<<<<<->>>>>>>]+<<<<<<<[->>>>>>>-<<[-<<<<<+>>>>>]<<<<<[->>>>>+<<<<<<<<<<<<<<[<<<
<<<<<<]>>>[-]+>>>>>>[>>>>>>>>>]>[-]+<]]+>[-<[>>>>>>>>>]<<<<<<<<]>>>>>>>>]<<<<<<<
<<[<<<<<<<<<]>>>>[-]<<<+++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>-<<<<<[<<<<<<<
<<]]>>>]<<<<.>>>>>>>>>>[>>>>>>[-]>>>]<<<<<<<<<[<<<<<<<<<]>++++++++++[-[->>>>>>>>
>+<<<<<<<<<]>>>>>>>>>]>>>>>+>>>>>>>>>+<<<<<<<<<<<<<<<[<<<<<<<<<]>>>>>>>>[-<<<<<<
<<+>>>>>>>>]<<<<<<<<[->>>>>>>>+[-]>[>>>>>>>>>]<<<<<<<<<[>>>>>>>>[-<<<<<<<+>>>>>>
>]<<<<<<<[->>>>>>>+<<<<<<<<[<<<<<<<<<]>>>>>>>>[-]+>>]<<<<<<<<<<]]>>>>>>>>[-<<<<<
<<<+>>>>>>>>]<<<<<<<<[->>>>>>>>+>[>+>>>>>[-<<<<<->>>>>]<<<<<[->>>>>+<<<<<]>>>>>>
>>]<+<<<<<<<<[>>>>>>[->>+<<]<<<<<<<<<<<<<<<]>>>>>>>>>[>>>>>>>>>]<<<<<<<<<[>[-]<-
>>>>>>>>[-<<<<<<<<+>[<->-<<+>>]<[->+<]>>>>>>>>]<<<<<<<[->>>>>>>+<<<<<<<]<+<<<<<<
<<<]>>>>>>>>-<<<<<[-]+<<<]+>>>>>>>>[-<<<<<<<<->>>>>>>>]+<<<<<<<<[->>>>>>>>->[>>>
>>>[->>+<<]>>>]<<<<<<<<<[>[-]<->>>>>>>>[-<<<<<<<<+>[<->-<<+>>]<[->+<]>>>>>>>>]<<
<<<<<[->>>>>>>+<<<<<<<]<+<<<<<<<<<]>+++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>>
+>>>>>>>>>>>>>>>>>>>>>>>>>>>+<<<<<<[<<<<<<<<<]>>>>>>>>>[>>>>>>[-<<<<<<->>>>>>]+<
<<<<<[->>>>>>->>[-<<<<<<<<+>>>>>>>>]<<<<<<<<[->>>>>>>>+<<<<<<<<<<<<<<<<<[<<<<<<<
<<]>>>>[-]+>>>>>[>>>>>>>>>]>+<]]+>>>>>>>>[-<<<<<<<<->>>>>>>>]+<<<<<<<<[->>>>>>>>
-<<[-<<<<<<+>>>>>>]<<<<<<[->>>>>>+<<<<<<<<<<<<<<<[<<<<<<<<<]>>>[-]+>>>>>>[>>>>>>
>>>]>[-]+<]]+>[-<[>>>>>>>>>]<<<<<<<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>[-]<<<++++
+[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>>->>>>>>>>>>>>>>>>>>>>>>>>>>>-<<<<<<[<<<<
<<<<<]]>>>]
"""
def test():
bf = Brainfuck()
program = bf.compileBf(mandelbrotDotBf)
bf.executeBf(program)
if __name__ == '__main__':
test()
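# A smaller smoke test (illustrative): this program prints the letter "A",
# since 8 * 8 = 64 plus one gives ASCII 65:
#   bf = Brainfuck()
#   bf.executeBf(bf.compileBf('++++++++[>++++++++<-]>+.'))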
|
python
|
import os
import platform
import getpass
if(platform.system() == "Windows"):
os.system("cls")
print(" _")
print("__ _____| | ___ ___ _ __ ___ ___ ")
print("\ \ /\ / / _ \ |/ __/ _ \| '_ ` _ \ / _ \ ")
print(" \ V V / __/ | (_| (_) | | | | | | __/ ")
print(" \_/\_/ \___|_|\___\___/|_| |_| |_|\___| ")
print("\n\n Hi " + getpass.getuser() + ", i'm cento and i'm happy to help you")
print("\n ---------------------------------------------")
print("\n italiano")
print("\n ---------------------------------------------")
language = input("\n please, enter a language : ")
if(language == "italiano"):
os.system("python3 language/italiano/verifica.py")
if(platform.system() == "Linux"):
print("\n questo bot non è supportato per linux \n\n")
exit
|
python
|
"""
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import os
import sys
import time
import json
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_handler = logging.StreamHandler(sys.stdout)
logger.addHandler(log_handler)
import http_request_tester as tester
def handle(event, context):
logger.info('handler is triggered: start-test, event={}'.format(event))
logger.info('Records count: {}'.format(len(event['Records'])))
profile_name = os.environ.get('PROFILE_NAME', None)
project_name = os.environ.get('PROJECT_NAME', 'project_name_empty')
project_stage = os.environ.get('PROJECT_STAGE', 'project_stage_empty')
api_endpoint = os.environ.get('API_ENDPOINT', 'api_endpoint_empty')
logger.info('project_name: {}'.format(project_name))
logger.info('project_stage: {}'.format(project_stage))
logger.info('api_endpoint: {}'.format(api_endpoint))
for record in event['Records']:
message = json.loads(record['Sns']['Message'])
interval_in_sec = int(message['Config']['IntervalInSec'])
duration_in_sec = int(message['Config']['DurationInSec'])
logger.info('handler start one-record, message={}'.format(message))
api_gateway_tester = tester.HttpRequestTester(
TestName='ApiGateway',
ProfileName=profile_name,
ProjectName=project_name,
ProjectStage=project_stage,
Endpoint=api_endpoint,
ApiKey=None,
Interval=interval_in_sec,
Duration=duration_in_sec
)
api_gateway_tester.start_loop(message['TestData'])
logger.info('handler finish one record: test-timeout duration_in_sec-{}'.format(duration_in_sec))
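# A minimal local-invocation sketch (the TestData payload is hypothetical;
# only the Config keys are taken from the handler above):
#
#   event = {'Records': [{'Sns': {'Message': json.dumps({
#       'Config': {'IntervalInSec': '1', 'DurationInSec': '5'},
#       'TestData': {...},  # whatever HttpRequestTester.start_loop expects
#   })}}]}
#   handle(event, None)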
|
python
|
"""Utility code for argparse"""
import argparse
import yaml
#class StoreDictKeyPair(argparse.Action):
# """An action for reading key-value pairs from command line"""
# def __call__(self, parser, namespace, values, option_string=None):
# my_dict = {}
# for kv in values.split(","):
# k,v = kv.split("=")
# my_dict[k] = v
# setattr(namespace, self.dest, my_dict)
class ReadYaml(argparse.Action):
    """An action that parses a YAML string from the command line into a dict."""
    def __call__(self, parser, namespace, values, option_string=None):
my_dict = yaml.load(values, Loader=yaml.Loader)
setattr(namespace, self.dest, my_dict)
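# A minimal usage sketch (the flag name and YAML string are hypothetical):
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--config", action=ReadYaml, default={})
    args = parser.parse_args(["--config", "lr: 0.1\nepochs: 10"])
    print(args.config)  # -> {'lr': 0.1, 'epochs': 10}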
|
python
|
# datastore transations and methods
from sqlalchemy.orm import load_only
from sqlalchemy.sql import text
def count_records(session, model, **kwargs):
row_count = session.query(model).filter_by(**kwargs).count()
return row_count
def delete_record(session, model, **kwargs):
instance = session.query(model).filter_by(**kwargs).one()
session.delete(instance)
def get_column_values(session, model, column, **kwargs):
instances = session.query(model).filter_by(**kwargs).options(
load_only(column)).order_by(column)
return instances
def insert(session, model, **kwargs):
instance = model(**kwargs)
session.add(instance)
session.flush()
return instance
def insert_or_ignore(session, model, **kwargs):
instance = session.query(model).filter_by(**kwargs).first()
if not instance:
instance = model(**kwargs)
session.add(instance)
return instance
def retrieve_first_n(session, model, n, **kwargs):
instances = session.query(model).filter_by(**kwargs).limit(n).all()
return instances
def retrieve_first_record(session, model, **kwargs):
instance = session.query(model).filter_by(**kwargs).order_by(
model.did).first()
return instance
def retrieve_last_record(session, model):
instance = session.query(model).order_by(model.did.desc()).first()
return instance
def retrieve_last_record_filtered(session, model, **kwargs):
instance = session.query(model).filter_by(**kwargs).order_by(
model.did.desc()).first()
return instance
def retrieve_record(session, model, **kwargs):
instance = session.query(model).filter_by(**kwargs).first()
return instance
def retrieve_records(session, model, **kwargs):
instances = session.query(model).filter_by(**kwargs).order_by(
model.did).all()
return instances
def retrieve_cart_order_ids(session, cart_id):
stmn = text("""
SELECT `order`.did
FROM `order`
WHERE cart_id=:cart_id
ORDER BY `order`.did
""")
stmn = stmn.bindparams(cart_id=cart_id)
instances = session.execute(stmn)
return instances
def get_cart_data_view_records(
session, system_id, user='All users', status=''):
if user == 'All users' and status:
stmn = text("""
SELECT cart_id, cart_name, cart_date,
system_id, cart_status, cart_owner, linked
FROM carts_meta
WHERE system_id=:system_id AND cart_status=:status
ORDER BY cart_date DESC
""")
stmn = stmn.bindparams(system_id=system_id, status=status)
elif user == 'All users' and not status:
stmn = text("""
SELECT cart_id, cart_name, cart_date, system_id,
cart_status, cart_owner, linked
FROM carts_meta
WHERE system_id=:system_id
ORDER BY cart_date DESC
""")
stmn = stmn.bindparams(system_id=system_id)
elif user != 'All users' and not status:
stmn = text("""
SELECT cart_id, cart_name, cart_date, system_id,
cart_status, cart_owner, linked
FROM carts_meta
WHERE system_id=:system_id AND cart_owner=:user
ORDER BY cart_date DESC
""")
stmn = stmn.bindparams(system_id=system_id, user=user)
else:
stmn = text("""
SELECT cart_id, cart_name, cart_date, system_id,
cart_status, cart_owner, linked
FROM carts_meta
WHERE system_id=:system_id AND cart_owner=:user AND cart_status=:status
ORDER BY cart_date DESC
""")
stmn = stmn.bindparams(system_id=system_id, user=user, status=status)
instances = session.execute(stmn)
return instances
def retrieve_cart_details_view_stmn(cart_id):
stmn = text("""
SELECT * FROM cart_details
WHERE cart_id=:cart_id
""")
stmn = stmn.bindparams(cart_id=cart_id)
return stmn
def retrieve_unique_vendors_from_cart(session, cart_id):
stmn = text("""
SELECT DISTINCT name
FROM vendor
JOIN `order` ON `order`.vendor_id = vendor.did
WHERE `order`.cart_id=:cart_id
;
""")
stmn = stmn.bindparams(cart_id=cart_id)
instances = session.execute(stmn)
return instances
def update_record(session, model, did, **kwargs):
instance = session.query(model).filter_by(did=did).one()
for key, value in kwargs.items():
setattr(instance, key, value)
def construct_report_query_stmn(system_id, library_id,
user_ids, start_date, end_date):
"""
    Creates SQL query statement to select datastore records matching
report criteria
args:
system_id: int, datastore system.did
library_id: int, datastore library.did
user_ids: list, list of datastore user.did
start_date: str, starting date (inclusive) in format YYYY-MM-DD
end_date: str, ending date (inclusive) in format YYYY-MM-DD
returns:
stmn: instance of sqlalchemy.sql.expression.TextClause
"""
sql_str = """
SELECT cart.did as cart_id,
cart.created as cart_date,
status.name as cart_status,
user.name as user,
system.name as system,
library.name as library,
`order`.did as order_id,
lang.name as lang_name,
lang.code as lang_code,
audn.name as audn,
vendor.name as vendor,
mattype.name as mattype,
resource.price_disc as price,
branch.code as branch_code,
branch.name as branch_name,
orderlocation.qty as qty,
fund.code as fund
FROM cart
JOIN status ON cart.status_id = status.did
JOIN user ON cart.user_id = user.did
JOIN system ON cart.system_id = system.did
JOIN library ON cart.library_id = library.did
JOIN `order` ON cart.did = `order`.cart_id
JOIN lang ON `order`.lang_id = lang.did
JOIN audn ON `order`.audn_id = audn.did
JOIN vendor ON `order`.vendor_id = vendor.did
JOIN mattype ON `order`.matType_id = mattype.did
JOIN resource ON `order`.did = resource.order_id
JOIN orderlocation ON `order`.did = orderlocation.order_id
JOIN branch ON orderlocation.branch_id = branch.did
JOIN fund ON orderlocation.fund_id = fund.did
WHERE cart.created BETWEEN CAST(:start_date AS DATE) AND CAST(:end_date AS DATE)
AND cart.system_id=:system_id
"""
params = dict(
system_id=system_id,
start_date=f'{start_date}',
end_date=f'{end_date}')
if user_ids:
s = []
sql_str += ' AND ('
for user in list(enumerate(user_ids)):
arg = f'user_{user[0]}'
params[arg] = user[1]
s.append(f'cart.user_id=:{arg}')
sql_str += ' OR '.join(s)
sql_str += ' )'
if library_id is not None:
params['library_id'] = library_id
sql_str += ' AND cart.library_id=:library_id'
stmn = text(sql_str)
stmn = stmn.bindparams(**params)
return stmn
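# A minimal usage sketch with a hypothetical model and an in-memory SQLite
# database (none of the real datastore models are defined in this module;
# assumes SQLAlchemy 1.4+):
if __name__ == '__main__':
    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base
    Base = declarative_base()
    class Vendor(Base):
        __tablename__ = 'vendor'
        did = Column(Integer, primary_key=True)
        name = Column(String(50))
    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        insert(session, Vendor, name='Ingram')
        print(retrieve_last_record(session, Vendor).name)  # -> Ingram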
|
python
|
import rclpy
from rclpy.node import Node
from rclpy.qos import qos_profile_sensor_data
from sensor_msgs.msg import Image # Image is the message type
import cv2 # OpenCV library
from cv_bridge import CvBridge # Package to convert between ROS and OpenCV Images
import numpy as np
# Naming the Output window
windowname = 'Result'
cv2.namedWindow(windowname)
output = None
x, y, w, h = 0, 0, 0, 0
first_point_saved = False
second_point_saved = False
track_window = (x, y, w, h)
can_track = False
class CamShift(Node):
def __init__(self):
super().__init__('camshift')
self.subscription = self.create_subscription(
Image,
'/image',
self.listener_callback,
qos_profile_sensor_data)
self.subscription # prevent unused variable warning
# Used to convert between ROS and OpenCV images
self.br = CvBridge()
def listener_callback(self, data):
global x, y, w, h, first_point_saved,second_point_saved, track_window, can_track, output, roi_hist, roi
# Display the message on the console
#self.get_logger().info('Receiving image')
# Convert ROS Image message to OpenCV image
#frame = self.br.imgmsg_to_cv2(data, "bgr8")
#ret, frame = self.br.imgmsg_to_cv2(data, "bgr8")
frame = self.br.imgmsg_to_cv2(data, "bgr8")
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
# Check if 2nd point is also saved then initialize the tracker
if second_point_saved:
roi_hist, roi = self.initialize(frame, track_window)
second_point_saved = False
can_track = True
# Start tracking
if can_track == True:
dst = cv2.calcBackProject([hsv],[0],roi_hist,[0,180],1)
# apply camshift to get the new location
ret, track_window = cv2.CamShift(dst, track_window, self.term_crit)
# Draw it on image
pts = cv2.boxPoints(ret)
            pts = np.intp(pts)  # np.int0 is a deprecated alias of np.intp
print("track_window")
print("x, y, w, h")
print(track_window)
cv2.imshow('roi', roi)
output = cv2.polylines(frame,[pts],True, 255,2)
else:
output = frame
if first_point_saved:
cv2.circle(output, (x, y), 5, (0, 0, 255), -1)
cv2.destroyWindow('roi')
# Show the output
cv2.imshow(windowname,output)
cv2.waitKey(1)
def click_event(event, px, py, flags, param):
global x, y, w, h, first_point_saved, second_point_saved, track_window, can_track, output
# Left mouse button release event
if event == cv2.EVENT_LBUTTONUP:
if first_point_saved:
w = px-x
h = py-y
track_window = (x, y, w, h)
first_point_saved = False
second_point_saved = True
else:
x = px
y = py
first_point_saved = True
can_track = False
# Right mouse button press event
if event == cv2.EVENT_RBUTTONDOWN:
can_track = False
cv2.setMouseCallback(windowname, click_event) # Start the mouse event
# initialize tracker
def initialize(self, frame, track_window):
x, y, w, h = track_window
# set up the ROI for tracking
roi = frame[y:y+h, x:x+w]
hsv_roi = cv2.cvtColor(roi, cv2.COLOR_BGR2HSV)
roi_hist = cv2.calcHist([hsv_roi],[0],None,[180],[0,180])
roi_hist = cv2.normalize(roi_hist,roi_hist,0,255,cv2.NORM_MINMAX)
return roi_hist, roi
# Setup the termination criteria
term_crit = ( cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 1 )
def main(args=None):
rclpy.init(args=args)
camshift = CamShift()
rclpy.spin(camshift)
# Destroy the node explicitly
# (optional - otherwise it will be done automatically
# when the garbage collector destroys the node object)
camshift.destroy_node()
rclpy.shutdown()
if __name__ == '__main__':
main()
|
python
|
from random import randint
import pygame as pg
from scripts import constants as const
class Bird(pg.sprite.Sprite):
SIZE = const.SPRITE_SIZE[0]
MIN_SPEED = 1
MAX_SPEED = 10
def __init__(self, bird_image):
pg.sprite.Sprite.__init__(self)
self.image = bird_image
self.rect = self.image.get_rect()
        # randint requires integer bounds, hence floor division
        self.rect.center = (randint(self.SIZE // 2, const.WIDTH // 2 - self.SIZE // 2),
                            randint(self.SIZE // 2, const.HEIGHT // 2 - self.SIZE // 2))
self.speed_x = (-1) ** randint(0, 1) * randint(self.MIN_SPEED, self.MAX_SPEED)
self.speed_y = (-1) ** randint(0, 1) * randint(self.MIN_SPEED, self.MAX_SPEED)
if self.speed_x < 0:
self.image = pg.transform.flip(self.image, True, False)
def move(self):
self.rect.x += self.speed_x
self.rect.y += self.speed_y
if self.rect.left < 0 or self.rect.right > const.WIDTH:
self.image = pg.transform.flip(self.image, True, False)
self.speed_x = -self.speed_x
if self.rect.top < 0 or self.rect.bottom > const.HEIGHT:
self.speed_y = -self.speed_y
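# Minimal usage sketch (hedged: assumes `scripts.constants` defines WIDTH and
# HEIGHT and that `bird_image` is a loaded pygame Surface):
#
#     birds = pg.sprite.Group(Bird(bird_image) for _ in range(10))
#     # inside the game loop, each frame:
#     for bird in birds:
#         bird.move()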
|
python
|
"""
Example showing for tkinter and ttk how to do:
-- Simple animation
-- on a tkinter Canvas.
References:
-- https://effbot.org/tkinterbook/canvas.htm
This is the simplest explanation,
but very old and possibly somewhat out of date.
Everywhere that it says "pack" use "grid" instead.
-- The tkinter.pdf document in this project.
This is by far the most complete reference work for tkinter and ttk.
It is for reference, NOT a tutorial.
-- https://tkdocs.com/tutorial/canvas.html
This is a more complete and up-to-date tutorial than the one above.
It shows each example in four different languages.
Python is the fourth (last) one. Ignore the other-language examples.
The key ideas are:
1. Drawing (and hence animation) is on a tkinter.Canvas.
2. You put an object onto a Canvas with:
id = canvas.create_XXX(POSITION, OTHER-OPTIONS)
where XXX can be any of:
oval, arc, bitmap, image, line, polygon, rectangle, text, window,
and where the specifics of POSITION and OTHER-OPTIONS depends on the
type of object being created. See the example in the code below
for an oval. See the above reference work for details on other types.
3. The ID returned by a call to create_XXX is how you keep track of objects
on a Canvas for future animation (movements, color changes, etc.).
4. There are three basic methods for animating (changing) an object.
Each method is a Canvas method whose first argument
is the ID of the object on the Canvas. You can:
a. MOVE an object BY a given amount by:
canvas.move(ID, delta_x, delta_y)
b. MOVE an object TO a certain position by:
canvas.coords(ID, NEW_POSITION ...)
where the specifics of NEW_POSITION depend on the type of the object.
     c. CHANGE OTHER CHARACTERISTICS of objects as in this example:
             canvas.itemconfigure(ID, fill="blue")  # Changes the fill color to "blue"
The specifics of what you can change (and how) depends on the type
of object. See the above reference work for details.
5. You must FIRST construct everything needed for the animation,
and THEN do the root.mainloop() to start the GUI running.
The code below shows one way to accomplish that, using this structure:
a. The main method constructs and then starts an Animation object.
b. The Animation object constructs the GUI, passing itself to the GUI
so that the GUI can later ask the Animation to do stuff.
c. The GUI contains:
-- The one-and-only tkinter.Tk object.
-- Frame(s) and other widgets as desired.
-- A tkinter.Canvas on a Frame.
d. When the GUI is constructed, you include all the tkinter/ttk code
that you have seen in previous examples EXCEPT not (yet) the
root.mainloop()
e. The GUI includes a start method that contains:
root.mainloop()
f. The Animation object (which constructed the GUI) calls the GUI's
start method to start the animation running.
g. The Animation object has a method:
run_one_cycle
that makes all the changes to all the objects in the Animation,
for ONE cycle of the animation, by using the Canvas methods:
move coords itemconfigure
The Animation has access to the Canvas because the Animation
constructed (and stores) the GUI, and the GUI makes and stores
the Canvas.
h. The Animation's run_one_cycle method
is called repeatedly BY THE GUI as follows, all in the GUI class:
def __init__(self, animation):
self.animation = animation
self.root = tkinter.Tk()
...
self.root.after(1000, self.animation_loop)
def animation_loop(self):
self.animation.run_one_cycle()
self.root.after(10, self.animation_loop)
The after method sets a TIMER that is triggered
after the given number of milliseconds (1000 ms in the first call
to after in the above, and 10 ms in the second call to after).
Because it is a TIMER, Tkinter is able to react to button presses
and other stuff while the TIMER is waiting to ring its alarm.
When the TIMER rings its alarm, it calls the second argument
to the after method, which is self.animation_loop in the
above. So, self.animation_loop is called the first time after
1 second (1000 ms), and it runs one cycle of the animation at
that time. Thereafter it repeatedly:
-- Waits 10 ms (via a TIMER that allows other stuff to happen)
-- Calls animation_loop again
-- Runs one cycle of the animation.
In the actual code below, instead of running every 10 ms,
it runs every animation.cycle_ms, so that the Animation object
can control the "refresh rate" of the animation.
See the code below for an example that uses the above structure.
While you are not REQUIRED to use the same structure, it is probably a good
idea to do so for any video-game style game.
This example does NOT include any message-passing with MQTT to other computers.
Other examples cover that topic.
SEE THE UML CLASS DIAGRAM included with this project.
Authors: David Mutchler and his colleagues
at Rose-Hulman Institute of Technology.
"""
import random
import tkinter
from tkinter import ttk
def main():
animation = Animation()
animation.start()
class Animation(object):
""" An animation of Ball objects (per the Ball class defined below). """
def __init__(self):
# Construct the GUI, which constructs and stores a Canvas.
# Store that Canvas in THIS object too, so that animated objects can
# act upon it. Here, our animated objects are all Ball objects,
# stored in the self.balls list, which starts with a single Ball.
# Each Ball needs to have the Canvas so that the Ball can change its
# position and fill color (and anything else it might want to change).
self.gui = GUI(self)
self.canvas = self.gui.canvas
ball = Ball(self.canvas) # Note how each Ball gets the Canvas
self.balls = [ball]
self.cycle_ms = 10 # Run an animation step every 10 ms (approximately)
def start(self):
# Called after the GUI, the Animation, and all the animated objects
# are constructed. The GUI's start method starts the mainloop
# in which the program remains for the remainder of its run.
self.gui.start()
def run_one_cycle(self):
"""
Must make whatever changes animated objects need to make on the Canvas,
for one iteration (cycle) of the animation loop.
"""
# One out of every 200 cycles, make a new Ball.
r = random.randrange(1, 201) # r is between 1 and 200, inclusive
if r == 1:
self.balls.append(Ball(self.canvas))
# Animate each ball.
for ball in self.balls:
ball.run_one_cycle()
class GUI(object):
def __init__(self, animation):
"""
Stores the given Animation object in order to call the Animation
object's run_one_cycle method repeatedly, by using root.after(...)
Constructs all the GUI widgets, but does NOT (yet) call root.mainloop.
:type animation: Animation
"""
self.animation = animation
# The usual Tk and Frame objects, plus any other widgets you want.
self.root = tkinter.Tk()
self.frame = ttk.Frame(self.root, padding=10)
self.frame.grid()
self.canvas = self.make_canvas()
# Starts the animation loop AFTER 1000 ms (i.e., 1 second).
self.root.after(1000, self.animation_loop)
def make_canvas(self):
canvas_width = 400
canvas_height = 300
canvas = tkinter.Canvas(self.frame, width=canvas_width,
height=canvas_height)
canvas.width = canvas_width
canvas.height = canvas_height
canvas.grid()
return canvas
def start(self):
# Called by the Animation object when the program is ready to enter the
# Tk object's mainloop and remain there for the remainder of the run.
self.root.mainloop()
def animation_loop(self):
# Tells the Animation to run one cycle of the animation.
# Then sets up a timer to call this same method again after a few ms.
self.animation.run_one_cycle()
self.root.after(self.animation.cycle_ms, self.animation_loop)
class Ball(object):
def __init__(self, canvas):
"""
The Ball needs the Canvas so that it can update its characteristics
(position, fill color, etc) as the animation runs.
:type canvas: tkinter.Canvas
"""
self.canvas = canvas
# Set the characteristics of the Ball:
# specific x, y and diameter, with a random color.
x = 200
y = 200
self.diameter = 20
self.colors = ["red", "green", "blue"]
r = random.randrange(len(self.colors))
self.color = self.colors[r]
# Make the item on the Canvas for drawing the Ball, storing its ID
# for making changes to the Ball (moving it, changing color, etc.).
# Here, each Ball is a filled circle (actually an oval),
# defined by its upper-left and lower-right corners.
self.id = self.canvas.create_oval(x, y,
x + self.diameter, y + self.diameter,
fill=self.color)
def run_one_cycle(self):
""" Illustrates the 3 basic ways to change (animate) an item. """
# Move RED balls BY a small random amount
# (using the Canvas move method):
if self.color == "red":
delta_x = random.randrange(-5, 6) # Between -5 and 5, inclusive
delta_y = random.randrange(-2, 3) # Between -2 and 2, inclusive
self.canvas.move(self.id, delta_x, delta_y)
# Move GREEN balls TO a certain position, randomly inside a box near
# the upper-left of the window (using the Canvas coords method):
elif self.color == "green":
x = random.randrange(50, 101) # Between 50 and 100, inclusive
y = random.randrange(20, 41) # Between 20 and 40, inclusive
self.canvas.coords(self.id, x, y,
x + self.diameter, y + self.diameter)
# Change balls to a random color, every 100 cycles or so,
# about once a second (using the Canvas itemconfigure method):
r1 = random.randrange(1, 101) # Random between 1 and 100, inclusive
if r1 == 1:
r2 = random.randrange(len(self.colors))
self.color = self.colors[r2]
self.canvas.itemconfigure(self.id, fill=self.color)
main()
|
python
|
class LinkedList:
def __init__(self, head):
self.head = head
self.current_element = self.head
# Node navigation
def next(self):
if self.current_element.next is None:
return
self.current_element = self.current_element.next
def go_back_to_head(self):
self.current_element = self.head
# Node queries
def get_current_element(self):
return self.current_element.data
# Subordinate classes
class Node:
"""A Node has two properties:
`data` which represents the instance of data stored in the node
`next` which is a pointer to the next node
"""
def __init__(self, data=None, next=None):
self.data = data
self.next = next
if __name__ == '__main__':
    data_set = ['alex', 'siobhan', 'lucy', 'rosie']
    # Build the list from data_set: create the head, then chain the rest.
    linked_list = LinkedList(head=LinkedList.Node(data=data_set[0]))
    tail = linked_list.head
    for name in data_set[1:]:
        tail.next = LinkedList.Node(data=name)
        tail = tail.next
    print(linked_list.get_current_element())
    linked_list.next()
    print(linked_list.get_current_element())
    linked_list.go_back_to_head()
    print(linked_list.get_current_element())
|
python
|
import pandas as pd
import os
import subprocess as sub
import re
import sys
from Bio import SeqUtils
import matplotlib.pyplot as plt
import numpy as np
from scipy import stats
# path = os.path.join(os.path.expanduser('~'),'GENOMES_BACTER_RELEASE69/genbank')
path = "."
# ['DbxRefs','Description','FeaturesNum','assembly_accession','GenomicLen','GenomicName','Keywords','NucsPresent','Organism_des',
# 'SourceDbxRefs','SourceOrganism','SourcePlasmid','SourceStrain','Taxonomy','BioProject','TaxonID','Organism_env',
# 'OptimumTemperature','TemperatureRange','OxygenReq','Habitat','Salinity','crit_NC','crit_WGS','crit_genlen',
# 'crit_features','crit_comp_genome','crit_plasmid']
env_dat = pd.read_csv(os.path.join(path,"summary_organisms_interest.dat"))
taxon_dat = pd.read_csv(os.path.join(path,"arch_taxonomy_interest.dat"))
check_halo = lambda tax_class: any(_ in tax_class for _ in ('Halobacteria','Nanohaloarchaea'))
taxon_dat['halo'] = taxon_dat['tax_lineages'].apply(lambda lins: any( check_halo(lin.split(';')) for lin in lins.split(':') ) )
#['assembly_accession','cDNA','fid','pid','product','protein','status','table','ribosomal','CAI','TrOp']
gen_dat = pd.read_csv(os.path.join(path,"complete_arch_CDS_CAI_DNA_Rnd.dat"))
# PROTEOME LEVEL AMINO ACID FREQUENCIES ...
# "proteome_all.dat"
# # file with the organisms of interest
# dat_fname = os.path.join(bib2_scr_path,'catalog_with_accesion.dat')
# dat = pd.read_csv(dat_fname)
aacids = sorted(list('CMFILVWYAGTSNQDEHRKP'))
cost_vec_path = path
akashi = os.path.join(cost_vec_path,'akashi-cost.d')
argentina = os.path.join(cost_vec_path,'argentina-cost.d')
akashi_cost = pd.read_csv(akashi,header=None,sep=' ')
argentina_cost = pd.read_csv(argentina,header=None,sep=' ')
thermo_freq = pd.read_csv(os.path.join(path,'arch_thermo.dat'),header=None,sep=' ')
akashi_cost.set_index(0,inplace=True)
argentina_cost.set_index(0,inplace=True)
thermo_freq.set_index(0,inplace=True)
akashi_cost.sort_index(inplace=True)
argentina_cost.sort_index(inplace=True)
thermo_freq.sort_index(inplace=True)
#
gen_dat_org = gen_dat.groupby('assembly_accession')
# genom_id = orgs.groups.keys() # env_dat['assembly_accession'] ...
# gen_dat_grouped.get_group(idx)
#
# how to get quantile ...
# q75 = pid_cai['CAI'].quantile(q=0.75)
#
#
num_of_quantiles = 5
#
stat_dat = {'assembly_accession':[],
'OptimumTemperature':[],
'TrOp':[]}
for i in range(num_of_quantiles):
stat_dat['q%d'%i] = []
stat_dat['R20_q%d'%i] = []
stat_dat['Akashi_q%d'%i] = []
#
env_dat_tax = pd.merge(env_dat,taxon_dat,on='assembly_accession')
#
for idx,topt,halo in env_dat_tax[['assembly_accession','OptimumTemperature','halo']].itertuples(index=False):
# excluding halophiles ...
if not halo:
cds_cai_dat = gen_dat_org.get_group(idx)
# is it a translationally optimized organism ?
        # avoid shadowing the built-ins all() and any()
        trop_all, trop_any = cds_cai_dat['TrOp'].all(), cds_cai_dat['TrOp'].any()
        if trop_all == trop_any:
            trans_opt = trop_all
        else: # any != all
            print("%s@T=%f: Something wrong is happening: TrOp flag is not the same for all ..." % (idx, topt))
# THIS IS just a stupid precaution measure, in case we messed something upstream ...
# not that stupid after all, because NaN is behaving badly here ...
if cds_cai_dat['TrOp'].notnull().all():
#
# we can use this 'qcut' function from pandas to divide our proteins by the quantiles ...
category,bins = pd.qcut(cds_cai_dat['CAI'],q=num_of_quantiles,retbins=True,labels=False)
#
stat_dat['assembly_accession'].append(idx)
stat_dat['OptimumTemperature'].append(topt)
stat_dat['TrOp'].append(trans_opt)
#
# then we could iterate over proteins/cDNAs in these categories ...
for cat in range(num_of_quantiles):
cds_cai_category = cds_cai_dat[category==cat]
total_length = cds_cai_category['protein'].str.len().sum()
IVYWREL = sum(cds_cai_category['protein'].str.count(aa).sum() for aa in list('IVYWREL'))
# IVYWREL = cds_cai_category['protein'].str.count('|'.join("IVYWREL")).sum() # tiny bit slower ...
f_IVYWREL = float(IVYWREL)/float(total_length)
# 20-vector for of amino acid composition ...
aa_freq_20 = np.true_divide([cds_cai_category['protein'].str.count(aa).sum() for aa in aacids],float(total_length))
# slope, intercept, r_value, p_value, std_err = stats.linregress(x,y)
_1,_2,R20,_4,_5 = stats.linregress(aa_freq_20, thermo_freq[1])
# Akashi ...
cost = np.dot(aa_freq_20,akashi_cost[1])
# appending ...
#
#
stat_dat['q%d'%cat].append(f_IVYWREL)
stat_dat['R20_q%d'%cat].append(R20)
stat_dat['Akashi_q%d'%cat].append(cost)
#
#
#
cai_stats_quant = pd.DataFrame(stat_dat)
#
cai_stats_quant_TrOp = cai_stats_quant[cai_stats_quant.TrOp]
cai_stats_quant_noTrOp = cai_stats_quant[~cai_stats_quant.TrOp]
plt.clf()
bins = np.linspace(-0.05,0.05,50)
# plt.hist(list(cai_stats_quant_TrOp.q4 - cai_stats_quant_TrOp.q1),bins=bins,color='blue')
plt.hist(list(cai_stats_quant.q4 - cai_stats_quant.q1),bins=bins,color='red',alpha=0.8)#,cumulative=True)
plt.xlabel("IVYWREL(HExp)-IVYWREL(LExp)")
# plt.show()
plt.savefig("IVYWREL_quantile_hist_arch.png")
plt.clf()
plt.plot(cai_stats_quant.OptimumTemperature,cai_stats_quant.q1,'bo',alpha=0.8)
plt.plot(cai_stats_quant.OptimumTemperature,cai_stats_quant.q4,'ro',alpha=0.8)
plt.xlabel('Temperature')
plt.ylabel('IVYWREL(HE:red;LE:blue)')
# plt.show()
plt.savefig("IVYWREL_dots_compare_arch.png")
plt.clf()
for i in range(num_of_quantiles):
k1 = 'q%d'%i
k2 = 'R20_q%d'%i
k3 = 'Akashi_q%d'%i
#
plt.errorbar([i+1,],cai_stats_quant_noTrOp[cai_stats_quant_noTrOp.OptimumTemperature>0][k1].mean(),yerr=cai_stats_quant_noTrOp[cai_stats_quant_noTrOp.OptimumTemperature>0][k1].std(),fmt='o')
plt.xlim(0,6)
plt.ylabel(k1)
plt.xlabel('CAI quantile')
plt.savefig("IVYWREL_arch_qunatile_trend_Shuff.noTrop.png")
plt.clf()
for i in range(num_of_quantiles):
k1 = 'q%d'%i
k2 = 'R20_q%d'%i
k3 = 'Akashi_q%d'%i
#
plt.errorbar([i+1,],cai_stats_quant_noTrOp[cai_stats_quant_noTrOp.OptimumTemperature>0][k2].mean(),yerr=cai_stats_quant_noTrOp[cai_stats_quant_noTrOp.OptimumTemperature>0][k2].std(),fmt='o')
plt.xlim(0,6)
plt.ylabel(k2)
plt.xlabel('CAI quantile')
plt.savefig("R20_arch_qunatile_trend_Shuff.noTrop.png")
plt.clf()
for i in range(num_of_quantiles):
k1 = 'q%d'%i
k2 = 'R20_q%d'%i
k3 = 'Akashi_q%d'%i
#
plt.errorbar([i+1,],cai_stats_quant_noTrOp[cai_stats_quant_noTrOp.OptimumTemperature>0][k3].mean(),yerr=cai_stats_quant_noTrOp[cai_stats_quant_noTrOp.OptimumTemperature>0][k3].std(),fmt='o')
plt.xlim(0,6)
plt.ylabel(k3)
plt.xlabel('CAI quantile')
plt.savefig("Akashi_arch_qunatile_trend_Shuff.noTrop.png")
#####################################################################################################
plt.clf()
for i in range(num_of_quantiles):
k1 = 'q%d'%i
k2 = 'R20_q%d'%i
k3 = 'Akashi_q%d'%i
#
plt.errorbar([i+1,],cai_stats_quant[cai_stats_quant.OptimumTemperature>0][k1].mean(),yerr=cai_stats_quant[cai_stats_quant.OptimumTemperature>0][k1].std(),fmt='o')
plt.xlim(0,6)
plt.ylabel(k1)
plt.xlabel('CAI quantile')
plt.savefig("IVYWREL_arch_qunatile_trend_Shuff.ALL.png")
plt.clf()
for i in range(num_of_quantiles):
k1 = 'q%d'%i
k2 = 'R20_q%d'%i
k3 = 'Akashi_q%d'%i
#
plt.errorbar([i+1,],cai_stats_quant[cai_stats_quant.OptimumTemperature>0][k2].mean(),yerr=cai_stats_quant[cai_stats_quant.OptimumTemperature>0][k2].std(),fmt='o')
plt.xlim(0,6)
plt.ylabel(k2)
plt.xlabel('CAI quantile')
plt.savefig("R20_arch_qunatile_trend_Shuff.ALL.png")
plt.clf()
for i in range(num_of_quantiles):
k1 = 'q%d'%i
k2 = 'R20_q%d'%i
k3 = 'Akashi_q%d'%i
#
plt.errorbar([i+1,],cai_stats_quant[cai_stats_quant.OptimumTemperature>0][k3].mean(),yerr=cai_stats_quant[cai_stats_quant.OptimumTemperature>0][k3].std(),fmt='o')
plt.xlim(0,6)
plt.ylabel(k3)
plt.xlabel('CAI quantile')
plt.savefig("Akashi_arch_qunatile_trend_Shuff.ALL.png")
#####################################################################################################
plt.clf()
for i in range(num_of_quantiles):
k1 = 'q%d'%i
k2 = 'R20_q%d'%i
k3 = 'Akashi_q%d'%i
#
plt.errorbar([i+1,],cai_stats_quant_TrOp[cai_stats_quant_TrOp.OptimumTemperature>0][k1].mean(),yerr=cai_stats_quant_TrOp[cai_stats_quant_TrOp.OptimumTemperature>0][k1].std(),fmt='o')
plt.xlim(0,6)
plt.ylabel(k1)
plt.xlabel('CAI quantile')
plt.savefig("IVYWREL_arch_qunatile_trend_Shuff.TrOp.png")
plt.clf()
for i in range(num_of_quantiles):
k1 = 'q%d'%i
k2 = 'R20_q%d'%i
k3 = 'Akashi_q%d'%i
#
plt.errorbar([i+1,],cai_stats_quant_TrOp[cai_stats_quant_TrOp.OptimumTemperature>0][k2].mean(),yerr=cai_stats_quant_TrOp[cai_stats_quant_TrOp.OptimumTemperature>0][k2].std(),fmt='o')
plt.xlim(0,6)
plt.ylabel(k2)
plt.xlabel('CAI quantile')
plt.savefig("R20_arch_qunatile_trend_Shuff.TrOp.png")
plt.clf()
for i in range(num_of_quantiles):
k1 = 'q%d'%i
k2 = 'R20_q%d'%i
k3 = 'Akashi_q%d'%i
#
plt.errorbar([i+1,],cai_stats_quant_TrOp[cai_stats_quant_TrOp.OptimumTemperature>0][k3].mean(),yerr=cai_stats_quant_TrOp[cai_stats_quant_TrOp.OptimumTemperature>0][k3].std(),fmt='o')
plt.xlim(0,6)
plt.ylabel(k3)
plt.xlabel('CAI quantile')
plt.savefig("Akashi_arch_qunatile_trend_Shuff.TrOp.png")
# R20 is flat on average (strange bi-modality?!)
# | meso thermo
# ------+-------------
# TrOp | NA NA
# noTrOp| ~~+ ~~-
# Akashi is flat on average (strange local minimum at middle CAI quantile)
# | meso thermo
# ------+-------------
# TrOp | NA NA
# noTrOp| ~ ~
# IVYWREL is declining on average (?!)
# | meso thermo
# ------+-------------
# TrOp | NA NA
# noTrOp| -- --
|
python
|
from flask import Flask, render_template, request
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__, static_url_path='/static')  # serve static assets from /static
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///App.sqlite3'
app.config['SECRET_KEY'] = "secret key"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
class Destination(db.Model):
__tablename__ = "Destination"
DID = db.Column(db.Integer,primary_key=True,autoincrement=True)
Pincode = db.Column(db.Integer)
dod = db.Column(db.String(30))
doa = db.Column(db.String(30))
city = db.Column(db.String(50))
def __init__(self,pin,dod,doa,city):
self.Pincode=pin
self.dod=dod
self.doa=doa
self.city=city
class Passenger(db.Model):
__tablename__ = "Passenger"
PID = db.Column(db.Integer,primary_key=True,autoincrement=True)
fname = db.Column(db.String(30))
lname = db.Column(db.String(30))
noc = db.Column(db.Integer)
noa = db.Column(db.Integer)
address = db.Column(db.String(50))
dob = db.Column(db.String(30))
DID = db.Column(db.Integer, db.ForeignKey('Destination.DID'))
Destination = db.relationship("Destination", backref=db.backref("Destination", uselist=False))
def __init__(self,fname,lname,noc,noa,address,dob,did):
self.fname=fname
self.lname=lname
self.noc=noc
self.noa=noa
self.address=address
self.dob=dob
self.DID=did
class PassengerMobileNumber(db.Model):
__tablename__ = 'PassengerMobileNumber'
id = db.Column(db.Integer, primary_key=True)
PID = db.Column(db.Integer,db.ForeignKey('Passenger.PID'))
MobileNumber=db.Column(db.Integer)
__table_args__ = ( db.UniqueConstraint('PID','MobileNumber'), )
def __init__(self,pid,phnno):
self.MobileNumber=phnno
self.PID=pid
class PassengerDestination(db.Model):
__tablename__ = 'PassengerDestination'
id = db.Column(db.Integer, primary_key=True)
PID = db.Column(db.Integer,db.ForeignKey('Passenger.PID'))
DID = db.Column(db.Integer,db.ForeignKey('Destination.DID'))
__table_args__ = ( db.UniqueConstraint('PID','DID'), )
def __init__(self,pid,did):
self.DID=did
self.PID=pid
class Transaction(db.Model):
__tablename__ = "Transaction"
TransID = db.Column(db.Integer,primary_key=True,autoincrement=True)
Amount = db.Column(db.Integer)
PaymentMode = db.Column(db.String(30))
PID=db.Column(db.Integer, db.ForeignKey('Passenger.PID'))
Passenger = db.relationship("Passenger", backref=db.backref("Passenger", uselist=False))
def __init__(self,Amount,PaymentMode,pid):
self.Amount=Amount
self.PaymentMode=PaymentMode
self.PID=pid
class Room(db.Model):
__tablename__ = "Room"
ROOM_NUMBER = db.Column(db.Integer,primary_key=True)
status = db.Column(db.String(20))
roomtype = db.Column(db.String(20))
PID = db.Column(db.Integer,db.ForeignKey('Passenger.PID'))
def __init__(self,roomtype,Passenger_ID):
self.status="Occupied"
self.roomtype=roomtype
self.PID=Passenger_ID
class Restaurant(db.Model):
__tablename__="Restaurant"
Rest_ID = db.Column(db.String(30),primary_key=True)
No_of_tables = db.Column(db.Integer)
Cuisine = db.Column(db.String(30))
def __init__(self,Restid,c):
self.Rest_ID=Restid
self.Cuisine=c
self.No_of_tables=50
class Table(db.Model):
__tablename__="Table"
S_No = db.Column(db.Integer,primary_key=True)
Table_Number = db.Column(db.Integer,nullable=False)
Rest_ID = db.Column(db.Integer,db.ForeignKey('Restaurant.Rest_ID'),nullable=False)
People_per_table = db.Column(db.Integer)
Tstatus = db.Column(db.String(30),default="Vacant")
PID = db.Column(db.Integer,db.ForeignKey('Passenger.PID'))
__table_args__ = ( db.UniqueConstraint('Table_Number','Rest_ID'), )
def __init__(self,id,ppt,pid):
self.PID=pid
self.Rest_ID=id
self.People_per_table=ppt
@app.route('/Destination.html', methods=['POST',"GET"])
def destination():
return render_template("Destination.html")
@app.route('/Login.html',methods=["POST","GET"])
def login():
return render_template("Login.html")
@app.route('/Restaurants.html')
def restaurant():
return render_template("Restaurants.html")
@app.route('/Restaurants1.html')
def Create():
rest1=Restaurant("ShangPalace","Chinese")
db.session.add(rest1)
rest2=Restaurant("LosLobos","Italian")
db.session.add(rest2)
rest3=Restaurant("SpiceCrossing","Mexican")
db.session.add(rest3)
rest4=Restaurant("LaCucina","Thai")
db.session.add(rest4)
rest5=Restaurant("FoodRepublic","Indian")
db.session.add(rest5)
db.session.commit()
return "<h1>Added successfully<h1>"
@app.route('/')
def home_page():
return render_template("HomePage.html")
@app.route('/About.html')
def about():
return render_template("About.html")
@app.route('/Casino.html')
def casino():
return render_template("Casino.html")
@app.route('/CruiseActivities.html')
def cruise_activities():
return render_template("CruiseActivities.html")
@app.route('/Entertainment.html')
def entertainment():
return render_template("Entertainment.html")
@app.route('/Fitness.html')
def fitness():
return render_template("Fitness.html")
@app.route('/index.html')
def index():
return render_template("index.html")
@app.route('/RestaurantsFoodRepublic.html')
def food_republic():
return render_template("RestaurantsFoodRepublic.html")
@app.route('/RestaurantsLaCucina.html')
def la_cucina():
return render_template("RestaurantsLaCucina.html")
@app.route('/RestaurantsLosLobos.html')
def los_lobos():
return render_template("RestaurantsLosLobos.html")
@app.route('/RestaurantsShangPalace.html')
def shang_palace():
return render_template("RestaurantsShangPalace.html")
@app.route('/RestaurantsSpiceCrossing.html')
def spice_crossing():
return render_template("RestaurantsSpiceCrossing.html")
@app.route('/Spa.html')
def spa():
return render_template("Spa.html")
@app.route('/login', methods = ['POST'])
def login_form():
Pass_ID=request.form['Pass_ID']
passenger_obj = db.session.query(Passenger).get(Pass_ID)
if passenger_obj:
phn = db.session.query(PassengerMobileNumber).filter_by(PID=passenger_obj.PID).all()
        if len(phn) == 0:
            phn1 = phn2 = "Not entered"
        elif len(phn) == 1:
            phn1 = phn[0].MobileNumber
            phn2 = "Not entered"
        else:
            phn1 = phn[0].MobileNumber
            phn2 = phn[1].MobileNumber
rooms = db.session.query(Room).filter_by(PID=passenger_obj.PID).all()
rooms_str=""
for a_room in rooms:
rooms_str = rooms_str + str(a_room.ROOM_NUMBER) + ","
trans = db.session.query(Transaction).filter_by(PID=passenger_obj.PID).all()
return render_template('LoginDisplay.html',psngr=passenger_obj,phn1=phn1,phn2=phn2,room=a_room,rooms_str=rooms_str[0:len(rooms_str)-1],trans_obj=trans[0])
else:
return render_template("Warning.html", pid = Pass_ID)
@app.route('/display', methods = ['POST'])
def display():
dest_obj=Destination(request.form['dest_pin'],request.form['dod'],request.form['doa'],request.form['city'])
db.session.add(dest_obj)
db.session.commit()
passenger_obj=Passenger(request.form['firstname'],request.form['lastname'],request.form['children'],request.form['adults'],request.form['address'],request.form['dob'],dest_obj.DID)
db.session.add(passenger_obj)
db.session.commit()
p_d_obj=PassengerDestination(passenger_obj.PID,dest_obj.DID)
db.session.add(p_d_obj)
db.session.commit()
mob_obj=PassengerMobileNumber(passenger_obj.PID,request.form['phn1'])
db.session.add(mob_obj)
db.session.commit()
mob_obj=PassengerMobileNumber(passenger_obj.PID,request.form['phn2'])
db.session.add(mob_obj)
db.session.commit()
trans_obj=Transaction(request.form['amount'],request.form['payment_mode'],passenger_obj.PID)
db.session.add(trans_obj)
db.session.commit()
no_of_rooms = int(request.form['rooms'])
for i in range(no_of_rooms):
room_obj=Room(request.form['roomtype'],passenger_obj.PID)
db.session.add(room_obj)
db.session.commit()
return render_template("Greet.html", obj = passenger_obj)
@app.route('/Restaurant', methods = ['POST'])
def restaurant_booking():
pid = request.form['PID']
query_obj = db.session.query(Passenger).get(pid)
if not query_obj:
return render_template("Warning.html", pid = pid)
else:
query_obj = db.session.query(Restaurant).get(request.form['restaurant'])
if int(request.form['tables']) > query_obj.No_of_tables:
return "We don't have "+str(request.form['tables'])+" tables vacant for now. Sorry for the inconvenience"
else:
            query_obj.No_of_tables -= int(request.form['tables'])
            for i in range(int(request.form['tables'])):
                table = Table(request.form['restaurant'], request.form['ppt'], pid)
                db.session.add(table)  # without add/commit the bookings were never persisted
            db.session.commit()
            return str(request.form['tables'])+" tables have been booked for you Mr."+db.session.query(Passenger).get(pid).fname
if __name__ == "__main__":
db.create_all();
app.run(debug = True)
|
python
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import scriptcontext as sc
import compas_rhino
from compas_ags.rhino import SettingsForm
from compas_ags.rhino import FormObject
from compas_ags.rhino import ForceObject
__commandname__ = "AGS_toolbar_display"
def RunCommand(is_interactive):
if 'AGS' not in sc.sticky:
compas_rhino.display_message('AGS has not been initialised yet.')
return
scene = sc.sticky['AGS']['scene']
if not scene:
return
# TODO: deal with undo redo
SettingsForm.from_scene(scene, object_types=[FormObject, ForceObject], global_settings=['AGS'])
# ==============================================================================
# Main
# ==============================================================================
if __name__ == '__main__':
RunCommand(True)
|
python
|
class DianpingConfig:
def __init__(self):
self.instance_name = "BERTModel.pt"
self.model_name = self.instance_name
self.BERT_MODEL = "bert-base-chinese"
self.max_sent_lens = 64
class SSTConfig:
def __init__(self):
self.instance_name = "BERTModel.pt"
self.model_name = self.instance_name
self.BERT_MODEL = "bert-base-uncased"
self.max_sent_lens = 32
class SNLIConfig:
def __init__(self):
self.instance_name = "BERTModel.pt"
self.model_name = self.instance_name
self.BERT_MODEL = "bert-base-uncased"
self.max_sent_lens = 64
class IMDBConfig:
def __init__(self):
self.instance_name = "BERTModel.pt"
self.model_name = self.instance_name
self.BERT_MODEL = "bert-base-uncased"
self.max_sent_lens = 254
class LCQMCConfig:
def __init__(self):
self.instance_name = "BERTModel.pt"
self.model_name = self.instance_name
self.BERT_MODEL = "bert-base-chinese"
self.max_sent_lens = 64
|
python
|
from __future__ import unicode_literals
from djangobmf.apps import ContribTemplate
class EmployeeConfig(ContribTemplate):
name = 'djangobmf.contrib.employee'
label = "djangobmf_employee"
|
python
|
import eel
try:
from pyfirmata import Arduino, util
except ImportError:
from pip._internal import main as pipmain
pipmain(['install','pyfirmata'])
from pyfirmata import Arduino, util
#Get Operating System Type
import platform
currentOs = platform.system()
if "linux" in currentOs.lower():
currentOs = "linux"
if "windows" in currentOs.lower():
currentOs = "windows"
#Automatically get the port that the Arduino is on and setup the board
port = ""
if currentOs == "linux":
import os
feedback = "/dev/" + os.popen("ls /dev/ | grep ttyACM").read().strip()
if len(feedback) > 11:
port = feedback
elif currentOs == "windows":
import serial.tools.list_ports
ports = list(serial.tools.list_ports.comports())
for p in ports:
p = str(p)
if "Arduino" in p:
port = p.split(' ', 1)[0]
break
board=Arduino(port)
#Set up pins
red = board.get_pin('d:3:p')
green = board.get_pin('d:5:p')
blue = board.get_pin('d:6:p')
commonAnode = True # set this to false for common cathode setup
theloop = ''
loopIncrementor = 0
#Start the web interface
eel.init('web')
def hexToRgb(hex):
    # Parse a '#rrggbb' string into an (r, g, b) tuple of ints.
    hex = str(hex).lstrip('#')
    return tuple(int(hex[i:i+2], 16) for i in (0, 2, 4))
def writeRgb(r,g,b):
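    # Common-anode RGB LEDs source current from +V, so the PWM duty cycle must
    # be inverted: writing (1 - value) makes larger inputs appear brighter.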
if commonAnode:
r = 1 - r
g = 1 - g
b = 1 - b
red.write(r)
green.write(g)
blue.write(b)
def writeHex(hex):
myhex = hexToRgb(hex)
writeRgb(myhex[0]/255,myhex[1]/255,myhex[2]/255)
#Turn off LEDs to begin with
if commonAnode:
writeRgb(0,0,0)
else:
writeRgb(1,1,1)
def getSteps(hex,steps):
if type(hex) is list:
rgb = hex
elif type(hex) is tuple:
rgb = list(hex)
else:
rgb = list(hexToRgb(hex))
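    # Convert each channel (0-255) into its per-step increment on a 0-1 scale:
    # append c/255/steps at the back, pop the original from the front.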
for i in range(3):
rgb.append(rgb[0]/255/steps)
rgb.pop(0)
return(rgb)
def writeColorPct(color, pct):
rgb = list(hexToRgb(color))
for i in range(3):
rgb[i] = rgb[i] * pct / 100
writeRgb(rgb[0],rgb[1],rgb[2])
@eel.expose
def solid(color):
global loopIncrementor
loopIncrementor += 1
writeHex(color)
@eel.expose
def pulse(colors):
global loopIncrementor
loopIncrementor += 1
theloop = lightLoop(loopIncrementor)
theloop.pulse(colors)
@eel.expose
def fade(colors):
global loopIncrementor
loopIncrementor += 1
theloop = lightLoop(loopIncrementor)
theloop.fade(colors)
@eel.expose
def lightning(color):
global loopIncrementor
loopIncrementor += 1
theloop = lightLoop(loopIncrementor)
theloop.lightning(color)
@eel.expose
def neon(color):
global loopIncrementor
loopIncrementor += 1
theloop = lightLoop(loopIncrementor)
theloop.neon(color)
class lightLoop:
def __init__(self, name):
self.name = name
self.running = True
def pulse(self, colors):
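        # Ramp each colour up over 255 small steps, hold for a second, then
        # ramp back down. self.name < loopIncrementor means a newer effect has
        # been requested, so this loop releases control.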
while self.running:
for c in colors:
toWrite = [0,0,0]
increasing = True
steps = getSteps(c,255)
pulseIncrementor = 0
                while increasing:
for i in range(3):
toWrite[i] = toWrite[i] + steps[i]
                        if toWrite[i] > 1:
                            toWrite[i] = 1  # pin values are on a 0-1 scale
pulseIncrementor += 1
if self.name < loopIncrementor:
self.running = False
                    if self.running:
                        writeRgb(toWrite[0], toWrite[1], toWrite[2])
                        eel.sleep(0.01)
if pulseIncrementor >= 255:
eel.sleep(1.0)
increasing = False
                    while not increasing:
for i in range(3):
toWrite[i] = toWrite[i] - steps[i]
if toWrite[i] <= 0:
toWrite[i] = 0
pulseIncrementor -= 1
if self.name < loopIncrementor:
self.running = False
                        if self.running:
                            writeRgb(toWrite[0], toWrite[1], toWrite[2])
                            eel.sleep(0.01)
if pulseIncrementor <= 0:
increasing = True
def fade(self, colors):
currentColor = [0,0,0]
while self.running:
for c in colors:
toWrite = list(currentColor)
goto = list(hexToRgb(c))
for i in range(3):
goto[i] = goto[i] - toWrite[i]
steps = goto
for i in range(3):
steps[i] /= 255 #put steps in decimal form
toWrite[i] /= 255 #put toWrite in decimal form
steps[i] /= 255 #break steps into 255 steps
pulseIncrementor = 0
increasing = True
                while increasing:
for i in range(3):
toWrite[i] += steps[i]
if toWrite[i] > 1:
toWrite[i] = 1
elif toWrite[i] < 0:
toWrite[i] = 0
pulseIncrementor += 1
if self.name < loopIncrementor:
self.running = False
                    if self.running:
                        writeRgb(toWrite[0], toWrite[1], toWrite[2])
                        eel.sleep(0.02)
if pulseIncrementor >= 255:
eel.sleep(1.0)
increasing = False
currentColor = list(hexToRgb(c))
def lightning(self, color):
while self.running:
if self.name < loopIncrementor:
self.running = False
if self.running:
writeHex(color)
def neon(self, color):
while self.running:
if self.name < loopIncrementor:
self.running = False
if self.running:
writeHex(color)
eel.start('main.html')
|
python
|
from sys import argv
script, filename = argv
print(f"We're going to erase {filename}.")
print("If you don't want that, hit CTRL-C (^C).")
print("If you do want that, hit RETURN.")
input("?")
print("Opening the file..")
target=open(filename,'w')
print("Truncating the file,Goodbye!")
target.truncate()
print("Now I'm going to ask you for three lines.")
line1=input("line1:")
line2=input("line2:")
line3=input("line3:")
print("I'm going to write these to the file.")
target.write(line1)
target.write("\n")
target.write(line2)
target.write("\n")
target.write(line3)
target.write("\n")
print("And finally,we close it")
target.close()
|
python
|
# =============================================================================== #
# #
# This file has been generated automatically!! Do not change this manually! #
# #
# =============================================================================== #
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class CheckStickerSetName(BaseObject):
"""
Checks whether a name can be used for a new sticker set
:param name: Name to be checked
:type name: :class:`str`
"""
ID: str = Field("checkStickerSetName", alias="@type")
name: str
@staticmethod
def read(q: dict) -> CheckStickerSetName:
return CheckStickerSetName.construct(**q)
|
python
|
#MenuTitle: Check glyphsets match across open fonts
'''
Find missing glyphs across fonts
'''
def main():
fonts = Glyphs.fonts
glyphsets = {}
try:
for font in fonts:
if font.instances[0].name not in glyphsets:
glyphsets[font.instances[0].name] = set()
            print('Name: %s, Glyphs: %s' % (font.instances[0].name, len(font.glyphs)))
for glyph in font.glyphs:
glyphsets[font.instances[0].name].add(glyph.name)
for font1 in glyphsets:
for font2 in glyphsets:
diff_glyphs = glyphsets[font1] - glyphsets[font2]
                print(font1, '-', font2, diff_glyphs)
except AttributeError:
        print('Font does not have any instances')
raise
if __name__ == '__main__':
main()
|
python
|
initial = """\
.|||.#..|##.#||..#.|..|..||||..#|##.##..#...|.....
.|#.|#..##...|#.........#.#..#..|#.|#|##..#.#|..#.
#....#|.#|.###||..#.|...|.|.#........#.|.#.#|..#..
|..|#....|#|...#.#..||.#..||......#.........|....|
.|.|..#|...#.|.###.|...||.|.|..|...|#|.#..|.|..|.|
#.....||.#..|..|..||#.||#..|.||..||##.......#.....
||.#..........|....##...|..#.|..#..#|#.#....#..#.#
.#.#|.|.|.##|..#......|...#||..#.||..|..|#....|##.
#.#..||.|...#|...|..#.#.||#.||.#.|.....|##.|....#.
.#......||.|#......#|#.|...||...||##...#...####.#.
.....#..|..#..#|..#...#.|#...||...#.##.||.|..|.||.
.#|.#.|.....|#..#||..|...|...##.#.###|..|.###.|#..
..#.......#.|#.##....#..|##.#......#|......#..#...
.|..#|.#.....#..||..#.#.|##..|#.||#..|.#..|.|##|#|
##|.#........|#.#.#|..|....|.......#..#|.#.|....#.
....##...|....#..............||.|..#........|.....
##||.|.#...|.#|..#....#..|...|..#..#..|##||.....|.
.|.#...|#.......#...#.#..|#....#|#|#..#|...##..||.
.|..|.|..#...##...||#..##|#|..|...#.....#||...##..
.|...|..||#..#|.|.#...|||.|#.||#|......|#|.#..|#..
|##.....|.|#...#||.....#..#.|.#..|.....||....||..#
|.|#|||.....|||..#......#..||........||.#.#..||#||
#.|.|.#.....#....#.#..#||.||..|.#.|....|...#.#...#
|.|....#.#||...#.....#|#|.|.#......##.|.||...#.||.
|...|...|##........|.|...#...|.........|..##..|.##
|.||..|.#.#|.#||...|.|.....#...#.####|.||||..|||.|
.....#..##..|..#|.||#...|..##...##|....##||.##....
#|##..#|.#..|##...|..#.##.|##.....###.|..#.|..#.|.
|.##..|#...|.|.||.......#..#||.....#|..#||##..#|..
..|.#.#.....##.|#|...#........##......#...#...||..
|.#....###|..|##.#...#|....|..#.....#.##.|..|...||
.....#..#.....|.##......#......|..|...##|.|.#..#||
...##.#.......#|.#..||.#|..#|...#...|||.#.......|#
#|..#|....|||...|..#|....#......#..#...|#.......||
...#|##|..........|..###||..#|...|.##.|.#.#...#...
#|##|.#|#...|..#......||..#.|#|..#..|..#|..#......
#||#.#.....|...|..|##|..#|...##.||..#|.|#||.|..|..
#..#..|.|.||...#|.|.|..|..|..|....#.#||.#.....|#.#
#.|.#..##...|..#.|..#..#..#.#||.#.............#...
..|##|.#|.|......|#...|#.#.....|#|#.#.|...|#......
.|.|.|...#..##..#|###..|#....#..#.#..|||.###|##...
|#...|......|...##..|.|#...#..|.#.........#..##.#.
.|...##||#.....#..#..|..#..#.|#.|.||.##.|....|..#|
|#..|..|.#..||...#...#|..##|||##..|.##||#.#.|....|
.......#......|.....||.#..|#.#.#|#.##....|...|.#..
.....#..|...|..##.....|...#...|.|||.##..|.#||.##|.
..#||...|#.#|#|....#..|||.|##..#|.|.........|....#
..#...|.#...|#..#........#...###..##..##||...|..#.
..|.||.#.....|#..|.##...#.|...|#...#||..####..#.|.
.|.....#....||.#...#.......#|........#...#|#|...|#"""
initial = initial.splitlines()
size = (max(map(len, initial)), len(initial))
def convert(grid, pos, debug=False):
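    # Cellular-automaton rules (as implemented below): open ground (.) grows
    # trees (|) when adjacent to 3+ trees; trees become a lumberyard (#) when
    # adjacent to 3+ lumberyards; a lumberyard persists only when adjacent to
    # at least one lumberyard and one acre of trees.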
x, y = pos
squ = grid[y][x]
adj = []
for xi in range(max((x-1, 0)), min((x+2, size[0]))):
for yi in range(max((y-1, 0)), min((y+2, size[1]))):
if xi == x and yi == y:
continue
adj.append(grid[yi][xi])
if debug:
print(adj)
if squ == ".":
if adj.count("|") >= 3:
return("|")
return(".")
elif squ == "|":
if adj.count("#") >= 3:
return("#")
return("|")
elif squ == "#":
if adj.count("|")>=1 and adj.count("#")>=1:
return("#")
return(".")
def update(grid):
new_grid = []
for y in range(0, size[1]):
new_grid.append("")
for x in range(0, size[0]):
new_grid[y] += convert(grid, (x,y))
return(new_grid)
grid = initial
seen_grids = [grid]
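# Simulate until a previously seen grid recurs: from that point on the
# automaton is periodic, so we can jump straight to minute 1,000,000,000.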
for x in range(1, 1000000001):
grid = update(grid)
if grid in seen_grids:
break
seen_grids.append(grid)
repeat_i = seen_grids.index(grid)
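# Skip past the pre-periodic prefix, then index into the cycle by taking the
# remaining step count modulo the cycle length.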
grid = seen_grids[(1000000000-repeat_i) % (len(seen_grids)-repeat_i) + repeat_i]
gridstr = "\n".join(grid)
clear = gridstr.count(".")
wooded = gridstr.count("|")
lumber = gridstr.count("#")
print(wooded*lumber)
|
python
|
from group import GroupTestCases
from user import UserTestCases
from permission import PermissionTestCases
from core import *
|
python
|
'''
Defines the training step.
'''
import sys
sys.path.append('tfutils')
import tensorflow as tf
from tfutils.base import get_optimizer, get_learning_rate
import numpy as np
import cv2
from curiosity.interaction import models
import h5py
import json
class RawDepthDiscreteActionUpdater:
'''
Provides the training step.
This is probably where we can put parallelization.
Not finished!
'''
    def __init__(self, world_model, rl_model, data_provider, eta):
self.data_provider = data_provider
self.world_model = world_model
self.rl_model = rl_model
self.eta = eta
self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
        self.action = tf.placeholder(tf.float32, [None] + world_model.action_one_hot.get_shape().as_list()[1:])
self.adv = tf.placeholder(tf.float32, [None])
self.r = tf.placeholder(tf.float32, [None])
log_prob_tf = tf.nn.log_softmax(rl_model.logits)
prob_tf = tf.nn.softmax(rl_model.logits)
        pi_loss = -tf.reduce_sum(tf.reduce_sum(log_prob_tf * self.action, [1]) * self.adv)
vf_loss = .5 * tf.reduce_sum(tf.square(rl_model.vf - self.r))
entropy = -tf.reduce_sum(prob_tf * log_prob_tf)
self.rl_loss = pi_loss + 0.5 * vf_loss - entropy * 0.01
        rl_opt_params, rl_opt = get_optimizer(learning_rate, self.rl_loss)  # NOTE: learning_rate is undefined here; this class is unfinished (see docstring)
def replace_the_nones(my_list):
'''
Assumes my_list[-1] is np array
'''
return [np.zeros(my_list[-1].shape, dtype = my_list[-1].dtype) if elt is None else elt for elt in my_list]
def postprocess_batch_depth(batch, state_desc):
obs, msg, act, act_post = batch
depths = replace_the_nones(obs[state_desc])
obs_past = np.array([depths[:-1]])
obs_fut = np.array([depths[1:]])
actions = np.array([replace_the_nones(act)])
actions_post = np.array([replace_the_nones(act_post)])
return obs_past, actions, actions_post, obs_fut
# def postprocess_batch_depth(batch):
# depths = np.array([[timepoint if timepoint is not None else np.zeros(obs['depths1'][-1].shape, dtype = obs['depths1'][-1].dtype) for timepoint in obs['depths1']] for obs in batch.states])
# actions = np.array(batch.actions)
# next_depth = np.array([batch.next_state['depths1']])
# return depths, actions, next_depth
def postprocess_batch_for_actionmap(batch, state_desc):
obs, msg, act = batch
prepped = {}
depths = replace_the_nones(obs[state_desc])
depths_past = np.array([depths[:-1]])
    depths_fut = np.array([depths[1:]])  # future frames, mirroring postprocess_batch_depth (was depths[:1])
objects = np.array([replace_the_nones(obs[state_desc])[:-1]])
actions = np.array([replace_the_nones(act)])
action_ids_list = []
for i in range(2):
action_msg = msg[i]['msg']['actions'] if msg[i] is not None else []
if len(action_msg):
idx = int(action_msg[0]['id'])
else:
            idx = -10000  # just something that's not an id seen
action_ids_list.append(idx)
action_ids = np.array([action_ids_list])
return depths_past, objects, actions, action_ids, depths_fut
# def postprocess_batch_for_actionmap(batch):
# prepped = {}
# for desc in ['depths1', 'objects1']:
# prepped[desc] = np.array([[timepoint if timepoint is not None else np.zeros(obs[desc][-1].shape, dtype = obs[desc][-1].dtype) for timepoint in obs[desc]] for obs in batch.states])
# actions = np.array([[np.zeros(batch.next_state['action'][-1].shape, batch.next_state['action'][-1].dtype) if timepoint is None else timepoint for timepoint in batch.next_state['action']]])
# print('actions shape')
# print(actions.shape)
# print(len(batch.next_state['action']))
# action_ids_list = []
# for i in range(2):
# action_msg = batch.next_state['msg'][i]['msg']['actions'] if batch.next_state['msg'][i] is not None else []
# if len(action_msg):
# idx = int(action_msg[0]['id'])
# action_ids_list.append(idx)
# action_ids = np.array([action_ids_list])
# next_depths = np.array([batch.next_state['depths1']])
# return prepped['depths1'], prepped['objects1'], actions, action_ids, next_depths
class ExperienceReplayPostprocessor:
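    # Shapes training results for logging: on a periodic "big save" schedule
    # the full batch (minus bulky image arrays) is kept; otherwise only a
    # reduced key set is retained.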
def __init__(self, big_save_keys = None, little_save_keys = None, big_save_len = None, big_save_freq = None, state_descriptor = None):
self.big_save_keys = big_save_keys
self.little_save_keys = little_save_keys
self.big_save_len = big_save_len
self.big_save_freq = big_save_freq
self.state_descriptor = state_descriptor
self.big_save_keys.append('map_draw')
self.little_save_keys.append('map_draw')
self.big_save_keys.extend(['act_lr', 'um_lr'])
self.little_save_keys.extend(['act_lr', 'um_lr'])
def postprocess(self, training_results, batch):
global_step = training_results['global_step']
res = {}
if (global_step) % self.big_save_freq < self.big_save_len:
save_keys = self.big_save_keys
#est_losses = [other[1] for other in batch['other']]
#action_sample = [other[2] for other in batch['other']]
res['batch'] = {}
            for desc, val in batch.items():
if desc not in ['recent', 'depths1', 'objects1', 'images1']:
res['batch'][desc] = val
res['recent'] = batch['recent']
else:
save_keys = self.little_save_keys
        res.update(dict(pair for pair in training_results.items() if pair[0] in save_keys))
#if 'other' in batch['recent']:
# entropies = [other[0] for other in batch['recent']['other']]
# entropies = np.mean(entropies)
# res['entropy'] = entropies
if 'msg' in batch['recent']:
looking_at_obj = [1 if msg is not None and msg['msg']['action_type'] == 'OBJ_ACT' else 0 for msg in batch['recent']['msg']]
res['obj_freq'] = np.mean(looking_at_obj)
        elif isinstance(batch['recent'], list) and len(batch['recent'][0]) > 0:
mean_per_provider = []
for provider_recent in batch['recent']:
looking_at_obj = [1 if msg is not None and msg['msg']['action_type'] == 'OBJ_ACT' else 0 for msg in provider_recent['msg']]
mean_per_provider.append(np.mean(looking_at_obj))
res['obj_freq'] = np.mean(mean_per_provider)
res['obj_freq_per_provider_noprint'] = mean_per_provider
return res
class UncertaintyPostprocessor:
def __init__(self, big_save_keys = None, little_save_keys = None, big_save_len = None, big_save_freq = None, state_descriptor = None):
self.big_save_keys = big_save_keys
self.little_save_keys = little_save_keys
self.big_save_len = big_save_len
self.big_save_freq = big_save_freq
self.state_descriptor = state_descriptor
def postprocess(self, training_results, batch):
global_step = training_results['global_step']
res = {}
print('postprocessor deets')
print(global_step)
print(self.big_save_freq)
print(self.big_save_len)
if (global_step) % self.big_save_freq < self.big_save_len:
print('big time')
save_keys = self.big_save_keys
est_losses = [other[1] for other in batch['recent']['other']]
action_sample = [other[2] for other in batch['recent']['other']]
res['batch'] = {'obs' : batch['depths1'], 'act' : batch['action'], 'act_post' : batch['action_post'], 'est_loss' : est_losses, 'action_sample' : action_sample}
res['msg'] = batch['recent']['msg']
else:
print('little time')
save_keys = self.little_save_keys
            res.update(dict((k, v) for (k, v) in training_results.items() if k in save_keys))
#res['msg'] = batch['msg'][-1]
entropies = [other[0] for other in batch['recent']['other']]
entropies = np.mean(entropies)
res['entropy'] = entropies
        looking_at_obj = [1 if msg is not None and msg['msg']['action_type'] == 'OBJ_ACT' else 0 for msg in batch['recent']['msg']]
res['obj_freq'] = np.mean(looking_at_obj)
return res
class DataWriteUpdater:
def __init__(self, data_provider, updater_params):
self.data_provider = data_provider
fn = updater_params['hdf5_filename']
N = updater_params['N_save']
height, width = updater_params['image_shape']
act_dim = updater_params['act_dim']
print('setting up save loc')
self.hdf5 = hdf5 = h5py.File(fn, mode = 'a')
dt = h5py.special_dtype(vlen = str)
self.handles = {'msg' : hdf5.require_dataset('msg', shape = (N,), dtype = dt),
'depths1' : hdf5.require_dataset('depths1', shape = (N, height, width, 3), dtype = np.uint8),
'objects1' : hdf5.require_dataset('objects1', shape = (N, height, width, 3), dtype = np.uint8),
'images1': hdf5.require_dataset('images1', shape = (N, height, width, 3), dtype = np.uint8),
'action' : hdf5.require_dataset('action', shape = (N, act_dim), dtype = np.float32),
'action_post' : hdf5.require_dataset('action_post', shape = (N, act_dim), dtype = np.float32)}
print('save loc set up')
self.start = 0
def update(self):
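        # Append this batch into the preallocated HDF5 datasets, advancing the
        # running write offset self.start.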
batch = self.data_provider.dequeue_batch()
bs = len(batch['recent']['msg'])
end = self.start + bs
for k in ['depths1', 'objects1', 'images1', 'action', 'action_post']:
tosave = batch['recent'][k]
if k in ['action', 'action_post']:
tosave = tosave.astype(np.float32)
            self.handles[k][self.start : end] = tosave
self.handles['msg'][self.start : end] = [json.dumps(msg) for msg in batch['recent']['msg']]
self.start = end
def close(self):
self.hdf5.close()
class LatentUncertaintyValidator:
def __init__(self, models, data_provider):
self.um = models['uncertainty_model']
self.wm = models['world_model']
self.targets = {
'act_pred' : self.wm.act_pred,
'fut_loss' : self.wm.fut_loss, 'act_loss' : self.wm.act_loss, 'um_loss' : self.um.uncertainty_loss,
'estimated_world_loss' : self.um.estimated_world_loss, 'loss_per_example' : self.um.true_loss,
'act_loss_per_example' : self.wm.act_loss_per_example
}
self.dp = data_provider
def run(self, sess):
batch = self.dp.dequeue_batch()
feed_dict = {
self.wm.states : batch['depths1'],
self.wm.action : batch['action'],
self.wm.action_post : batch['action_post'],
self.wm.obj_there : batch['obj_there']
}
res = sess.run(self.targets, feed_dict = feed_dict)
res['batch'] = {}
        for desc, val in batch.items():
print(desc)
if desc == 'obj_there':
res['batch'][desc] = val
elif desc != 'recent':
res['batch'][desc] = val[:, -1]
        res['recent'] = batch['recent']
        return res
class ObjectThereValidater:
def __init__(self, models, data_provider):
self.um = models['uncertainty_model']
self.wm = models['world_model']
self.targets = {'um_loss' : self.um.uncertainty_loss, 'loss_per_example' : self.um.true_loss,
'estimated_world_loss' : self.um.estimated_world_loss}
self.dp = data_provider
def run(self, sess):
batch = self.dp.dequeue_batch()
feed_dict = {
self.wm.states : batch['depths1'],
self.wm.action : batch['action'],
self.wm.obj_there : batch['obj_there']
}
return sess.run(self.targets, feed_dict = feed_dict)
class ActionUncertaintyValidator:
def __init__(self, models, data_provider):
self.um = um = models['uncertainty_model']
self.wm = wm = models['world_model']
self.targets = {'act_pred' : self.wm.act_pred, 'act_loss' : self.wm.act_loss,
'estimated_world_loss' : self.um.estimated_world_loss,
'um_loss' : self.um.uncertainty_loss, 'loss_per_example' : self.um.true_loss}
self.dp = data_provider
def run(self, sess):
batch = self.dp.dequeue_batch()
feed_dict = {
self.wm.states : batch['depths1'],
self.wm.action : batch['action'],
self.wm.action_post : batch['action_post']
}
res = sess.run(self.targets, feed_dict = feed_dict)
res['batch'] = batch
return res
class ActionUncertaintyValidatorWithReadouts:
def __init__(self, model, data_provider):
self.dp = data_provider
self.wm = model['world_model']
self.um = model['uncertainty_model']
self.targets = {}
self.targets.update({k : v for k, v in self.wm.readouts.items() if k not in self.wm.save_to_gfs})
self.targets.update({k : v for k, v in self.um.readouts.items() if k not in self.um.save_to_gfs})
#this should be changed for an online data provider, set to do nothing
self.map_draw_mode = 'specified_indices'
#relies on there being just one obs type
        self.state_desc = list(data_provider.data_lengths['obs'].keys())[0]
        self.insert_objthere = data_provider.num_objthere is not None
def run(self, sess):
batch = self.dp.dequeue_batch()
feed_dict = {
self.wm.states : batch[self.state_desc],
self.wm.action : batch['action'],
self.wm.action_post : batch ['action_post']
}
if self.insert_objthere:
feed_dict[self.wm.obj_there_via_msg] = batch['obj_there']
res = sess.run(self.targets, feed_dict = feed_dict)
#TODO case it for online
res['recent'] = {}
#if self.map_draw_mode == 'specified_indices':
# map_draw_res = []
# for idx in self.map_draw_example_indices:
# obs_for_actor = [batch[self.state_desc][idx][t] for t in self.map_draw_timestep_indices]
# action_samples = self.action_sampler.sample_actions()
# action, entropy, estimated_world_loss = self.um.act(sess, action_samples, obs_for_actor)
# to_add = {'example_id' : idx, 'action_sample' : action, 'estimated_world_loss' : estimated_world_loss,
# 'action_samples' : action_samples, 'depths1' : batch[self.state_desc][idx],
# 'action' : batch['action'][idx], 'action_post' : batch['action_post'][idx]}
# map_draw_res.append(to_add)
#res['map_draw'] = map_draw_res
return res
class ObjectThereUpdater:
def __init__(self, world_model, uncertainty_model, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params):
self.data_provider = data_provider
self.wm = world_model
self.um = uncertainty_model
self.postprocessor = postprocessor
self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
self.um_lr_params, um_lr = get_learning_rate(self.global_step, ** learning_rate_params['uncertainty_model'])
um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'], var_list = self.um.var_list)
self.targets = {'um_loss' : self.um.uncertainty_loss, 'um_lr' : um_lr, 'um_optimizer' : um_opt,
'global_step' : self.global_step, 'loss_per_example' : self.um.true_loss,
'estimated_world_loss' : self.um.estimated_world_loss
}
self.state_desc = updater_params['state_desc']
def update(self, sess, visualize = False):
batch = self.data_provider.dequeue_batch()
state_desc = self.state_desc
feed_dict = {
self.wm.states : batch[state_desc],
self.wm.action : batch['action'],
self.wm.obj_there : batch['obj_there']
}
res = sess.run(self.targets, feed_dict = feed_dict)
res = self.postprocessor.postprocess(res, batch)
return res
class SquareForceMagUpdater:
def __init__(self, models, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params):
self.dp = data_provider
self.wm = models['world_model']
self.um = models['uncertainty_model']
self.postprocessor = postprocessor
self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
self.um_lr_params, um_lr = get_learning_rate(self.global_step, ** learning_rate_params['uncertainty_model'])
um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'], var_list = self.um.var_list)
self.targets = {'um_loss' : self.um.uncertainty_loss, 'um_lr' : um_lr, 'um_optimizer' : um_opt,
'global_step' : self.global_step, 'loss_per_example' : self.um.true_loss,
'estimated_world_loss' : self.um.estimated_world_loss
}
if self.um.exactly_whats_needed:
self.targets['oh_my_god'] = self.um.oh_my_god
self.state_desc = updater_params['state_desc']
def update(self, sess, visualize = False):
batch = self.dp.dequeue_batch()
state_desc = self.state_desc
feed_dict = {
self.wm.states : batch[state_desc],
self.wm.action : batch['action'],
self.wm.action_post : batch['action_post']
}
if self.um.insert_obj_there:
print('adding obj_there to feed dict')
feed_dict[self.um.obj_there] = batch['obj_there']
res = sess.run(self.targets, feed_dict = feed_dict)
res = self.postprocessor.postprocess(res, batch)
return res
class DebuggingForceMagUpdater:
def __init__(self, models, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params):
self.dp = data_provider
self.wm = models['world_model']
self.um = models['uncertainty_model']
self.postprocessor = postprocessor
self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0, dtype = tf.int32))
print(learning_rate_params.keys())
um_lr_params, um_lr = get_learning_rate(self.global_step, **learning_rate_params['uncertainty_model'])
um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'])
self.targets = {'um_loss' : self.um.uncertainty_loss, 'um_optimizer' : um_opt, 'global_step' : self.global_step,
'loss_per_example' : self.um.true_loss, 'estimated_world_loss' : self.um.estimated_world_loss, 'ans' : self.um.ans,
'oh_my_god' : self.um.oh_my_god, 'model_parameters' : self.um.var_list}
def update(self, sess):
batch = self.dp.dequeue_batch()
feed_dict = {
self.wm.action : batch['action'],
self.wm.action_post : batch['action_post'],
self.um.obj_there : batch['obj_there']
}
res = sess.run(self.targets, feed_dict = feed_dict)
res = self.postprocessor.postprocess(res, batch)
return res
class LatentFreezeUpdater:
def __init__(self, models, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params):
        self.data_provider = data_provider \
            if isinstance(data_provider, list) else [data_provider]
self.wm = models['world_model']
self.um = models['uncertainty_model']
freeze_wm = updater_params['freeze_wm']
freeze_um = updater_params['freeze_um']
self.postprocessor = postprocessor
self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
self.act_step = tf.get_variable('act_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
self.fut_step = tf.get_variable('fut_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
self.um_step = tf.get_variable('ext_uncertainty_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
self.targets = {}
self.state_desc = updater_params.get('state_desc', 'depths1')
if not freeze_wm:
act_lr_params, act_lr = get_learning_rate(self.act_step, **learning_rate_params['world_model']['act_model'])
fut_lr_params, fut_lr = get_learning_rate(self.fut_step, **learning_rate_params['world_model']['fut_model'])
act_opt_params, act_opt = get_optimizer(act_lr, self.wm.act_loss, self.act_step, optimizer_params['world_model']['act_model'], var_list = self.wm.act_var_list + self.wm.encode_var_list)
fut_opt_params, fut_opt = get_optimizer(fut_lr, self.wm.fut_loss, self.fut_step, optimizer_params['world_model']['fut_model'], var_list = self.wm.fut_var_list)
self.targets['act_opt'] = act_opt
self.targets['fut_opt'] = fut_opt
self.targets['act_lr'] = act_lr
self.targets['fut_lr'] = fut_lr
if not freeze_um:
um_lr_params, um_lr = get_learning_rate(self.um_step, **learning_rate_params['uncertainty_model'])
um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.um_step, optimizer_params['uncertainty_model'], var_list = self.um.var_list)
self.targets['um_opt'] = um_opt
self.targets['um_lr'] = um_lr
self.targets['global_step'] = self.global_step
global_increment = tf.assign_add(self.global_step, 1)
um_increment = tf.assign_add(self.um.step, 1)
self.targets.update({'global_increment' : global_increment, 'um_increment' : um_increment})
self.targets.update(self.wm.readouts)
self.targets.update(self.um.readouts)
assert set(self.wm.readouts.keys()) != set(self.um.readouts.keys())
def update(self, sess, visualize = False):
if self.um.just_random:
print('Selecting action at random')
batch = {}
for i, dp in enumerate(self.data_provider):
provider_batch = dp.dequeue_batch()
for k in provider_batch:
if k in batch:
batch[k].append(provider_batch[k])
else:
batch[k] = [provider_batch[k]]
for k in ['action', 'action_post', self.state_desc]:
batch[k] = np.concatenate(batch[k], axis=0)
feed_dict = {
self.wm.states : batch[self.state_desc],
self.wm.action : batch['action'],
self.wm.action_post : batch['action_post']
}
res = sess.run(self.targets, feed_dict = feed_dict)
res.pop('um_increment')
res.pop('global_increment')
global_step = res['global_step']
#if self.map_draw_mode is not None and global_step % self.map_draw_freq == 0:
# if self.map_draw_mode == 'specified_indices':
# map_draw_res = []
# for idx in self.map_draw_example_indices:
# obs_for_actor = [batch[self.state_desc][idx][t] for t in self.map_draw_timestep_indices]
# action_samples = self.action_sampler.sample_actions()
# action, entropy, estimated_world_loss = self.um.act(sess, action_samples, obs_for_actor)
# to_add = {'example_id' : idx, 'action_sample' : action, 'estimated_world_loss' : estimated_world_loss,
# 'action_samples' : action_samples, 'depths1' : batch[self.state_desc][idx],
# 'action' : batch['action'][idx], 'action_post' : batch['action_post'][idx]}
# map_draw_res.append(to_add)
# res['map_draw'] = map_draw_res
res = self.postprocessor.postprocess(res, batch)
return res, global_step
class FreezeUpdater:
def __init__(self, models, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params):
self.data_provider = data_provider \
if isinstance(data_provider, list) else [data_provider]
self.wm = models['world_model']
self.um = models['uncertainty_model']
freeze_wm = updater_params['freeze_wm']
freeze_um = updater_params['freeze_um']
self.postprocessor = postprocessor
self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
self.act_lr_params, act_lr = get_learning_rate(self.global_step, ** learning_rate_params['world_model']['act_model'])
self.um_lr_params, um_lr = get_learning_rate(self.global_step, ** learning_rate_params['uncertainty_model'])
num_not_frozen = 0
self.targets = {}
self.state_desc = updater_params.get('state_desc', 'depths1')
if not freeze_wm:
num_not_frozen += 1
act_opt_params, act_opt = get_optimizer(act_lr, self.wm.act_loss, self.global_step, optimizer_params['world_model']['act_model'], var_list = self.wm.act_var_list + self.wm.encode_var_list)
self.targets['act_opt'] = act_opt
if not freeze_um:
num_not_frozen += 1
um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'], var_list = self.um.var_list)
self.targets['um_opt'] = um_opt
if num_not_frozen == 0:
self.targets['global_step'] = self.global_step
self.targets['increment'] = tf.assign_add(self.global_step, 1)
else:
self.global_step = self.global_step / num_not_frozen
self.targets['global_step'] = self.global_step
self.targets.update({'act_lr' : act_lr, 'um_lr' : um_lr})
assert set(self.wm.readouts.keys()) != set(self.um.readouts.keys())
self.targets.update(self.wm.readouts)
self.targets.update(self.um.readouts)
um_increment = tf.assign_add(self.um.step, 1)
assert 'um_increment' not in self.targets
self.targets['um_increment'] = um_increment
self.obj_there_supervision = updater_params.get('include_obj_there', False)
#self.map_draw_mode = None
#Map drawing. Meant to have options, but for now just assuming one sort of specification
#self.state_desc = updater_params.get('state_desc', 'depths1')
#self.map_draw_mode = updater_params['map_draw_mode']
        #this specification specifies batch example indices for which we do a forward pass.
        #we need one forward pass per index because the action samples form the 'batch.'
#self.action_sampler = action_sampler
#assert self.map_draw_mode == 'specified_indices' and self.action_sampler is not None, (self.map_draw_mode, action_sampler)
#self.map_draw_example_indices = updater_params['map_draw_example_indices']
#self.map_draw_timestep_indices = updater_params['map_draw_timestep_indices']
#self.map_draw_freq = updater_params['map_draw_freq']
def update(self, sess, visualize = False):
if self.um.just_random:
print('Selecting action at random')
batch = {}
for i, dp in enumerate(self.data_provider):
provider_batch = dp.dequeue_batch()
for k in provider_batch:
if k in batch:
batch[k].append(provider_batch[k])
else:
batch[k] = [provider_batch[k]]
for k in ['action', 'action_post', self.state_desc]:
batch[k] = np.concatenate(batch[k], axis=0)
feed_dict = {
self.wm.states : batch[self.state_desc],
self.wm.action : batch['action'],
self.wm.action_post : batch['action_post']
}
if self.obj_there_supervision:
batch['obj_there'] = np.concatenate(batch['obj_there'], axis = 0)
feed_dict[self.wm.obj_there_via_msg] = batch['obj_there']
print('state desc! ' + self.state_desc)
res = sess.run(self.targets, feed_dict = feed_dict)
res.pop('um_increment')
global_step = res['global_step']
#if self.map_draw_mode is not None and global_step % self.map_draw_freq == 0:
# if self.map_draw_mode == 'specified_indices':
# map_draw_res = []
# for idx in self.map_draw_example_indices:
# obs_for_actor = [batch[self.state_desc][idx][t] for t in self.map_draw_timestep_indices]
# action_samples = self.action_sampler.sample_actions()
# action, entropy, estimated_world_loss = self.um.act(sess, action_samples, obs_for_actor)
# to_add = {'example_id' : idx, 'action_sample' : action, 'estimated_world_loss' : estimated_world_loss,
# 'action_samples' : action_samples, 'depths1' : batch[self.state_desc][idx],
# 'action' : batch['action'][idx], 'action_post' : batch['action_post'][idx]}
# map_draw_res.append(to_add)
# res['map_draw'] = map_draw_res
res = self.postprocessor.postprocess(res, batch)
return res, global_step
class JustUncertaintyUpdater:
def __init__(self, models, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params, action_sampler = None):
self.data_provider = data_provider \
if isinstance(data_provider, list) else [data_provider]
self.wm = models['world_model']
self.um = models['uncertainty_model']
self.postprocessor = postprocessor
self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
self.um_lr_params, um_lr = get_learning_rate(self.global_step, ** learning_rate_params['uncertainty_model'])
um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'], var_list = self.um.var_list)
self.targets = {'global_step' : self.global_step, 'um_optimizer' : um_opt}
assert set(self.wm.readouts.keys()) != set(self.um.readouts.keys())
self.targets.update(self.wm.readouts)
self.targets.update(self.um.readouts)
#self.targets = {
# 'fut_pred' : self.wm.fut_pred, 'act_pred' : self.wm.act_pred,
# 'fut_loss' : self.wm.fut_loss, 'act_loss' : self.wm.act_loss,
# 'estimated_world_loss' : self.um.estimated_world_loss,
# ''
# }
#self.targets.update({'um_loss' : self.um.uncertainty_loss, 'um_lr' : um_lr, 'um_optimizer' : um_opt,
# 'global_step' : self.global_step, 'loss_per_example' : self.um.true_loss})
self.map_draw_mode = None
#Map drawing. Meant to have options, but for now just assuming one sort of specification
self.state_desc = updater_params.get('state_desc', 'depths1')
self.map_draw_mode = updater_params['map_draw_mode']
        #this specification specifies batch example indices for which we do a forward pass.
        #we need one forward pass per index because the action samples form the 'batch.'
self.action_sampler = action_sampler
assert self.map_draw_mode == 'specified_indices' and self.action_sampler is not None, (self.map_draw_mode, action_sampler)
self.map_draw_example_indices = updater_params['map_draw_example_indices']
self.map_draw_timestep_indices = updater_params['map_draw_timestep_indices']
self.map_draw_freq = updater_params['map_draw_freq']
def update(self, sess, visualize = False):
batch = {}
for i, dp in enumerate(self.data_provider):
provider_batch = dp.dequeue_batch()
for k in provider_batch:
if k in batch:
batch[k].append(provider_batch[k])
else:
batch[k] = [provider_batch[k]]
        for k in ['action', 'action_post', self.state_desc]:
batch[k] = np.concatenate(batch[k], axis=0)
feed_dict = {
self.wm.states : batch[self.state_desc],
self.wm.action : batch['action'],
self.wm.action_post : batch['action_post']
}
self.targets['global_step'] = self.global_step
res = sess.run(self.targets, feed_dict = feed_dict)
global_step = res['global_step']
if self.map_draw_mode is not None and global_step % self.map_draw_freq == 0:
if self.map_draw_mode == 'specified_indices':
map_draw_res = []
for idx in self.map_draw_example_indices:
obs_for_actor = [batch[self.state_desc][idx][t] for t in self.map_draw_timestep_indices]
action_samples = self.action_sampler.sample_actions()
action, entropy, estimated_world_loss = self.um.act(sess, action_samples, obs_for_actor)
to_add = {'example_id' : idx, 'action_sample' : action, 'estimated_world_loss' : estimated_world_loss,
'action_samples' : action_samples, 'depths1' : batch[self.state_desc][idx],
'action' : batch['action'][idx], 'action_post' : batch['action_post'][idx]}
map_draw_res.append(to_add)
res['map_draw'] = map_draw_res
res = self.postprocessor.postprocess(res, batch)
return res, global_step
class ActionUncertaintyUpdater:
def __init__(self, models, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params):
self.data_provider = data_provider \
if isinstance(data_provider, list) else [data_provider]
self.wm = models['world_model']
self.um = models['uncertainty_model']
self.postprocessor = postprocessor
self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
self.act_lr_params, act_lr = get_learning_rate(self.global_step, ** learning_rate_params['world_model']['act_model'])
self.um_lr_params, um_lr = get_learning_rate(self.global_step, ** learning_rate_params['uncertainty_model'])
act_opt_params, act_opt = get_optimizer(act_lr, self.wm.act_loss, self.global_step, optimizer_params['world_model']['act_model'], var_list = self.wm.act_var_list + self.wm.encode_var_list)
um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'], var_list = self.um.var_list)
self.global_step = self.global_step / 2
self.targets = {'act_pred' : self.wm.act_pred, 'act_loss' : self.wm.act_loss,
'act_optimizer' : act_opt, 'um_optimizer' : um_opt,
'estimated_world_loss' : self.um.estimated_world_loss,
'um_loss' : self.um.uncertainty_loss, 'loss_per_example' : self.um.true_loss,
'global_step' : self.global_step}
def update(self, sess, visualize = False):
batch = {}
for i, dp in enumerate(self.data_provider):
provider_batch = dp.dequeue_batch()
for k in provider_batch:
if k in batch:
batch[k].append(provider_batch[k])
else:
batch[k] = [provider_batch[k]]
for k in ['action', 'action_post', 'depths1']:
batch[k] = np.concatenate(batch[k], axis=0)
state_desc = 'depths1'
#depths, actions, actions_post, next_depth = postprocess_batch_depth(batch, state_desc)
feed_dict = {
self.wm.states : batch[state_desc],
self.wm.action : batch['action'],
self.wm.action_post : batch['action_post']
}
self.targets['global_step'] = self.global_step
res = sess.run(self.targets, feed_dict = feed_dict)
glstep = res['global_step']
res = self.postprocessor.postprocess(res, batch)
return res, glstep
class LatentUncertaintyUpdater:
def __init__(self, world_model, uncertainty_model, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params = None):
self.data_provider = data_provider
self.wm = world_model
self.um = uncertainty_model
self.postprocessor = postprocessor
self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
self.act_lr_params, act_lr = get_learning_rate(self.global_step, ** learning_rate_params['world_model']['act_model'])
self.fut_lr_params, fut_lr = get_learning_rate(self.global_step, ** learning_rate_params['world_model']['fut_model'])
self.um_lr_params, um_lr = get_learning_rate(self.global_step, ** learning_rate_params['uncertainty_model'])
act_opt_params, act_opt = get_optimizer(act_lr, self.wm.act_loss, self.global_step, optimizer_params['world_model']['act_model'], var_list = self.wm.act_var_list + self.wm.encode_var_list)
fut_opt_params, fut_opt = get_optimizer(fut_lr, self.wm.fut_loss, self.global_step, optimizer_params['world_model']['fut_model'], var_list = self.wm.fut_var_list)
um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'], var_list = self.um.var_list)
self.global_step = self.global_step / 3
self.targets = {'encoding_i' : self.wm.encoding_i, 'encoding_f' : self.wm.encoding_f,
'fut_pred' : self.wm.fut_pred, 'act_pred' : self.wm.act_pred,
'act_optimizer' : act_opt, 'fut_optimizer' : fut_opt,
'act_lr' : act_lr, 'fut_lr' : fut_lr,
'fut_loss' : self.wm.fut_loss, 'act_loss' : self.wm.act_loss,
'estimated_world_loss' : self.um.estimated_world_loss
}
self.targets.update({'um_loss' : self.um.uncertainty_loss, 'um_lr' : um_lr, 'um_optimizer' : um_opt,
'global_step' : self.global_step, 'loss_per_example' : self.um.true_loss})
self.state_desc = updater_params['state_desc']
#checking that we don't have repeat names
def start(self, sess):
self.data_provider.start_runner(sess)
sess.run(tf.global_variables_initializer())
def update(self, sess, visualize = False):
batch = self.data_provider.dequeue_batch()
state_desc = self.state_desc
#depths, actions, actions_post, next_depth = postprocess_batch_depth(batch, state_desc)
feed_dict = {
self.wm.states : batch[state_desc],
self.wm.action : batch['action'],
self.wm.action_post : batch['action_post']
}
res = sess.run(self.targets, feed_dict = feed_dict)
res = self.postprocessor.postprocess(res, batch)
return res
class UncertaintyUpdater:
def __init__(self, world_model, uncertainty_model, data_provider, optimizer_params, learning_rate_params, postprocessor):
self.data_provider = data_provider
self.world_model = world_model
self.um = uncertainty_model
self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
self.wm_lr_params, wm_learning_rate = get_learning_rate(self.global_step, ** learning_rate_params['world_model'])
self.wm_opt_params, wm_opt = get_optimizer(wm_learning_rate, self.world_model.loss, self.global_step, optimizer_params['world_model'])
self.world_model_targets = {'given' : self.world_model.processed_input, 'loss' : self.world_model.loss, 'loss_per_example' : self.world_model.loss_per_example, 'learning_rate' : wm_learning_rate, 'optimizer' : wm_opt, 'prediction' : self.world_model.pred, 'tv' : self.world_model.tv}
self.inc_step = self.global_step.assign_add(1)
self.um_lr_params, um_learning_rate = get_learning_rate(self.global_step, **learning_rate_params['uncertainty_model'])
self.um_lr_params, um_opt = get_optimizer(um_learning_rate, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'])
self.global_step = self.global_step / 2
self.um_targets = {'loss' : self.um.uncertainty_loss, 'learning_rate' : um_learning_rate, 'optimizer' : um_opt, 'global_step' : self.global_step}
self.postprocessor = postprocessor
self.world_action_time = self.world_model.action.get_shape().as_list()[1]
def start(self, sess):
self.data_provider.start_runner(sess)
sess.run(tf.global_variables_initializer())
def update(self, sess, visualize = False):
batch = self.data_provider.dequeue_batch()
state_desc = self.um.state_descriptor
wm_feed_dict = {
self.world_model.states : batch[state_desc],
self.world_model.action : batch['action'][:, -self.world_action_time : ]
}
world_model_res = sess.run(self.world_model_targets, feed_dict = wm_feed_dict)
um_feed_dict = {
self.um.s_i : batch[state_desc][:, :-1],
self.um.action_sample : batch['action'][:, -1],
self.um.true_loss : world_model_res['loss_per_example']
}
um_res = sess.run(self.um_targets, feed_dict = um_feed_dict)
        wm_res_new = dict(('wm_' + k, v) for k, v in world_model_res.items())
        um_res_new = dict(('um_' + k, v) for k, v in um_res.items())
wm_res_new.update(um_res_new)
res = wm_res_new
res['global_step'] = res.pop('um_global_step')
res = self.postprocessor.postprocess(wm_res_new, batch)
return res
class DamianWMUncertaintyUpdater:
def __init__(self, world_model, uncertainty_model, data_provider, optimizer_params, learning_rate_params, postprocessor):
self.data_provider = data_provider
self.world_model = world_model
self.um = uncertainty_model
self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
self.wm_lr_params, wm_learning_rate = get_learning_rate(self.global_step, ** learning_rate_params['world_model'])
self.wm_opt_params, wm_opt = get_optimizer(wm_learning_rate, self.world_model.loss, self.global_step, optimizer_params['world_model'])
self.world_model_targets = {'given' : self.world_model.processed_input, 'loss' : self.world_model.loss, 'learning_rate' : wm_learning_rate, 'optimizer' : wm_opt, 'prediction' : self.world_model.pred, 'tv' : self.world_model.tv}
self.inc_step = self.global_step.assign_add(1)
self.wm_lr_params, um_learning_rate = get_learning_rate(self.global_step, **learning_rate_params['uncertainty_model'])
self.wm_lr_params, um_opt = get_optimizer(um_learning_rate, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'])
self.um_targets = {'loss' : self.um.uncertainty_loss, 'learning_rate' : um_learning_rate, 'optimizer' : um_opt, 'global_step' : self.global_step}
self.postprocessor = postprocessor
def start(self, sess):
self.data_provider.start_runner(sess)
sess.run(tf.global_variables_initializer())
def update(self, sess, visualize = False):
batch = self.data_provider.dequeue_batch()
depths, objects, actions, action_ids, next_depth = postprocess_batch_for_actionmap(batch)
wm_feed_dict = {
self.world_model.s_i : depths,
self.world_model.s_f : next_depth,
self.world_model.action : actions,
self.world_model.action_id : action_ids,
self.world_model.objects : objects
}
world_model_res = sess.run(self.world_model_targets, feed_dict = wm_feed_dict)
if visualize:
cv2.imshow('pred', world_model_res['prediction'][0] / 4.)#TODO clean up w colors
cv2.imshow('tv', world_model_res['tv'][0] / 4.)
cv2.imshow('processed0', world_model_res['given'][0, 0] / 4.)
cv2.imshow('processed1', world_model_res['given'][0, 1] / 4.)
cv2.waitKey(1)
print('wm loss: ' + str(world_model_res['loss']))
um_feed_dict = {
self.um.s_i : depths,
self.um.action_sample : actions[:, -1],
self.um.true_loss : np.array([world_model_res['loss']])
}
um_res = sess.run(self.um_targets, feed_dict = um_feed_dict)
        wm_res_new = dict(('wm_' + k, v) for k, v in world_model_res.items())
        um_res_new = dict(('um_' + k, v) for k, v in um_res.items())
        wm_res_new.update(um_res_new)
        wm_res_new['global_step'] = wm_res_new.pop('um_global_step')
res = self.postprocessor.postprocess(wm_res_new, batch)
return res
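# Typical driving loop (a sketch, not from the original codebase; model and
# data-provider construction are omitted and `num_steps` is an assumed name):
#
#   updater = LatentUncertaintyUpdater(world_model, uncertainty_model,
#                                      data_provider, optimizer_params,
#                                      learning_rate_params, postprocessor,
#                                      updater_params={'state_desc': 'depths1'})
#   with tf.Session() as sess:
#       updater.start(sess)
#       for _ in range(num_steps):
#           res = updater.update(sess)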
|
python
|
'''
Given an array of integers, there is a sliding window of size k which is moving from the left side of the array to the right, one element at a time. You can only interact with the k numbers in the window. Return an array consisting of the maximum value of each window of elements.
'''
def sliding_window_max(arr, k):
    output = []
    # slide the window start from index 0 through len(arr) - k
    for i in range(len(arr) - k + 1):
        # scan the k values in the current window for its maximum
        highest = arr[i]
        for j in range(1, k):
            if arr[i + j] > highest:
                highest = arr[i + j]
        output.append(highest)
    return output
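# An O(n) alternative (a sketch, not part of the original exercise): keep a
# deque of indices whose values are monotonically decreasing, so the front
# of the deque always indexes the current window's maximum.
from collections import deque

def sliding_window_max_deque(arr, k):
    dq = deque()  # candidate indices; arr[dq[0]] is the window maximum
    output = []
    for i, value in enumerate(arr):
        # drop the front index once it slides out of the window
        if dq and dq[0] <= i - k:
            dq.popleft()
        # drop smaller values; they can never become a window maximum
        while dq and arr[dq[-1]] <= value:
            dq.pop()
        dq.append(i)
        if i >= k - 1:
            output.append(arr[dq[0]])
    return output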
|
python
|
# terrascript/provider/chanzuckerberg/snowflake.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:27:17 UTC)
import terrascript
class snowflake(terrascript.Provider):
"""Terraform provider for managing Snowflake accounts"""
__description__ = "Terraform provider for managing Snowflake accounts"
__namespace__ = "chanzuckerberg"
__name__ = "snowflake"
__source__ = "https://github.com/chanzuckerberg/terraform-provider-snowflake"
__version__ = "0.25.19"
__published__ = "2021-09-10T23:25:20Z"
__tier__ = "community"
__all__ = ["snowflake"]
|
python
|
def move_tower(height, from_pole, middle_pole, to_pole):
if height >= 1:
move_tower(height-1, from_pole, to_pole, middle_pole)
print "move disk from {} to {}".format(from_pole, to_pole)
move_tower(height-1, middle_pole, from_pole, to_pole)
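# Usage sketch (assumed invocation, not in the original snippet):
#   move_tower(3, 'A', 'B', 'C')
# prints the seven moves that transfer a 3-disk tower from pole A to pole C,
# using pole B as the intermediate.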
|
python
|
from getratings.models.ratings import Ratings
class NA_Karthus_Mid_Aatrox(Ratings):
pass
class NA_Karthus_Mid_Ahri(Ratings):
pass
class NA_Karthus_Mid_Akali(Ratings):
pass
class NA_Karthus_Mid_Alistar(Ratings):
pass
class NA_Karthus_Mid_Amumu(Ratings):
pass
class NA_Karthus_Mid_Anivia(Ratings):
pass
class NA_Karthus_Mid_Annie(Ratings):
pass
class NA_Karthus_Mid_Ashe(Ratings):
pass
class NA_Karthus_Mid_AurelionSol(Ratings):
pass
class NA_Karthus_Mid_Azir(Ratings):
pass
class NA_Karthus_Mid_Bard(Ratings):
pass
class NA_Karthus_Mid_Blitzcrank(Ratings):
pass
class NA_Karthus_Mid_Brand(Ratings):
pass
class NA_Karthus_Mid_Braum(Ratings):
pass
class NA_Karthus_Mid_Caitlyn(Ratings):
pass
class NA_Karthus_Mid_Camille(Ratings):
pass
class NA_Karthus_Mid_Cassiopeia(Ratings):
pass
class NA_Karthus_Mid_Chogath(Ratings):
pass
class NA_Karthus_Mid_Corki(Ratings):
pass
class NA_Karthus_Mid_Darius(Ratings):
pass
class NA_Karthus_Mid_Diana(Ratings):
pass
class NA_Karthus_Mid_Draven(Ratings):
pass
class NA_Karthus_Mid_DrMundo(Ratings):
pass
class NA_Karthus_Mid_Ekko(Ratings):
pass
class NA_Karthus_Mid_Elise(Ratings):
pass
class NA_Karthus_Mid_Evelynn(Ratings):
pass
class NA_Karthus_Mid_Ezreal(Ratings):
pass
class NA_Karthus_Mid_Fiddlesticks(Ratings):
pass
class NA_Karthus_Mid_Fiora(Ratings):
pass
class NA_Karthus_Mid_Fizz(Ratings):
pass
class NA_Karthus_Mid_Galio(Ratings):
pass
class NA_Karthus_Mid_Gangplank(Ratings):
pass
class NA_Karthus_Mid_Garen(Ratings):
pass
class NA_Karthus_Mid_Gnar(Ratings):
pass
class NA_Karthus_Mid_Gragas(Ratings):
pass
class NA_Karthus_Mid_Graves(Ratings):
pass
class NA_Karthus_Mid_Hecarim(Ratings):
pass
class NA_Karthus_Mid_Heimerdinger(Ratings):
pass
class NA_Karthus_Mid_Illaoi(Ratings):
pass
class NA_Karthus_Mid_Irelia(Ratings):
pass
class NA_Karthus_Mid_Ivern(Ratings):
pass
class NA_Karthus_Mid_Janna(Ratings):
pass
class NA_Karthus_Mid_JarvanIV(Ratings):
pass
class NA_Karthus_Mid_Jax(Ratings):
pass
class NA_Karthus_Mid_Jayce(Ratings):
pass
class NA_Karthus_Mid_Jhin(Ratings):
pass
class NA_Karthus_Mid_Jinx(Ratings):
pass
class NA_Karthus_Mid_Kalista(Ratings):
pass
class NA_Karthus_Mid_Karma(Ratings):
pass
class NA_Karthus_Mid_Karthus(Ratings):
pass
class NA_Karthus_Mid_Kassadin(Ratings):
pass
class NA_Karthus_Mid_Katarina(Ratings):
pass
class NA_Karthus_Mid_Kayle(Ratings):
pass
class NA_Karthus_Mid_Kayn(Ratings):
pass
class NA_Karthus_Mid_Kennen(Ratings):
pass
class NA_Karthus_Mid_Khazix(Ratings):
pass
class NA_Karthus_Mid_Kindred(Ratings):
pass
class NA_Karthus_Mid_Kled(Ratings):
pass
class NA_Karthus_Mid_KogMaw(Ratings):
pass
class NA_Karthus_Mid_Leblanc(Ratings):
pass
class NA_Karthus_Mid_LeeSin(Ratings):
pass
class NA_Karthus_Mid_Leona(Ratings):
pass
class NA_Karthus_Mid_Lissandra(Ratings):
pass
class NA_Karthus_Mid_Lucian(Ratings):
pass
class NA_Karthus_Mid_Lulu(Ratings):
pass
class NA_Karthus_Mid_Lux(Ratings):
pass
class NA_Karthus_Mid_Malphite(Ratings):
pass
class NA_Karthus_Mid_Malzahar(Ratings):
pass
class NA_Karthus_Mid_Maokai(Ratings):
pass
class NA_Karthus_Mid_MasterYi(Ratings):
pass
class NA_Karthus_Mid_MissFortune(Ratings):
pass
class NA_Karthus_Mid_MonkeyKing(Ratings):
pass
class NA_Karthus_Mid_Mordekaiser(Ratings):
pass
class NA_Karthus_Mid_Morgana(Ratings):
pass
class NA_Karthus_Mid_Nami(Ratings):
pass
class NA_Karthus_Mid_Nasus(Ratings):
pass
class NA_Karthus_Mid_Nautilus(Ratings):
pass
class NA_Karthus_Mid_Nidalee(Ratings):
pass
class NA_Karthus_Mid_Nocturne(Ratings):
pass
class NA_Karthus_Mid_Nunu(Ratings):
pass
class NA_Karthus_Mid_Olaf(Ratings):
pass
class NA_Karthus_Mid_Orianna(Ratings):
pass
class NA_Karthus_Mid_Ornn(Ratings):
pass
class NA_Karthus_Mid_Pantheon(Ratings):
pass
class NA_Karthus_Mid_Poppy(Ratings):
pass
class NA_Karthus_Mid_Quinn(Ratings):
pass
class NA_Karthus_Mid_Rakan(Ratings):
pass
class NA_Karthus_Mid_Rammus(Ratings):
pass
class NA_Karthus_Mid_RekSai(Ratings):
pass
class NA_Karthus_Mid_Renekton(Ratings):
pass
class NA_Karthus_Mid_Rengar(Ratings):
pass
class NA_Karthus_Mid_Riven(Ratings):
pass
class NA_Karthus_Mid_Rumble(Ratings):
pass
class NA_Karthus_Mid_Ryze(Ratings):
pass
class NA_Karthus_Mid_Sejuani(Ratings):
pass
class NA_Karthus_Mid_Shaco(Ratings):
pass
class NA_Karthus_Mid_Shen(Ratings):
pass
class NA_Karthus_Mid_Shyvana(Ratings):
pass
class NA_Karthus_Mid_Singed(Ratings):
pass
class NA_Karthus_Mid_Sion(Ratings):
pass
class NA_Karthus_Mid_Sivir(Ratings):
pass
class NA_Karthus_Mid_Skarner(Ratings):
pass
class NA_Karthus_Mid_Sona(Ratings):
pass
class NA_Karthus_Mid_Soraka(Ratings):
pass
class NA_Karthus_Mid_Swain(Ratings):
pass
class NA_Karthus_Mid_Syndra(Ratings):
pass
class NA_Karthus_Mid_TahmKench(Ratings):
pass
class NA_Karthus_Mid_Taliyah(Ratings):
pass
class NA_Karthus_Mid_Talon(Ratings):
pass
class NA_Karthus_Mid_Taric(Ratings):
pass
class NA_Karthus_Mid_Teemo(Ratings):
pass
class NA_Karthus_Mid_Thresh(Ratings):
pass
class NA_Karthus_Mid_Tristana(Ratings):
pass
class NA_Karthus_Mid_Trundle(Ratings):
pass
class NA_Karthus_Mid_Tryndamere(Ratings):
pass
class NA_Karthus_Mid_TwistedFate(Ratings):
pass
class NA_Karthus_Mid_Twitch(Ratings):
pass
class NA_Karthus_Mid_Udyr(Ratings):
pass
class NA_Karthus_Mid_Urgot(Ratings):
pass
class NA_Karthus_Mid_Varus(Ratings):
pass
class NA_Karthus_Mid_Vayne(Ratings):
pass
class NA_Karthus_Mid_Veigar(Ratings):
pass
class NA_Karthus_Mid_Velkoz(Ratings):
pass
class NA_Karthus_Mid_Vi(Ratings):
pass
class NA_Karthus_Mid_Viktor(Ratings):
pass
class NA_Karthus_Mid_Vladimir(Ratings):
pass
class NA_Karthus_Mid_Volibear(Ratings):
pass
class NA_Karthus_Mid_Warwick(Ratings):
pass
class NA_Karthus_Mid_Xayah(Ratings):
pass
class NA_Karthus_Mid_Xerath(Ratings):
pass
class NA_Karthus_Mid_XinZhao(Ratings):
pass
class NA_Karthus_Mid_Yasuo(Ratings):
pass
class NA_Karthus_Mid_Yorick(Ratings):
pass
class NA_Karthus_Mid_Zac(Ratings):
pass
class NA_Karthus_Mid_Zed(Ratings):
pass
class NA_Karthus_Mid_Ziggs(Ratings):
pass
class NA_Karthus_Mid_Zilean(Ratings):
pass
class NA_Karthus_Mid_Zyra(Ratings):
pass
|
python
|
# WARNING: you are on the master branch; please refer to examples on the branch corresponding to your `cortex version` (e.g. for version 0.24.*, run `git checkout -b 0.24` or switch to the `0.24` branch on GitHub)
import mlflow.sklearn
import numpy as np
class PythonPredictor:
def __init__(self, config, python_client):
self.client = python_client
def load_model(self, model_path):
return mlflow.sklearn.load_model(model_path)
def predict(self, payload, query_params):
model_name = query_params["model"]
model_version = query_params.get("version", "latest")
model = self.client.get_model(model_name, model_version)
model_input = [
payload["cylinders"],
payload["displacement"],
payload["horsepower"],
payload["weight"],
payload["acceleration"],
]
result = model.predict([model_input]).item()
return {"prediction": result, "model": {"name": model_name, "version": model_version}}
|
python
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""PyVoiceChanger."""
import sys
from datetime import datetime
from subprocess import call
from time import sleep
from PyQt5.QtCore import QProcess, Qt, QTimer
from PyQt5.QtGui import QColor, QCursor, QIcon
from PyQt5.QtWidgets import (QApplication, QDial, QGraphicsDropShadowEffect,
QGroupBox, QLabel, QMainWindow, QMenu,
QShortcut, QSystemTrayIcon, QVBoxLayout)
from anglerfish import (check_encoding, make_logger, make_post_exec_msg,
set_process_name, set_single_instance,
set_desktop_launcher)
__version__ = '1.0.0'
__license__ = ' GPLv3+ LGPLv3+ '
__author__ = ' juancarlos '
__email__ = ' [email protected] '
__url__ = 'https://github.com/juancarlospaco/pyvoicechanger#pyvoicechanger'
start_time = datetime.now()
desktop_file_content = """
[Desktop Entry]
Comment=Voice Changer App.
Exec=chrt --idle 0 pyvoicechanger.py
GenericName=Voice Changer App.
Icon=audio-input-microphone
Name=PyVoiceChanger
StartupNotify=true
Terminal=false
Type=Application
Categories=Utility
X-DBUS-ServiceName=pyvoicechanger
X-KDE-StartupNotify=true
"""
###############################################################################
class MainWindow(QMainWindow):
"""Voice Changer main window."""
def __init__(self, parent=None):
super(MainWindow, self).__init__()
self.statusBar().showMessage("Move Dial to Deform Microphone Voice !.")
self.setWindowTitle(__doc__)
self.setMinimumSize(240, 240)
self.setMaximumSize(480, 480)
self.resize(self.minimumSize())
self.setWindowIcon(QIcon.fromTheme("audio-input-microphone"))
self.tray = QSystemTrayIcon(self)
self.center()
QShortcut("Ctrl+q", self, activated=lambda: self.close())
self.menuBar().addMenu("&File").addAction("Quit", lambda: exit())
self.menuBar().addMenu("Sound").addAction(
"STOP !", lambda: call('killall rec', shell=True))
windowMenu = self.menuBar().addMenu("&Window")
windowMenu.addAction("Hide", lambda: self.hide())
windowMenu.addAction("Minimize", lambda: self.showMinimized())
windowMenu.addAction("Maximize", lambda: self.showMaximized())
windowMenu.addAction("Restore", lambda: self.showNormal())
windowMenu.addAction("FullScreen", lambda: self.showFullScreen())
windowMenu.addAction("Center", lambda: self.center())
windowMenu.addAction("Top-Left", lambda: self.move(0, 0))
windowMenu.addAction("To Mouse", lambda: self.move_to_mouse_position())
# widgets
group0 = QGroupBox("Voice Deformation")
self.setCentralWidget(group0)
self.process = QProcess(self)
self.process.error.connect(
lambda: self.statusBar().showMessage("Info: Process Killed", 5000))
self.control = QDial()
self.control.setRange(-10, 20)
self.control.setSingleStep(5)
self.control.setValue(0)
self.control.setCursor(QCursor(Qt.OpenHandCursor))
self.control.sliderPressed.connect(
lambda: self.control.setCursor(QCursor(Qt.ClosedHandCursor)))
self.control.sliderReleased.connect(
lambda: self.control.setCursor(QCursor(Qt.OpenHandCursor)))
self.control.valueChanged.connect(
lambda: self.control.setToolTip("<b>" + str(self.control.value())))
self.control.valueChanged.connect(
lambda: self.statusBar().showMessage(
"Voice deformation: " + str(self.control.value()), 5000))
self.control.valueChanged.connect(self.run)
self.control.valueChanged.connect(lambda: self.process.kill())
# Graphic effect
self.glow = QGraphicsDropShadowEffect(self)
self.glow.setOffset(0)
self.glow.setBlurRadius(99)
self.glow.setColor(QColor(99, 255, 255))
self.control.setGraphicsEffect(self.glow)
self.glow.setEnabled(False)
# Timer to start
self.slider_timer = QTimer(self)
self.slider_timer.setSingleShot(True)
self.slider_timer.timeout.connect(self.on_slider_timer_timeout)
# an icon and set focus
QLabel(self.control).setPixmap(
QIcon.fromTheme("audio-input-microphone").pixmap(32))
self.control.setFocus()
QVBoxLayout(group0).addWidget(self.control)
self.menu = QMenu(__doc__)
self.menu.addAction(__doc__).setDisabled(True)
self.menu.setIcon(self.windowIcon())
self.menu.addSeparator()
self.menu.addAction(
"Show / Hide",
lambda: self.hide() if self.isVisible() else self.showNormal())
self.menu.addAction("STOP !", lambda: call('killall rec', shell=True))
self.menu.addSeparator()
self.menu.addAction("Quit", lambda: exit())
self.tray.setContextMenu(self.menu)
self.make_trayicon()
def run(self):
"""Run/Stop the QTimer."""
if self.slider_timer.isActive():
self.slider_timer.stop()
self.glow.setEnabled(True)
call('killall rec', shell=True)
self.slider_timer.start(3000)
def on_slider_timer_timeout(self):
"""Run subprocess to deform voice."""
self.glow.setEnabled(False)
value = int(self.control.value()) * 100
cmd = 'play -q -V0 "|rec -q -V0 -n -d -R riaa bend pitch {0} "'
command = cmd.format(int(value))
log.debug("Voice Deformation Value: {0}".format(value))
log.debug("Voice Deformation Command: {0}".format(command))
self.process.start(command)
if self.isVisible():
self.statusBar().showMessage("Minimizing to System TrayIcon", 3000)
log.debug("Minimizing Main Window to System TrayIcon now...")
sleep(3)
self.hide()
def center(self):
"""Center Window on the Current Screen,with Multi-Monitor support."""
window_geometry = self.frameGeometry()
mousepointer_position = QApplication.desktop().cursor().pos()
screen = QApplication.desktop().screenNumber(mousepointer_position)
centerPoint = QApplication.desktop().screenGeometry(screen).center()
window_geometry.moveCenter(centerPoint)
self.move(window_geometry.topLeft())
def move_to_mouse_position(self):
"""Center the Window on the Current Mouse position."""
window_geometry = self.frameGeometry()
window_geometry.moveCenter(QApplication.desktop().cursor().pos())
self.move(window_geometry.topLeft())
def make_trayicon(self):
"""Make a Tray Icon."""
if self.windowIcon() and __doc__:
self.tray.setIcon(self.windowIcon())
self.tray.setToolTip(__doc__)
self.tray.activated.connect(
lambda: self.hide() if self.isVisible()
else self.showNormal())
return self.tray.show()
###############################################################################
def main():
"""Main Loop."""
global log
log = make_logger("pyvoicechanger")
log.debug(__doc__ + __version__ + __url__)
check_encoding()
set_process_name("pyvoicechanger")
set_single_instance("pyvoicechanger")
set_desktop_launcher("pyvoicechanger", desktop_file_content)
application = QApplication(sys.argv)
application.setApplicationName("pyvoicechanger")
application.setOrganizationName("pyvoicechanger")
application.setOrganizationDomain("pyvoicechanger")
application.setWindowIcon(QIcon.fromTheme("audio-input-microphone"))
application.aboutToQuit.connect(lambda: call('killall rec', shell=True))
mainwindow = MainWindow()
mainwindow.show()
make_post_exec_msg(start_time)
sys.exit(application.exec_())
if __name__ == '__main__':
main()
|
python
|
from setuptools import setup
setup(
name='ctab',
version='0.1',
author='Thomas Hunger',
author_email='[email protected]',
packages=[
'ctab',
]
)
|
python
|
""" Methods to setup the logging """
import os
import yaml
import platform
import logging
import coloredlogs
import logging.config
from funscript_editor.definitions import WINDOWS_LOG_CONFIG_FILE, LINUX_LOG_CONFIG_FILE
from funscript_editor.utils.config import SETTINGS
def create_log_directories(config: dict) -> None:
""" create all log directories for a log configuration
Args:
config (dict): the logging configuration dictionary
"""
if isinstance(config, dict):
for k in config.keys():
create_log_directories(config[k])
if k == 'filename':
os.makedirs(os.path.dirname(os.path.abspath(config[k])), exist_ok=True)
def get_log_config_path() -> str:
""" Get the log config file path for current platfrom
Returns:
str: the log config file path
"""
return WINDOWS_LOG_CONFIG_FILE if platform.system() == 'Windows' else LINUX_LOG_CONFIG_FILE
class LoggerInterface:
""" Logger interface
Args:
name (str): name of the logger instance
"""
def __init__(self, name):
self.name = name
def debug(self, *args):
pass
def info(self, *args):
pass
def warning(self, *args):
pass
def error(self, *args):
pass
def critical(self, *args, exc_info=None):
pass
class DevZeroLogger(LoggerInterface):
""" Logger replacement to suppresses all log messages
Args:
name (str): name of the logger instance
"""
def __init__(self, name):
self.name = name
def debug(self, *args):
pass
def info(self, *args):
pass
def warning(self, *args):
pass
def error(self, *args):
pass
def critical(self, *args, exc_info=None):
pass
class PythonLogger(LoggerInterface):
""" Python Logger Wrapper
Args:
name (str): name of the logger instance
"""
def __init__(self, name):
self.logger = logging.getLogger(name)
def debug(self, *args):
self.logger.debug(*args)
def info(self, *args):
self.logger.info(*args)
def warning(self, *args):
self.logger.warning(*args)
def error(self, *args):
self.logger.error(*args)
def critical(self, *args, exc_info=None):
self.logger.critical(*args, exc_info=exc_info)
def getLogger(name) -> LoggerInterface:
""" Get logger wrapper for python logging.getLogger
Args:
name (str): name of the logger instance
"""
if platform.system() == 'Windows':
if SETTINGS['logging']:
return PythonLogger(name)
else:
return DevZeroLogger(name)
else:
return PythonLogger(name)
def get_logfiles_paths() -> list:
""" Get the logfiles paths from log config
Returns:
        list: all logfiles paths
"""
try:
result = []
config_path = get_log_config_path()
with open(config_path, 'rt') as f:
for line in f.readlines():
if "filename:" in line:
result.append(line.split(':')[1].strip())
return result
    except Exception:
        return []
def setup_logging(
default_level :int = logging.INFO,
env_key :str = 'LOG_CFG') -> None:
""" Logging Setup
Args:
        default_level (int): logging level, e.g. `logging.DEBUG` (default is `logging.INFO`).
env_key (str, optional): env variable name to load a configuration file via environment variable (default is `LOG_CFG`).
"""
config_path = get_log_config_path()
value = os.getenv(env_key, None)
if value: config_path = value
if os.path.exists(config_path):
with open(config_path, 'rt') as f:
try:
config = yaml.safe_load(f.read())
create_log_directories(config)
logging.config.dictConfig(config)
coloredlogs.install(level=default_level)
                logging.debug('Logging setup completed')
except Exception as e:
print(e)
print('Error in Logging Configuration. Using default configs')
logging.basicConfig(level=default_level)
coloredlogs.install(level=default_level)
else:
logging.basicConfig(level=default_level)
coloredlogs.install(level=default_level)
print('Failed to load configuration file. Using default configs')
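# Minimal usage sketch (the module path is assumed from the imports above):
#   from funscript_editor.utils.logging import setup_logging, getLogger
#   setup_logging()
#   log = getLogger(__name__)
#   log.info('logging initialized')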
|
python
|
#####################################################
# Read active and reactive power from the atm90e32 then
# store within mongodb.
#
# copyright Margaret Johnson, 2020.
# Please credit when evolving your code with this code.
########################################################
from FHmonitor.error_handling import handle_exception
from FHmonitor.atm90_e32_pi import ATM90e32
from FHmonitor.store import MongoDB
from FHmonitor.calibrate import Calibrate
import threading # for blinking LED.
import board # for blinking LED.
import digitalio # for blinking LED.
import logging
logger = logging.getLogger(__name__)
class Monitor:
"""Take active and reactive power readings
from an atm90e32 and store the readings in
the Rasp Pi's mongodb.
Example::
m = Monitor()
m.init_sensor()
Make sure to read all the parameters that can be input to
:meth:`~FHmonitor.monitor.Monitor.init_sensor`.
The values depend on the Power Transformer and CTs being used.
The :meth:`~FHmonitor.monitor.Monitor.blink` method is useful
to turn on and off the LED (for debugging purposes).
"""
def __init__(self, led_pin=None):
self.db = None
self.energy_sensor = None
        if led_pin is None:
            led_pin = board.D18  # Default wiring is GPIO 18.
        self.led = digitalio.DigitalInOut(led_pin)
self.led.direction = digitalio.Direction.OUTPUT
####################################################
# Initialize the energy sensor. The properties are
# are written to atm90e32 registers during initialization.
# They are specific to the Power and Current Transformers
# being used. An exception occurs if the write cannot
# be verified.
####################################################
def init_sensor(self):
"""
        Initialize the atm90e32 by setting the calibration register properties.
Calibration is discussed within our
`FitHome wiki <https://github.com/BitKnitting/FitHome/wiki/ElectricityMonitor#calibration>`_ .
:param lineFreq: 4485 for 60 Hz (North America, Default), 389 for 50 Hz (rest of world)
:param PGAGain: Programmable Gain - 0 for 10A (1x), 21 for 100A (2x, Default), 42 for 100A - 200A (4x)
:param VoltageGain: Dependent on transformer being used. Should be measured prior to taking readings.
See the Calibration discussion linked to above.
:param CurrentGainCT1: Dependent on the CTs being used. Should be measured prior to taking readings.
See the Calibration discussion linked to above.
:param CurrentGainCT2: Similar to CurrentGainCT1, but for the second CT.
:return: True if meter is initialized.
False if meter could not be initialized.
""" # noqa
        # Get the calibration parameters
c = Calibrate()
try:
self.energy_sensor = ATM90e32(c.lineFreq, c.PGAGain, c.VoltageGain,
c.CurrentGain, 0, c.CurrentGain)
logger.info('Energy meter has been initialized.')
# We have an instance of the atm90e32. Let's check if we get
# sensible readings.
sys0 = self.energy_sensor.sys_status0
if (sys0 == 0xFFFF or sys0 == 0):
e = 'EXCEPTION: Cannot connect to the energy meter.'
handle_exception(e)
logger.info('Energy meter is working.')
return True
except Exception as e:
handle_exception(e)
return False
def open_db(self, mongodb="mongodb://localhost:27017/", db="FitHome",
collection="aggregate"):
"""Opens and maintains an instance to the mongo database where
the power readings will be stored.
:param mongodb: URI to the mongo database running on the Raspberry Pi
:param db: Database within mongodb that holds the readings.
:param collection: name of the collection where the readings are held.
:return: True if the database can be opened.
"""
try:
self.db = MongoDB(mongodb, db, collection)
except Exception as e:
self.db = None
handle_exception(e)
return False
return True
def close_db(self):
"""It is more efficient to keep the mongodb open while
using it. However, if you know you will not be doing
any more transactions, it is good to clean up the
connections.
"""
if self.db is not None:
self.db.close()
####################################################
# Get the current active and reactive power readings.
####################################################
def take_reading(self):
"""Read the active and reactive power readings from
the atm90e32 registers.
:return: (Pa, Pr) Where Pa is the float value for the
active power reading and Pr is the float value for
the reactive power reading.
"""
Pa = self.energy_sensor.total_active_power
Pr = self.energy_sensor.total_reactive_power
logger.info(
f'Active Power reading: {Pa:.2f} Reactive Power Reading: {Pr:.2f}')
return Pa, Pr
####################################################
# Store the reading into mongo db.
####################################################
def store_reading(self, Pa, Pr):
"""Store the active and reactive power readings into
the mongodb database.
:param Pa: A floating value representing the active power reading.
Obtained through a call to take_reading().
:param Pr: A floating value representing the reactive power reading.
As with Pa, use take_reading() to retrieve the value from the
energy meter.
Returns True if the readings could be stored.
"""
if self.db is None:
# Try opening with the defaults.
db_opened = self.open_db()
if db_opened is False:
handle_exception('Cannot open the mongo database.')
return False
reading = {"Pa": Pa, "Pr": Pr, }
reading_saved = self.db.save(reading)
if reading_saved is False:
handle_exception('Cannot store the readings.')
return False
return True
####################################################
# Blink the LED
####################################################
def blink(self, ntimes=1):
"""Blink the monitor's LED. Uses Python's Timer object
so that blinking does not pause data capture and
storage.
:param ntimes: Number of times to blink, defaults to 1
:type ntimes: int, optional
"""
def turn_led_on(n):
self.led.value = True
t = threading.Timer(0.5, turn_led_off, [n])
t.start()
def check_led(n):
n -= 1
if n > 0:
turn_led_on(n)
def turn_led_off(n):
self.led.value = False
t = threading.Timer(0.5, check_led, [n])
t.start()
# Start blinking.
assert ntimes > 0
turn_led_on(ntimes)
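# End-to-end sketch (assumes the atm90e32 is wired and mongod is running;
# all names come from the methods above):
#   m = Monitor()
#   if m.init_sensor() and m.open_db():
#       Pa, Pr = m.take_reading()
#       m.store_reading(Pa, Pr)
#       m.blink(2)
#       m.close_db()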
|
python
|
import torch
import numpy as np
from torch import Tensor
from torch.utils.data import Dataset, DataLoader
from torchvision import io
from pathlib import Path
from typing import Tuple
class Wound(Dataset):
"""
num_classes: 18
"""
# explain the purpose of the model
# where is it, how big it is,
# give examples of what each of segments are
# people who are familiar: segmentation
    # medical background: application site, trying to identify different areas in an image
# in the wound we are looking for different types of tissues
# get the story
CLASSES = ['Boundary','PeriWoundPerimeter','WoundPerimeter','Epithellialization','Granulation','Hypergranulation','NecroticSlough','Eschar','OtherWound','DamagedToeNail','HealthyToeNail','Oedematous','Erythematous','OtherSkinUnbroken','Maceration','Excoriation','OtherSkinBroken','HealthySkin']
PALETTE = torch.tensor([[192, 192, 192],[0, 183, 235],[0, 255, 255],[255, 255, 0],[212, 175, 55],[127, 255, 212],[138, 43, 226],[204, 255, 0],[220, 208, 255],[0, 250, 154],[255, 69, 0],[255, 165, 0],[30, 144, 255],[221, 160, 221],[0, 255, 0],[0, 128, 128],[252, 15, 192],[220, 20, 60]])
ID2TRAINID = {0: 255, 1: 255, 2: 255, 3: 255, 4: 255, 5: 255, 6: 255, 7: 0, 8: 1, 9: 255, 10: 255, 11: 2, 12: 3, 13: 4, 14: 255, 15: 255, 16: 255,
17: 5, 18: 255, 19: 6, 20: 7, 21: 8, 22: 9, 23: 10, 24: 11, 25: 12, 26: 13, 27: 14, 28: 15, 29: 255, 30: 255, 31: 16, 32: 17, 33: 18, -1: -1}
def __init__(self, root: str, split: str = 'train', transform = None) -> None:
super().__init__()
assert split in ['train', 'val', 'test']
self.transform = transform
self.n_classes = len(self.CLASSES)
self.ignore_label = 255
self.label_map = np.arange(256)
for id, trainid in self.ID2TRAINID.items():
self.label_map[id] = trainid
img_path = Path(root) / 'leftImg8bit' / split
self.files = list(img_path.rglob('*.png'))
if not self.files:
raise Exception(f"No images found in {img_path}")
print(f"Found {len(self.files)} {split} images.")
def __len__(self) -> int:
return len(self.files)
def __getitem__(self, index: int) -> Tuple[Tensor, Tensor]:
img_path = str(self.files[index])
lbl_path = str(self.files[index]).replace('leftImg8bit', 'gtFine').replace('.png', '_labelIds.png')
image = io.read_image(img_path)
label = io.read_image(lbl_path)
if self.transform:
image, label = self.transform(image, label)
return image, self.encode(label.squeeze().numpy()).long()
def encode(self, label: Tensor) -> Tensor:
label = self.label_map[label]
return torch.from_numpy(label)
# for id, trainid in self.ID2TRAINID.items():
# label[label == id] = trainid
# return label
def decode(self, label: Tensor) -> Tensor:
return self.PALETTE[label.to(int)]
if __name__ == '__main__':
import matplotlib.pyplot as plt
from torchvision import transforms as T
from torchvision.utils import make_grid
from transforms import Compose, RandomResizedCrop, Normalize
root = 'C:\\Users\\sithu\\Documents\\Datasets\\CityScapes'
transform = Compose([RandomResizedCrop((1024, 1024)), Normalize()])
    dataset = Wound(root, split="train", transform=transform)
dataloader = DataLoader(dataset, shuffle=True, batch_size=4)
image, label = next(iter(dataloader))
print('=========================')
print(image.shape, label.shape)
print(label.unique())
label[label==255] = 0
labels = [dataset.decode(lbl).permute(2, 0, 1) for lbl in label]
labels = torch.stack(labels)
inv_normalize = T.Normalize(
mean=(-0.485/0.229, -0.456/0.224, -0.406/0.225),
std=(1/0.229, 1/0.224, 1/0.225)
)
image = inv_normalize(image)
image *= 255
images = torch.vstack([image, labels])
plt.imshow(make_grid(images, nrow=4).to(torch.uint8).numpy().transpose((1, 2, 0)))
plt.show()
|
python
|
#!/usr/bin/python3
def best_score(a_dictionary):
if a_dictionary:
return max(a_dictionary, key=a_dictionary.get)
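# Example: best_score({'John': 12, 'Bob': 90}) returns 'Bob';
# best_score({}) and best_score(None) return None.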
|
python
|
print("before loop")
for count in range(10):
if count > 5:
continue
print(count)
print("after loop")
|
python
|
"""Application management util tests"""
# pylint: disable=redefined-outer-name
from types import SimpleNamespace
import pytest
import factory
from django.core.exceptions import ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile
from mitol.common.utils import now_in_utc
from applications.api import derive_application_state
from applications.constants import (
REVIEW_STATUS_APPROVED,
SUBMISSION_VIDEO,
AppStates,
SUBMISSION_QUIZ,
)
from applications.factories import (
BootcampApplicationFactory,
BootcampRunApplicationStepFactory,
ApplicationStepFactory,
ApplicationStepSubmissionFactory,
VideoInterviewSubmissionFactory,
QuizSubmissionFactory,
)
from applications.management.utils import (
migrate_application,
has_same_application_steps,
)
from ecommerce.factories import OrderFactory
from ecommerce.models import Order
from klasses.factories import BootcampFactory, BootcampRunFactory, InstallmentFactory
from profiles.factories import UserFactory
FAKE_FILE_NAME = "file.txt"
FAKE_LINKEDIN_URL = "http://example.com/linkedin"
BOOTCAMP_PRICE = 100
@pytest.fixture()
def bootcamp_data():
"""Fixture for bootcamps data"""
bootcamp = BootcampFactory.create()
bootcamp_runs = BootcampRunFactory.create_batch(2, bootcamp=bootcamp)
InstallmentFactory.create_batch(
len(bootcamp_runs),
amount=BOOTCAMP_PRICE,
bootcamp_run=factory.Iterator(bootcamp_runs),
)
submission_types = [SUBMISSION_VIDEO, SUBMISSION_VIDEO, SUBMISSION_QUIZ]
app_steps = ApplicationStepFactory.create_batch(
len(submission_types),
bootcamp=bootcamp,
submission_type=factory.Iterator(submission_types),
step_order=factory.Iterator([1, 2, 3]),
)
run_app_steps = {
run.id: BootcampRunApplicationStepFactory.create_batch(
len(app_steps),
bootcamp_run=run,
application_step=factory.Iterator(app_steps),
)
for run in bootcamp_runs
}
return SimpleNamespace(
bootcamp=bootcamp,
runs=bootcamp_runs,
app_steps=app_steps,
run_app_steps=run_app_steps,
submission_types=submission_types,
)
@pytest.fixture()
def completed_app_data(bootcamp_data):
"""Fixture with a completed bootcamp application and associated data"""
user = UserFactory.create()
run = bootcamp_data.runs[0]
now = now_in_utc()
application = BootcampApplicationFactory.create(
user=user,
bootcamp_run=run,
resume_file=SimpleUploadedFile(
f"path/to/{FAKE_FILE_NAME}", b"these are the file contents"
),
linkedin_url=FAKE_LINKEDIN_URL,
resume_upload_date=now,
)
submissions = ApplicationStepSubmissionFactory.create_batch(
run.application_steps.count(),
bootcamp_application=application,
run_application_step=factory.Iterator(
run.application_steps.order_by("application_step__step_order").all()
),
content_object=factory.Iterator(
[
VideoInterviewSubmissionFactory.create(),
VideoInterviewSubmissionFactory.create(),
QuizSubmissionFactory.create(),
]
),
submitted_date=now,
review_status=REVIEW_STATUS_APPROVED,
review_status_date=now,
)
order = OrderFactory.create(
application=application,
user=user,
status=Order.FULFILLED,
total_price_paid=BOOTCAMP_PRICE,
)
application.state = derive_application_state(application)
application.save()
return SimpleNamespace(
application=application, submissions=submissions, order=order
)
@pytest.mark.django_db
def test_migrate_application(bootcamp_data, completed_app_data):
"""
migrate_application should create a new application for a user in a new bootcamp run and
copy over data from an existing application.
"""
to_run = bootcamp_data.runs[1]
to_run_application = migrate_application(
from_run_application=completed_app_data.application, to_run=to_run
)
assert completed_app_data.application.state == AppStates.COMPLETE.value
assert to_run_application.state == AppStates.AWAITING_PAYMENT.value
assert to_run_application.user == completed_app_data.application.user
assert to_run_application.bootcamp_run == to_run
assert (
to_run_application.resume_file.name
== completed_app_data.application.resume_file.name
)
assert to_run_application.linkedin_url == FAKE_LINKEDIN_URL
for i, submission in enumerate(to_run_application.submissions.all()):
assert submission.review_status == REVIEW_STATUS_APPROVED
assert submission.run_application_step in bootcamp_data.run_app_steps[to_run.id]
assert submission.object_id == completed_app_data.submissions[i].object_id
@pytest.mark.django_db
def test_migrate_application_different_order(bootcamp_data, completed_app_data):
"""
migrate_application should be able to migrate an application between runs of two different bootcamps, even if the
application steps are in a different order.
"""
new_bootcamp_run = BootcampRunFactory.create()
InstallmentFactory.create(amount=BOOTCAMP_PRICE, bootcamp_run=new_bootcamp_run)
new_app_steps = ApplicationStepFactory.create_batch(
len(bootcamp_data.app_steps),
bootcamp=new_bootcamp_run.bootcamp,
# Use the same application steps as the existing bootcamp, but in reverse order
submission_type=factory.Iterator(reversed(bootcamp_data.submission_types)),
step_order=factory.Iterator([1, 2, 3]),
)
run_app_steps = BootcampRunApplicationStepFactory.create_batch(
len(new_app_steps),
bootcamp_run=new_bootcamp_run,
application_step=factory.Iterator(new_app_steps),
)
new_run_application = migrate_application(
from_run_application=completed_app_data.application, to_run=new_bootcamp_run
)
assert new_run_application.state == AppStates.AWAITING_PAYMENT.value
ordered_submissions = list(
new_run_application.submissions.order_by(
"run_application_step__application_step__step_order"
)
)
for i, submission in enumerate(ordered_submissions):
assert submission.review_status == REVIEW_STATUS_APPROVED
assert submission.run_application_step == run_app_steps[i]
    # The submissions for the new application should be copied over from the existing one, but the application steps
# are in a different order.
assert [sub.object_id for sub in ordered_submissions] == [
completed_app_data.submissions[2].object_id,
completed_app_data.submissions[0].object_id,
completed_app_data.submissions[1].object_id,
]
@pytest.mark.django_db
def test_migrate_application_existing(bootcamp_data, completed_app_data):
"""
migrate_application should raise an exception if there is already an application in an approved
state for the 'to' run.
"""
to_run = bootcamp_data.runs[1]
BootcampApplicationFactory.create(
bootcamp_run=to_run,
user=completed_app_data.application.user,
        state=AppStates.COMPLETE.value,
)
with pytest.raises(ValidationError):
migrate_application(
from_run_application=completed_app_data.application, to_run=to_run
)
@pytest.mark.django_db
def test_has_same_application_steps(bootcamp_data):
"""
has_same_application_steps should return True if the two bootcamp ids refer to a
set of equivalent application steps
"""
existing_bootcamp = bootcamp_data.runs[0].bootcamp
assert (
has_same_application_steps(existing_bootcamp.id, existing_bootcamp.id) is True
)
new_bootcamp = BootcampFactory.create()
existing_bootcamp_steps = list(bootcamp_data.app_steps)
ApplicationStepFactory.create_batch(
len(bootcamp_data.app_steps),
bootcamp=new_bootcamp,
submission_type=factory.Iterator(
[step.submission_type for step in existing_bootcamp_steps]
),
step_order=factory.Iterator(
[step.step_order for step in existing_bootcamp_steps]
),
)
assert has_same_application_steps(existing_bootcamp.id, new_bootcamp.id) is True
# If a step is removed/added/updated, this function should return False
step = new_bootcamp.application_steps.first()
step.delete()
assert has_same_application_steps(existing_bootcamp.id, new_bootcamp.id) is False
@pytest.mark.django_db
def test_has_same_application_steps_order():
"""
has_same_application_steps should take a flag that determines whether it will return True if the bootcamps
have the same steps in a different order.
"""
submission_types = [SUBMISSION_VIDEO, SUBMISSION_QUIZ]
bootcamps = BootcampFactory.create_batch(2)
ApplicationStepFactory.create_batch(
len(submission_types),
bootcamp=bootcamps[0],
submission_type=factory.Iterator(submission_types),
step_order=factory.Iterator([1, 2]),
)
ApplicationStepFactory.create_batch(
len(submission_types),
bootcamp=bootcamps[1],
submission_type=factory.Iterator(reversed(submission_types)),
step_order=factory.Iterator([1, 2]),
)
assert (
has_same_application_steps(bootcamps[0].id, bootcamps[1].id, ignore_order=True)
is True
)
assert (
has_same_application_steps(bootcamps[0].id, bootcamps[1].id, ignore_order=False)
is False
)
|
python
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2020. Huawei Technologies Co.,Ltd.ALL rights reserved.
This program is licensed under Mulan PSL v2.
You can use it according to the terms and conditions of the Mulan PSL v2.
http://license.coscl.org.cn/MulanPSL2
THIS PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
####################################
# @Author : lemon.higgins
# @Contact : [email protected]
# @Date : 2020-11-10 02:40:04
# @License : Mulan PSL v2
# @Version : 1.0
# @Desc    :   Collect basic system information
#####################################
import subprocess
import os
import logging
from ruamel import yaml
import json
logging.basicConfig(
level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
)
ENV_INFO = {}
def basic_info():
"""
    Collect basic Linux system information.
    Returns:
        [dict]: [summary of the collected environment information]
"""
ENV_INFO["os"] = subprocess.getoutput(
"cat /etc/os-release | grep '^PRETTY_NAME' | awk -F '=' '{print $NF}' | tr -d '\"\"'"
)
ENV_INFO["hostname"] = subprocess.getoutput("hostname")
ENV_INFO["platform"] = subprocess.getoutput(
"hostnamectl | grep 'Virtualization: kvm' >/dev/nul && echo kvm || echo physical"
)
ENV_INFO["frame"] = subprocess.getoutput("uname -m")
ENV_INFO["kernel version"] = subprocess.getoutput("uname -r")
ENV_INFO["cmdline"] = subprocess.getoutput("cat /proc/cmdline")
return ENV_INFO
def mem_info():
"""
    Collect memory information for the environment.
    Returns:
        [dict]: [summary of the collected environment information]
"""
ENV_INFO["mem info"] = {}
ENV_INFO["mem info"]["mem"] = {}
ENV_INFO["mem info"]["swap"] = {}
ENV_INFO["mem info"]["mem"]["total"] = (
subprocess.getoutput("cat /proc/meminfo | grep MemTotal | awk '{print $2}'")
+ "kB"
)
ENV_INFO["mem info"]["mem"]["free"] = (
subprocess.getoutput("cat /proc/meminfo | grep MemFree | awk '{print $2}'")
+ "kB"
)
ENV_INFO["mem info"]["mem"]["available"] = (
subprocess.getoutput(
"cat /proc/meminfo | grep MemAvailable | awk '{print $2}'"
)
+ "kB"
)
ENV_INFO["mem info"]["mem"]["buffers"] = (
subprocess.getoutput("cat /proc/meminfo | grep Buffers | awk '{print $2}'")
+ "kB"
)
ENV_INFO["mem info"]["mem"]["cache"] = (
subprocess.getoutput("cat /proc/meminfo | grep Cached | awk '{print $2}'")
+ "kB"
)
ENV_INFO["mem info"]["swap"]["total"] = (
subprocess.getoutput("cat /proc/meminfo | grep SwapTotal | awk '{print $2}'")
+ "kB"
)
ENV_INFO["mem info"]["swap"]["free"] = (
subprocess.getoutput("cat /proc/meminfo | grep SwapFree | awk '{print $2}'")
+ "kB"
)
ENV_INFO["mem info"]["swap"]["cache"] = (
subprocess.getoutput("cat /proc/meminfo | grep SwapCached | awk '{print $2}'")
+ "kB"
)
return ENV_INFO
def cpu_info():
"""
    Collect CPU information for the environment.
    Returns:
        [dict]: [summary of the collected environment information]
"""
ENV_INFO["cpu info"] = {}
ENV_INFO["cpu info"]["processor"] = subprocess.getoutput(
"cat /proc/cpuinfo | grep processor | wc -l"
)
core_num = 0
cores = subprocess.getoutput(
"cat /proc/cpuinfo | grep 'cpu cores' | awk '{print $NF}'"
).split("\n")
for core in cores:
core_num += int(core)
ENV_INFO["cpu info"]["core"] = core_num
ENV_INFO["cpu info"]["model name"] = subprocess.getoutput(
"cat /proc/cpuinfo | grep 'model name' | awk -F ':' '{print $NF}' | sed 's/^ //g' | uniq"
)
ENV_INFO["cpu info"]["cpu MHz"] = subprocess.getoutput(
"cat /proc/cpuinfo | grep 'cpu MHz' | awk '{print $NF}' | uniq"
)
ENV_INFO["cpu info"]["cache size"] = subprocess.getoutput(
"cat /proc/cpuinfo | grep 'cache size' | awk '{print $NF}' | uniq"
)
return ENV_INFO
class NetInfo(object):
"""
    Collect basic network information for the environment.
"""
def dns():
"""
        Collect system DNS information.
        Returns:
            [dict]: [summary of the collected environment information]
"""
ENV_INFO["net info"] = {}
resolv = []
for dns in subprocess.getoutput(
"cat /etc/resolv.conf | grep nameserver | awk '{print $NF}'"
).split("\n"):
nameserver = {}
nameserver["nameserver"] = dns
resolv.append(nameserver)
ENV_INFO["net info"]["resolv"] = resolv
return ENV_INFO
def eth_info():
"""
        Collect NIC (network interface card) hardware information.
        Returns:
            [dict]: [summary of the collected environment information]
"""
ENV_INFO["net info"] = {}
ENV_INFO["net info"]["eth info"] = []
for id in subprocess.getoutput(
"lspci | grep 'Ethernet' | awk '{print $1}'"
).split("\n"):
if id != "":
ENV_INFO["net info"]["eth info"].append(
subprocess.getoutput(
"lspci -s "
+ id
+ " -v | grep Subsystem: | awk -F 'Subsystem: ' '{print $NF}'"
)
)
return ENV_INFO
def mac(nic):
"""
        Get the MAC address of a NIC.
        Args:
            nic ([string]): [NIC name]
        Returns:
            [string]: [MAC address]
"""
return subprocess.getoutput("cat /sys/class/net/" + nic + "/address")
def status(nic):
"""获取网卡的status信息
Args:
nic ([string]): [网卡名]
Returns:
[dict]: [获取的环境信息总结]
"""
return subprocess.getoutput(
"ip addr show " + nic + " | grep '<.*>' | awk '{print $3}'| tr -d '<>'"
)
def mtu(nic):
"""获取网卡的mtu值
Args:
nic ([string]): [网卡名]
Returns:
[string]: [mtu值]
"""
return subprocess.getoutput(
"ip addr show "
+ nic
+ " | grep 'mtu' | sed -n 's/ /\\n/gp' | sed -n \"$(echo \"$(ip addr show "
+ nic
+ " | grep 'mtu' | sed -n 's/ /\\n/gp' | sed -n '/mtu/=') + 1\" | bc)p\" "
)
def driver(nic):
"""获取网卡驱动信息
Args:
nic ([string]): [网卡名]
Returns:
[string]: [mtu值]
"""
return subprocess.getoutput(
"ethtool -i " + nic + " | grep driver | awk '{print $NF}'"
)
    def bridge(nic):
        """Determine whether the NIC is a bridge.
Returns:
[string]: [YES or NO]
"""
return subprocess.getoutput(
"brctl show | grep " + nic + " >/dev/nul && echo 'YES' || echo 'NO'"
)
def v4_ip(nic):
"""获取ip,route,genmask信息
Returns:
[list]: [ip, route, genmask]
"""
v4_ip = []
for ip in subprocess.getoutput(
"ip addr show " + nic + " | grep 'inet ' | awk '{print $2}' "
).split("\n"):
ipv4 = {}
ipv4["ipv4"] = ip
if ip == "":
ipv4["route"] = ""
ipv4["genmask"] = ""
return ENV_INFO["net info"]["nic"]["v4 ip"].append(ipv4)
ipv4["route"] = subprocess.getoutput(
'ip route | grep "$(echo '
+ ip
+ " | awk -F '/' '{print $1}')\" | awk '{print $1}'"
)
ipv4["genmask"] = subprocess.getoutput(
"ip addr show " + nic + ' | grep "' + ip + " brd\" | awk '{print $4}'"
)
v4_ip.append(ipv4)
return v4_ip
def v6_ip(nic):
"""获取ipv6的基础信息
Returns:
[list]: [ip, route]
"""
v6_ip = []
tmp = []
v6_routes = subprocess.getoutput(
"ip -6 route | grep nexthop | grep " + nic + " | awk '{print $3}'"
).split("\n")
if "fe80::" in subprocess.getoutput(
"ip -6 route | grep 'fe80::' | grep " + nic
):
v6_routes.append("fe80::")
for route in v6_routes:
ipv6 = {}
v6_route = []
if route == "" or route in tmp:
continue
route_h = route.split("::")[0] + ":"
for r in v6_routes:
if route_h in r:
v6_route.append(r)
tmp.append(r)
ipv6["ipv6"] = subprocess.getoutput(
"ip addr show "
+ nic
+ ' | grep "inet6 '
+ route_h
+ "\" | awk '{print $2}'"
)
ipv6["route"] = v6_route
v6_ip.append(ipv6)
return v6_ip
def auto_negotiation(nic):
"""查看网卡的自动协商机制
Returns:
[string]: [off or on]
"""
return subprocess.getoutput(
"ethtool " + nic + " | grep 'Auto-negotiation' | awk '{print $NF}'"
)
def link_detected(nic):
"""链路状态
Returns:
[string]: [yes or no]
"""
return subprocess.getoutput(
"ethtool " + nic + " | grep 'Link detected' | awk '{print $NF}'"
)
def nic_info(nic):
"""获取网卡相关所有信息
Args:
nic (string): 网卡名称
Returns:
[dict]: 网卡信息
"""
nic_info = {}
nic_info["name"] = nic
nic_info["mac"] = NetInfo.mac(nic)
nic_info["status"] = NetInfo.status(nic)
nic_info["mtu"] = NetInfo.mtu(nic)
nic_info["driver"] = NetInfo.driver(nic)
nic_info["brigde"] = NetInfo.brigde(nic)
nic_info["v4 ip"] = NetInfo.v4_ip(nic)
nic_info["v6 ip"] = NetInfo.v6_ip(nic)
nic_info["Auto-negotiation"] = NetInfo.auto_negotiation(nic)
nic_info["Link detected"] = NetInfo.link_detected(nic)
        # Ensure the nested structure exists before appending this NIC.
        ENV_INFO.setdefault("net info", {}).setdefault("nic", []).append(nic_info)
return ENV_INFO
def all_nic_info():
"""获取网卡所有的基础信息
Returns:
[list]: [所有的网卡信息]
"""
ENV_INFO["net info"] = {}
ENV_INFO["net info"]["nic"] = []
for nic in subprocess.getoutput("ls /sys/class/net/").split("\n"):
NetInfo.nic_info(nic)
return ENV_INFO
def disk_info():
"""
    Collect disk and mount-point information.
"""
disk_json = subprocess.getoutput("lsblk -J")
disk = json.loads(disk_json).get("blockdevices")
ENV_INFO["disk info"] = disk
return ENV_INFO
def service_info():
"""
    Collect status information for all services in the environment.
"""
ENV_INFO["service info"] = []
for service in subprocess.getoutput(
"systemctl --all --no-pager | grep -w 'active\|inactive' | sed 's/● / /g' | awk '{print $1}'"
).split("\n"):
service_info = {}
service_info["UNIT"] = service
service = service.replace("\\", "\\\\")
service_info["LOAD"] = subprocess.getoutput(
"systemctl --all --no-pager | grep -w '" + service + "' | awk '{print $2}'"
)
service_info["ACTIVE"] = subprocess.getoutput(
"systemctl --all --no-pager | grep -w '" + service + "' | awk '{print $3}'"
)
service_info["SUB"] = subprocess.getoutput(
"systemctl --all --no-pager | grep -w '" + service + "' | awk '{print $4}'"
)
ENV_INFO["service info"].append(service_info)
    return ENV_INFO
def socket_info():
"""
    Collect socket information for the environment.
"""
ENV_INFO["socket info"] = {}
ENV_INFO["socket info"]["used num"] = subprocess.getoutput(
"cat /proc/net/sockstat | grep sockets | awk '{print $NF}'"
)
return ENV_INFO
def process_info():
    """
    Collect process information.
    """
    ENV_INFO["process info"] = []
    ps_cmd = "ps -eo pid,ppid,user,rss,pmem,pcpu,vsize,args"
    for pid in subprocess.getoutput(
        ps_cmd + " | grep -vw 'PID PPID USER' | awk '{print $1}'"
    ).split("\n"):
        process = {}
        process["pid"] = pid
        # Column numbers follow the field order passed to ps above:
        # ppid=$2, user=$3, rss=$4, pmem=$5, pcpu=$6, vsize=$7, args=$8 onward.
        for field, column in [
            ("ppid", "$2"),
            ("user", "$3"),
            ("rss", "$4"),
            ("pmem", "$5"),
            ("pcpu", "$6"),
            ("vsize", "$7"),
        ]:
            process[field] = subprocess.getoutput(
                ps_cmd + " | awk -v pid=" + pid + " '$1 == pid {print " + column + "}'"
            )
        process["args"] = subprocess.getoutput(
            ps_cmd
            + " | awk -v pid="
            + pid
            + " '$1 == pid {for (i = 8; i <= NF; i++) printf \"%s \", $i}'"
        )
        ENV_INFO["process info"].append(process)
    return ENV_INFO
def collect_log():
"""收集message日志
"""
exitcode, output = subprocess.getstatusoutput(
"log_dir=$(mktemp -d) && cp /var/log/message* ${log_dir} -fr && dmesg > ${log_dir}/kmesg && tar -zcvf "
+ os.getcwd()
+ "/log.tar.gz ${log_dir} && rm -rf ${log_dir}"
)
if exitcode != 0:
logging.error("failed to collect logs.")
exit(1)
def write_yaml(info):
"""
    Write the data to a YAML file.
    Args:
        info ([dict]): [environment information data]
"""
with open(
os.path.split(os.path.realpath(__file__))[0] + "/envInfo.yaml", "w+"
) as f:
yaml.dump(info, f, Dumper=yaml.RoundTripDumper, allow_unicode=True)
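# Illustrative usage sketch (not part of the original script): collect a
# couple of information groups and dump them to envInfo.yaml next to this
# file via write_yaml() above.
#
#     basic_info()
#     mem_info()
#     write_yaml(ENV_INFO)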
def install_rpm(rpm):
"""安装环境信息收集需要的rpm软件包
Args:
rpm (string): 软件包名
"""
exitcode, output = subprocess.getstatusoutput(
"rpm -qa " + rpm + "&& yum -y install " + rpm
)
if exitcode != 0:
logging.error("failed to install rpms:" + rpm)
exit(1)
if __name__ == "__main__":
install_rpm("coreutils grep gawk hostname systemd util-linux systemd procps-ng")
basic_info()
mem_info()
cpu_info()
NetInfo.all_nic_info()
disk_info()
service_info()
process_info()
collect_log()
write_yaml(ENV_INFO)
|
python
|
# -*- coding: utf-8 -*-
# @Time : 2022/2/20
# @Author : Zhelong Huang
# @File : client2.py
# @Description: client2
_POS = 2
import os, sys
sys.path.append(os.path.abspath('.'))
from coach import LoadCoach
import argparse
arg = argparse.ArgumentParser()
arg.add_argument('-r', '--render', default=True)
arg.add_argument('-c', '--client', default="Demo")
args = vars(arg.parse_args())
CLIENT_ARGS = {
'url' : 'ws://127.0.0.1:23456/game/client{}'.format(_POS),
'render' : bool(int(args['render']))
}
if __name__ == '__main__':
try:
ws = LoadCoach(args['client'])(**CLIENT_ARGS)
ws.connect()
ws.run_forever()
except KeyboardInterrupt:
ws.close()
|
python
|
# A non-empty zero-indexed array A consisting of N integers is given.
#
# A permutation is a sequence containing each element from 1 to N once, and
# only once.
#
# For example, array A such that:
# A = [4, 1, 3, 2]
# is a permutation, but array A such that:
# A = [4, 1, 3]
# is not a permutation, because value 2 is missing.
#
# The goal is to check whether array A is a permutation.
#
# Write a function:
# def solution(A)
# that, given a zero-indexed array A, returns 1 if array A is a permutation
# and 0 if it is not.
#
# For example, given array A such that:
# A = [4, 1, 3, 2]
# the function should return 1.
#
# Given array A such that:
# A = [4, 1, 3]
# the function should return 0.
#
# Assume that:
# * N is an integer within the range [1..100,000];
# * each element of array A is an integer within the range [1..1,000,000,000].
#
# Complexity:
# * expected worst-case time complexity is O(N);
# * expected worst-case space complexity is O(N), beyond input storage (not
# counting the storage required for input arguments).
def solution(A):
N = len(A)
if N == 1:
if A[0] == 1:
return 1
else:
return 0
count = {}
for i in range(N):
if A[i] not in count:
count[A[i]] = 0
count[A[i]] += 1
if count[A[i]] > 1:
return 0
values = count.keys()
if max(values) == N:
return 1
return 0
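# A quick sanity check against the examples from the problem statement above;
# this block is illustrative and not part of the original exercise.
if __name__ == "__main__":
    assert solution([4, 1, 3, 2]) == 1  # a valid permutation of 1..4
    assert solution([4, 1, 3]) == 0  # value 2 is missing
    assert solution([1]) == 1  # the smallest valid permutation
    print("all checks passed")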
|
python
|
"""Flexmock public API."""
# pylint: disable=no-self-use,too-many-lines
import inspect
import re
import sys
import types
from types import BuiltinMethodType, TracebackType
from typing import Any, Callable, Dict, Iterator, List, NoReturn, Optional, Tuple, Type
from flexmock.exceptions import (
CallOrderError,
ExceptionClassError,
ExceptionMessageError,
FlexmockError,
MethodCallError,
MethodSignatureError,
MockBuiltinError,
StateError,
)
AT_LEAST = "at least"
AT_MOST = "at most"
EXACTLY = "exactly"
SPECIAL_METHODS = (classmethod, staticmethod)
UPDATED_ATTRS = ["should_receive", "should_call", "new_instances"]
DEFAULT_CLASS_ATTRIBUTES = [attr for attr in dir(type) if attr not in dir(type("", (object,), {}))]
# Python 3.6 does not have the re.Pattern type, so derive it from a compiled pattern
RE_TYPE = type(re.compile(""))
class ReturnValue:
"""ReturnValue"""
def __init__(self, value: Optional[Any] = None, raises: Optional[Exception] = None) -> None:
self.value = value
self.raises = raises
def __str__(self) -> str:
if self.raises:
return f"{self.raises}({_arg_to_str(self.value)})"
if not isinstance(self.value, tuple):
return str(_arg_to_str(self.value))
if len(self.value) == 1:
return str(_arg_to_str(self.value[0]))
values = ", ".join([_arg_to_str(x) for x in self.value])
return f"({values})"
class Mock:
"""Fake object class returned by the flexmock() function."""
def __init__(self, **kwargs: Any) -> None:
"""Mock constructor.
Args:
- kwargs: dict of attribute/value pairs used to initialize the mock object
"""
self._object: Any = self
for attr, value in kwargs.items():
if isinstance(value, property):
setattr(self.__class__, attr, value)
else:
setattr(self, attr, value)
def __enter__(self) -> Any:
return self._object
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
pass
def __call__(self, *args: Any, **kwargs: Any) -> "Mock":
"""Make Expectation.mock() work with parens."""
return self
def __iter__(self) -> Iterator[Any]:
"""Makes the mock object iterable.
Call the instance's version of __iter__ if available, otherwise yield self.
"""
if (
hasattr(self, "__dict__")
and isinstance(self.__dict__, dict)
and "__iter__" in self.__dict__
):
for item in self.__dict__["__iter__"](self):
yield item
else:
yield self
def should_receive(self, name: str) -> "Expectation":
"""Replaces the specified attribute with a fake.
Args:
- name: string name of the attribute to replace
Returns:
- Expectation object which can be used to modify the expectations
on the fake attribute
"""
if name in UPDATED_ATTRS:
raise FlexmockError("unable to replace flexmock methods")
chained_methods = None
if "." in name:
name, chained_methods = name.split(".", 1)
name = self._update_name_if_mangled(name)
self._ensure_object_has_named_attribute(name)
if chained_methods:
if not isinstance(self._object, Mock) and not hasattr(
getattr(self._object, name), "__call__"
):
                # If the attribute is not callable, create a partial mock of it
                # so the remaining methods in the chain can be attached to it
return_value = _create_partial_mock(getattr(self._object, name))
else:
return_value = Mock()
self._create_expectation(name, return_value)
return return_value.should_receive(chained_methods)
return self._create_expectation(name)
def _update_name_if_mangled(self, name: str) -> str:
"""This allows flexmock to mock methods with name mangling."""
if name.startswith("__") and not name.endswith("__") and not inspect.ismodule(self._object):
class_name: str
if inspect.isclass(self._object):
class_name = self._object.__name__
else:
class_name = self._object.__class__.__name__
name = f"_{class_name.lstrip('_')}__{name.lstrip('_')}"
return name
def _ensure_object_has_named_attribute(self, name: str) -> None:
if not isinstance(self._object, Mock) and not self._hasattr(self._object, name):
if hasattr(self._object, "__name__"):
obj_name = self._object.__name__
else:
obj_name = str(self._object)
raise FlexmockError(f"{obj_name} does not have attribute '{name}'")
def _hasattr(self, obj: Any, name: str) -> bool:
"""Ensure hasattr checks don't create side-effects for properties."""
if not inspect.isclass(obj) and hasattr(obj, "__dict__") and name not in obj.__dict__:
if name in DEFAULT_CLASS_ATTRIBUTES:
return False # avoid false positives for things like __call__
return hasattr(obj.__class__, name)
return hasattr(obj, name)
def should_call(self, name: str) -> "Expectation":
"""Creates a spy.
This means that the original method will be called rather than the fake
version. However, we can still keep track of how many times it's called and
with what arguments, and apply expectations accordingly.
should_call is meaningless/not allowed for non-callable attributes.
Args:
- name: string name of the method
Returns:
- Expectation object
"""
if isinstance(self._object, Mock) and not hasattr(self._object, name):
raise FlexmockError(
f"Mock object does not have attribute '{name}'. "
f'Did you mean to call should_receive("{name}") instead?'
)
expectation = self.should_receive(name)
return expectation.replace_with(expectation.__dict__["_original"])
def new_instances(self, *kargs: Any) -> "Expectation":
"""Overrides __new__ method on the class to return custom objects.
Alias for should_receive('__new__').and_return(kargs).one_by_one
Args:
- kargs: objects to return on each successive call to __new__
Returns:
- Expectation object
"""
if inspect.isclass(self._object):
return self.should_receive("__new__").and_return(kargs).one_by_one()
raise FlexmockError("new_instances can only be called on a class mock")
def _create_expectation(self, name: str, return_value: Optional[Any] = None) -> "Expectation":
expectation = self._get_or_create_expectation(name, return_value)
FlexmockContainer.add_expectation(self, expectation)
if _isproperty(self._object, name):
self._update_property(expectation, name)
elif (
isinstance(self._object, Mock)
or hasattr(getattr(self._object, name), "__call__")
or inspect.isclass(getattr(self._object, name))
):
self._update_method(expectation, name)
else:
self._update_attribute(expectation, name, return_value)
return expectation
def _get_or_create_expectation(
self, name: str, return_value: Optional[Any] = None
) -> "Expectation":
saved_expectations = FlexmockContainer.get_expectations_with_name(self, name)
if saved_expectations:
# If there is already an expectation for the same name, get the
# original object from the FIRST saved expectation.
return Expectation(
self._object,
name=name,
return_value=return_value,
original=saved_expectations[0].__dict__.get("_original"),
method_type=saved_expectations[0].__dict__.get("_method_type"),
)
return Expectation(self._object, name=name, return_value=return_value)
def _create_placeholder_mock_for_proper_teardown(
self, obj: Any, name: str, original: Any
) -> None:
"""Ensures that the given function is replaced on teardown."""
mock = Mock()
mock._object = obj
expectation = Expectation(obj, name=name, original=original)
FlexmockContainer.add_expectation(mock, expectation)
def _update_method(self, expectation: "Expectation", name: str) -> None:
method_instance = self._create_mock_method(name)
if self._hasattr(self._object, name) and not hasattr(expectation, "_original"):
expectation._update_original(name, self._object)
expectation._method_type = self._get_method_type(name, expectation._original)
if expectation._method_type in SPECIAL_METHODS:
expectation._original_function = getattr(self._object, name)
if not inspect.isclass(self._object) or expectation._method_type in SPECIAL_METHODS:
method_instance = types.MethodType(method_instance, self._object)
expectation._local_override = _setattr(self._object, name, method_instance)
if (
expectation._local_override
and not inspect.isclass(self._object)
and not isinstance(self._object, Mock)
and hasattr(self._object.__class__, name)
):
self._update_class_for_magic_builtins(name)
def _get_method_type(self, name: str, method: Callable[..., Any]) -> Any:
"""Get method type of the original method.
Method type is saved because after mocking the base class, it is difficult to determine
the original method type.
"""
method_type = self._get_saved_method_type(name, method)
if method_type is not None:
return method_type
if _is_class_method(method, name):
method_type = classmethod
elif _is_static_method(self._object, name):
method_type = staticmethod
else:
method_type = type(method)
setattr(self._object, f"{name}__flexmock__method_type", method_type)
return method_type
def _get_saved_method_type(self, name: str, method: Callable[..., Any]) -> Optional[Any]:
"""Check method type of the original method if it was saved to the class or base class."""
bound_to = getattr(method, "__self__", None)
if bound_to is not None and inspect.isclass(bound_to):
# Check if the method type was saved in a base class
for cls in inspect.getmro(bound_to):
method_type = vars(cls).get(f"{name}__flexmock__method_type")
if method_type:
return method_type
return None
def _update_class_for_magic_builtins(self, name: str) -> None:
"""Fixes method resolution order for built-in methods.
Replacing magic builtins on instances has no effect as the one attached
to the class takes precedence. To work around it, we update the class'
method to check if the instance in question has one in its own __dict__
and call that instead.
"""
if not (name.startswith("__") and name.endswith("__") and len(name) > 4):
return
original = getattr(self._object.__class__, name)
def updated(self: Any, *kargs: Any, **kwargs: Any) -> Any:
if (
hasattr(self, "__dict__")
and isinstance(self.__dict__, dict)
and name in self.__dict__
):
return self.__dict__[name](*kargs, **kwargs)
return original(self, *kargs, **kwargs)
setattr(self._object.__class__, name, updated)
if updated.__code__ != original.__code__:
self._create_placeholder_mock_for_proper_teardown(
self._object.__class__, name, original
)
def _update_attribute(
self, expectation: "Expectation", name: str, return_value: Optional[Any] = None
) -> None:
expectation._callable = False
if self._hasattr(self._object, name) and not hasattr(expectation, "_original"):
expectation._update_original(name, self._object)
expectation._local_override = _setattr(self._object, name, return_value)
def _update_property(self, expectation: "Expectation", name: str) -> None:
new_name = f"_flexmock__{name}"
obj = self._object
if not inspect.isclass(obj):
obj = obj.__class__
expectation._callable = False
original = getattr(obj, name)
@property # type: ignore
def updated(self: Any) -> Any:
if (
hasattr(self, "__dict__")
and isinstance(self.__dict__, dict)
and name in self.__dict__
):
return self.__dict__[name]
# Return original for instances that are not mocked
return getattr(self, new_name)
setattr(obj, name, updated)
if not hasattr(obj, new_name):
# don't try to double update
FlexmockContainer.add_teardown_property(obj, new_name)
setattr(obj, new_name, original)
self._create_placeholder_mock_for_proper_teardown(obj, name, original)
def _create_mock_method(self, name: str) -> Callable[..., Any]:
def _handle_exception_matching(expectation: Expectation) -> None:
# pylint: disable=misplaced-bare-raise
return_values = _getattr(expectation, "_return_values")
if return_values:
raised, instance = sys.exc_info()[:2]
assert raised, "no exception was raised"
message = str(instance)
expected = return_values[0].raises
if not expected:
raise
args = return_values[0].value
if inspect.isclass(expected):
expected_instance = expected(*args["kargs"], **args["kwargs"])
expected_message = str(expected_instance)
if expected is not raised and expected not in raised.__bases__:
raise ExceptionClassError(
f"Raised exception for call {expectation._name} "
"did not match expectation:\n"
f" Expected:\t{expected}\n"
f" Raised:\t{raised}"
)
if args["kargs"] and isinstance(args["kargs"][0], RE_TYPE):
if not args["kargs"][0].search(message):
raise ExceptionMessageError(
f"Error message mismatch with raised {expected.__name__}:\n"
f" Expected pattern:\n\t/{args['kargs'][0].pattern}/\n"
f" Received message:\n\t'{message}'"
)
elif expected_message and expected_message != message:
raise (
ExceptionMessageError(
f"Error message mismatch with raised {expected.__name__}:\n"
f" Expected message:\n\t'{message}'\n"
f" Received message:\n\t'{expected_message}'"
)
)
elif expected is not raised:
raise ExceptionClassError(
f"Raised exception for call {expectation._name} "
f"did not match expectation:\n"
f" Expected:\t{repr(expected)}\n"
f" Raised:\t{raised}\n\n"
"Did you try to call and_raise with an instance?\n"
'Instead of and_raise(Exception("arg")), try and_raise(Exception, "arg")'
)
else:
raise
def match_return_values(expected: Any, received: Any) -> bool:
if not isinstance(expected, tuple):
expected = (expected,)
if not isinstance(received, tuple):
received = (received,)
if len(received) != len(expected):
return False
for i, val in enumerate(received):
if not _arguments_match(val, expected[i]):
return False
return True
def pass_thru(
expectation: Expectation, runtime_self: Any, *kargs: Any, **kwargs: Any
) -> Any:
return_values = None
try:
original = _getattr(expectation, "_original")
_mock = _getattr(expectation, "_mock")
if inspect.isclass(_mock):
if expectation._method_type in SPECIAL_METHODS:
original = _getattr(expectation, "_original_function")
return_values = original(*kargs, **kwargs)
else:
return_values = original(runtime_self, *kargs, **kwargs)
else:
return_values = original(*kargs, **kwargs)
except Exception:
return _handle_exception_matching(expectation)
expected_values = _getattr(expectation, "_return_values")
if expected_values and not match_return_values(expected_values[0].value, return_values):
expected_value = expected_values[0].value
# Display strings with quotes in the error message
if isinstance(return_values, str):
return_values = repr(return_values)
if isinstance(expected_value, str):
expected_value = repr(expected_value)
raise (
MethodSignatureError(
f"Returned values for call {expectation._name} did not match expectation:\n"
f" Expected:\t{expected_value}\n"
f" Returned:\t{return_values}"
)
)
return return_values
def _handle_matched_expectation(
expectation: Expectation, runtime_self: Any, *kargs: Any, **kwargs: Any
) -> Any:
if not expectation._runnable():
raise StateError(
f"{name} expected to be called when {expectation._get_runnable()} is True"
)
expectation._times_called += 1
expectation._verify(final=False)
_pass_thru = _getattr(expectation, "_pass_thru")
_replace_with = _getattr(expectation, "_replace_with")
if _pass_thru:
return pass_thru(expectation, runtime_self, *kargs, **kwargs)
if _replace_with:
return _replace_with(*kargs, **kwargs)
return_values = _getattr(expectation, "_return_values")
if return_values:
return_value = return_values[0]
del return_values[0]
return_values.append(return_value)
else:
return_value = ReturnValue()
if return_value.raises:
if inspect.isclass(return_value.raises):
raise return_value.raises(
*return_value.value["kargs"], **return_value.value["kwargs"]
)
raise return_value.raises # pylint: disable=raising-bad-type
return return_value.value
def mock_method(runtime_self: Any, *kargs: Any, **kwargs: Any) -> Any:
arguments = {"kargs": kargs, "kwargs": kwargs}
expectation = FlexmockContainer.get_flexmock_expectation(self, name, arguments)
if expectation:
return _handle_matched_expectation(expectation, runtime_self, *kargs, **kwargs)
# inform the user which expectation(s) for the method were _not_ matched
saved_expectations = reversed(FlexmockContainer.get_expectations_with_name(self, name))
error_msg = (
f"Arguments for call {name} did not match expectations:\n"
f" Received call:\t{_format_args(name, arguments)}\n"
)
if saved_expectations:
error_msg += "\n".join(
f" Expected call[{index}]:\t{_format_args(name, expectation._args)}"
for index, expectation in enumerate(saved_expectations, 1)
)
raise MethodSignatureError(error_msg)
return mock_method
def flexmock_teardown() -> None:
"""Performs flexmock-specific teardown tasks."""
saved = {}
instances = []
classes = []
for mock_object, expectations in FlexmockContainer.flexmock_objects.items():
saved[mock_object] = expectations[:]
for expectation in expectations:
_getattr(expectation, "_reset")()
for expectation in expectations:
# Remove method type attributes set by flexmock. This needs to be done after
# resetting all the expectations because method type is needed in expectation teardown.
if inspect.isclass(mock_object) or hasattr(mock_object, "__class__"):
try:
delattr(mock_object._object, f"{expectation._name}__flexmock__method_type")
except (AttributeError, TypeError):
pass
for mock in saved:
obj = mock._object
if not isinstance(obj, Mock) and not inspect.isclass(obj):
instances.append(obj)
if inspect.isclass(obj):
classes.append(obj)
for obj in instances + classes:
for attr in UPDATED_ATTRS:
try:
obj_dict = obj.__dict__
if obj_dict[attr].__code__ is Mock.__dict__[attr].__code__:
del obj_dict[attr]
except Exception:
try:
if getattr(obj, attr).__code__ is Mock.__dict__[attr].__code__:
delattr(obj, attr)
except AttributeError:
pass
FlexmockContainer.teardown_properties()
FlexmockContainer.reset()
# make sure this is done last to keep exceptions here from breaking
# any of the previous steps that cleanup all the changes
for mock_object, expectations in saved.items():
for expectation in expectations:
_getattr(expectation, "_verify")()
class Expectation:
"""Holds expectations about methods.
The information contained in the Expectation object includes method name,
its argument list, return values, and any exceptions that the method might
raise.
"""
def __init__(
self,
mock: Mock,
name: Optional[str] = None,
return_value: Optional[Any] = None,
original: Optional[Any] = None,
method_type: Optional[Any] = None,
) -> None:
if original is not None:
self._original = original
self._name = name
self._times_called: int = 0
self._modifier: str = EXACTLY
self._args: Optional[Dict[str, Any]] = None
self._method_type = method_type
self._argspec: Optional[inspect.FullArgSpec] = None
self._return_values = [ReturnValue(return_value)] if return_value is not None else []
self._replace_with: Optional[Callable[..., Any]] = None
self._original_function: Optional[Callable[..., Any]] = None
self._expected_calls = {EXACTLY: None, AT_LEAST: None, AT_MOST: None}
self._runnable: Callable[..., bool] = lambda: True
self._mock = mock
self._pass_thru = False
self._ordered = False
self._one_by_one = False
self._verified = False
self._callable = True
self._local_override = False
def __str__(self) -> str:
args = _format_args(str(self._name), self._args)
return_values = ", ".join(str(x) for x in self._return_values)
return f"{args} -> ({return_values})"
def __call__(self) -> "Expectation":
return self
def __getattribute__(self, name: str) -> Any:
if name == "once":
return _getattr(self, "times")(1)
if name == "twice":
return _getattr(self, "times")(2)
if name == "never":
return _getattr(self, "times")(0)
if name in ("at_least", "at_most", "ordered", "one_by_one"):
return _getattr(self, name)()
if name == "mock":
return _getattr(self, "mock")()
return _getattr(self, name)
def __getattr__(self, name: str) -> NoReturn:
self.__raise(
            AttributeError, f"'{self.__class__.__name__}' object has no attribute '{name}'"
)
def _get_runnable(self) -> str:
"""Ugly hack to get the name of when() condition from the source code."""
name = "condition"
try:
source = inspect.getsource(self._runnable)
if "when(" in source:
name = source.split("when(")[1].split(")")[0]
elif "def " in source:
name = source.split("def ")[1].split("(")[0]
except Exception:
# couldn't get the source, oh well
pass
return name
def _verify_signature_match(self, *kargs: Any, **kwargs: Any) -> None:
if isinstance(self._mock, Mock):
return # no sense in enforcing this for fake objects
allowed = self._argspec
args_len = len(allowed.args)
# self is the first expected argument
has_self = allowed.args and allowed.args[0] == "self"
# Builtin methods take `self` as the first argument but `inspect.ismethod` returns False
# so we need to check for them explicitly
is_builtin_method = isinstance(self._original, BuiltinMethodType) and has_self
# Methods take `self` if not a staticmethod
is_method = inspect.ismethod(self._original) and self._method_type is not staticmethod
# Class init takes `self`
is_class = inspect.isclass(self._original)
        # Class methods, and instance methods invoked on a class, take `cls`
is_class_method = (
inspect.isfunction(self._original)
and inspect.isclass(self._mock)
and self._method_type is not staticmethod
)
if is_builtin_method or is_method or is_class or is_class_method:
# Do not count `self` or `cls`.
args_len -= 1
minimum = args_len - (allowed.defaults and len(allowed.defaults) or 0)
maximum = None
if allowed.varargs is None and allowed.varkw is None:
maximum = args_len
total_positional = len(kargs + tuple(a for a in kwargs if a in allowed.args))
named_optionals = [
a
for a in kwargs
if allowed.defaults
if a in allowed.args[len(allowed.args) - len(allowed.defaults) :]
]
if allowed.defaults and total_positional == minimum and named_optionals:
minimum += len(named_optionals)
if total_positional < minimum:
arguments = "argument" if minimum == 1 else "arguments"
raise MethodSignatureError(
f"{self._name} requires at least {minimum} {arguments}, "
f"expectation provided {total_positional}"
)
if maximum is not None and total_positional > maximum:
arguments = "argument" if maximum == 1 else "arguments"
raise MethodSignatureError(
f"{self._name} requires at most {maximum} {arguments}, "
f"expectation provided {total_positional}"
)
if args_len == len(kargs) and any(a for a in kwargs if a in allowed.args):
given_args = [a for a in kwargs if a in allowed.args]
arguments = "argument" if len(given_args) == 1 else "arguments"
raise MethodSignatureError(
f"{given_args} already given as positional {arguments} to {self._name}"
)
if not allowed.varkw and any(
a for a in kwargs if a not in allowed.args + allowed.kwonlyargs
):
invalid_arg = [a for a in kwargs if a not in allowed.args + allowed.kwonlyargs][0]
raise MethodSignatureError(
f"{invalid_arg} is not a valid keyword argument to {self._name}"
)
# check that kwonlyargs that don't have default value specified are provided
required_kwonlyargs = [
a for a in allowed.kwonlyargs if a not in (allowed.kwonlydefaults or {})
]
missing_kwonlyargs = [a for a in required_kwonlyargs if a not in kwargs]
if missing_kwonlyargs:
arguments = "argument" if len(missing_kwonlyargs) == 1 else "arguments"
missing_args = '", "'.join(missing_kwonlyargs)
raise MethodSignatureError(
f'{self._name} requires keyword-only {arguments} "{missing_args}"'
)
def _update_original(self, name: str, obj: Any) -> None:
if hasattr(obj, "__dict__") and name in obj.__dict__:
self._original = obj.__dict__[name]
else:
self._original = getattr(obj, name)
self._update_argspec()
def _update_argspec(self) -> None:
original = self.__dict__.get("_original")
if original:
try:
self._argspec = inspect.getfullargspec(original)
except TypeError:
# built-in function: fall back to stupid processing and hope the
# builtins don't change signature
pass
def _normalize_named_args(self, *kargs: Any, **kwargs: Any) -> Dict[str, Any]:
argspec = self._argspec
default = {"kargs": kargs, "kwargs": kwargs}
if not argspec:
return default
ret: Dict[str, Any] = {"kargs": (), "kwargs": kwargs}
if inspect.ismethod(self._original):
args = argspec.args[1:]
else:
args = argspec.args
for i, arg in enumerate(kargs):
if len(args) <= i:
return default
ret["kwargs"][args[i]] = arg
return ret
def __raise(self, exception: Type[Exception], message: str) -> NoReturn:
"""Safe internal raise implementation.
In case we're patching builtins, it's important to reset the
expectation before raising any exceptions or else things like
open() might be stubbed out and the resulting runner errors are very
difficult to diagnose.
"""
self._reset()
raise exception(message)
def _match_args(self, given_args: Any) -> bool:
"""Check if the set of given arguments matches this expectation."""
expected_args = self._args
given_args = self._normalize_named_args(*given_args["kargs"], **given_args["kwargs"])
if expected_args == given_args or expected_args is None:
return True
if (
len(given_args["kargs"]) != len(expected_args["kargs"])
or len(given_args["kwargs"]) != len(expected_args["kwargs"])
or (sorted(given_args["kwargs"].keys()) != sorted(expected_args["kwargs"].keys()))
):
return False
for i, arg in enumerate(given_args["kargs"]):
if not _arguments_match(arg, expected_args["kargs"][i]):
return False
for key, value in given_args["kwargs"].items():
if not _arguments_match(value, expected_args["kwargs"][key]):
return False
return True
def mock(self) -> Mock:
"""Return the mock associated with this expectation."""
return self._mock
def with_args(self, *kargs: Any, **kwargs: Any) -> "Expectation":
"""Override the arguments used to match this expectation's method.
Args:
- kargs: optional keyword arguments
- kwargs: optional named arguments
Returns:
- self, i.e. can be chained with other Expectation methods
"""
if not self._callable:
self.__raise(FlexmockError, "can't use with_args() with attribute stubs")
self._update_argspec()
if self._argspec:
# do this outside try block as TypeError is way too general and catches
# unrelated errors in the verify signature code
self._verify_signature_match(*kargs, **kwargs)
self._args = self._normalize_named_args(*kargs, **kwargs)
else:
self._args = {"kargs": kargs, "kwargs": kwargs}
return self
def and_return(self, *values: Any) -> "Expectation":
"""Override the return value of this expectation's method.
When and_return is given multiple times, each value provided is returned
on successive invocations of the method. It is also possible to mix
and_return with and_raise in the same manner to alternate between returning
        a value and raising an exception on different method invocations.
When combined with the one_by_one property, value is treated as a list of
values to be returned in the order specified by successive calls to this
method rather than a single list to be returned each time.
Args:
- values: optional list of return values, defaults to None if not given
Returns:
- self, i.e. can be chained with other Expectation methods
"""
if not values:
value = None
elif len(values) == 1:
value = values[0]
else:
value = values
if not self._callable:
_setattr(self._mock, str(self._name), value)
return self
return_values = _getattr(self, "_return_values")
if not _getattr(self, "_one_by_one"):
value = ReturnValue(value)
return_values.append(value)
else:
try:
return_values.extend([ReturnValue(v) for v in value]) # type: ignore
except TypeError:
return_values.append(ReturnValue(value))
return self
def times(self, number: int) -> "Expectation":
"""Number of times this expectation's method is expected to be called.
There are also 3 aliases for the times() method:
- once() -> times(1)
- twice() -> times(2)
- never() -> times(0)
Args:
- number: int
Returns:
- self, i.e. can be chained with other Expectation methods
"""
if not self._callable:
self.__raise(FlexmockError, "can't use times() with attribute stubs")
expected_calls = _getattr(self, "_expected_calls")
modifier = _getattr(self, "_modifier")
expected_calls[modifier] = number
return self
def one_by_one(self) -> "Expectation":
"""Modifies the return value to be treated as a list of return values.
Each value in the list is returned on successive invocations of the method.
Returns:
- self, i.e. can be chained with other Expectation methods
"""
if not self._callable:
self.__raise(FlexmockError, "can't use one_by_one() with attribute stubs")
if not self._one_by_one:
self._one_by_one = True
return_values = _getattr(self, "_return_values")
saved_values = return_values[:]
self._return_values = return_values = []
for value in saved_values:
try:
for val in value.value:
return_values.append(ReturnValue(val))
except TypeError:
return_values.append(value)
return self
def at_least(self) -> "Expectation":
"""Modifies the associated times() expectation.
When given, an exception will only be raised if the method is called less
than times() specified. Does nothing if times() is not given.
Returns:
- self, i.e. can be chained with other Expectation methods
"""
if not self._callable:
self.__raise(FlexmockError, "can't use at_least() with attribute stubs")
expected_calls = _getattr(self, "_expected_calls")
modifier = _getattr(self, "_modifier")
if expected_calls[AT_LEAST] is not None or modifier == AT_LEAST:
self.__raise(FlexmockError, "cannot use at_least modifier twice")
if modifier == AT_MOST and expected_calls[AT_MOST] is None:
self.__raise(FlexmockError, "cannot use at_least with at_most unset")
self._modifier = AT_LEAST
return self
def at_most(self) -> "Expectation":
"""Modifies the associated "times" expectation.
When given, an exception will only be raised if the method is called more
than times() specified. Does nothing if times() is not given.
Returns:
- self, i.e. can be chained with other Expectation methods
"""
if not self._callable:
self.__raise(FlexmockError, "can't use at_most() with attribute stubs")
expected_calls = _getattr(self, "_expected_calls")
modifier = _getattr(self, "_modifier")
if expected_calls[AT_MOST] is not None or modifier == AT_MOST:
self.__raise(FlexmockError, "cannot use at_most modifier twice")
if modifier == AT_LEAST and expected_calls[AT_LEAST] is None:
self.__raise(FlexmockError, "cannot use at_most with at_least unset")
self._modifier = AT_MOST
return self
def ordered(self) -> "Expectation":
"""Makes the expectation respect the order of should_receive statements.
An exception will be raised if methods are called out of order, determined
by order of should_receive calls in the test.
Returns:
- self, i.e. can be chained with other Expectation methods
"""
if not self._callable:
self.__raise(FlexmockError, "can't use ordered() with attribute stubs")
self._ordered = True
FlexmockContainer.ordered.append(self)
return self
def when(self, func: Callable[..., Any]) -> "Expectation":
"""Sets an outside resource to be checked before executing the method.
Args:
- func: function to call to check if the method should be executed
Returns:
- self, i.e. can be chained with other Expectation methods
"""
if not self._callable:
self.__raise(FlexmockError, "can't use when() with attribute stubs")
if not hasattr(func, "__call__"):
self.__raise(FlexmockError, "when() parameter must be callable")
self._runnable = func
return self
def and_raise(self, exception: Exception, *kargs: Any, **kwargs: Any) -> "Expectation":
"""Specifies the exception to be raised when this expectation is met.
Args:
- exception: class or instance of the exception
- kargs: optional keyword arguments to pass to the exception
- kwargs: optional named arguments to pass to the exception
Returns:
- self, i.e. can be chained with other Expectation methods
"""
if not self._callable:
self.__raise(FlexmockError, "can't use and_raise() with attribute stubs")
args = {"kargs": kargs, "kwargs": kwargs}
return_values = _getattr(self, "_return_values")
return_values.append(ReturnValue(raises=exception, value=args))
return self
def replace_with(self, function: Callable[..., Any]) -> "Expectation":
"""Gives a function to run instead of the mocked out one.
Args:
- function: callable
Returns:
- self, i.e. can be chained with other Expectation methods
"""
if not self._callable:
self.__raise(FlexmockError, "can't use replace_with() with attribute/property stubs")
replace_with = _getattr(self, "_replace_with")
original = self.__dict__.get("_original")
if replace_with:
self.__raise(FlexmockError, "replace_with cannot be specified twice")
if function == original:
self._pass_thru = True
self._replace_with = function
return self
def and_yield(self, *kargs: Any) -> "Expectation":
"""Specifies the list of items to be yielded on successive method calls.
In effect, the mocked object becomes a generator.
Returns:
- self, i.e. can be chained with other Expectation methods
"""
if not self._callable:
self.__raise(FlexmockError, "can't use and_yield() with attribute stubs")
return self.and_return(iter(kargs))
def _verify(self, final: bool = True) -> None:
"""Verify that this expectation has been met.
Args:
final: boolean, True if no further calls to this method expected
(skip checking at_least expectations when False)
Raises:
MethodCallError Exception
"""
failed, message = self._verify_number_of_calls(final)
if failed and not self._verified:
self._verified = True
self.__raise(
MethodCallError,
(
f"{_format_args(str(self._name), self._args)} expected to be called "
f"{message}, called {self._times_called} "
f"{'time' if self._times_called == 1 else 'times'}"
),
)
def _verify_number_of_calls(self, final: bool) -> Tuple[bool, str]:
failed = False
message = ""
expected_calls = _getattr(self, "_expected_calls")
times_called = _getattr(self, "_times_called")
if expected_calls[EXACTLY] is not None:
message = f"exactly {expected_calls[EXACTLY]}"
if final:
if times_called != expected_calls[EXACTLY]:
failed = True
else:
if times_called > expected_calls[EXACTLY]:
failed = True
message += " time" if expected_calls[EXACTLY] == 1 else " times"
else:
if final and expected_calls[AT_LEAST] is not None:
message = f"at least {expected_calls[AT_LEAST]}"
if times_called < expected_calls[AT_LEAST]:
failed = True
message += " time" if expected_calls[AT_LEAST] == 1 else " times"
if expected_calls[AT_MOST] is not None:
if message:
message += " and "
message += f"at most {expected_calls[AT_MOST]}"
if times_called > expected_calls[AT_MOST]:
failed = True
message += " time" if expected_calls[AT_MOST] == 1 else " times"
return failed, message
def _reset(self) -> None:
"""Returns the methods overriden by this expectation to their originals."""
_mock = _getattr(self, "_mock")
if not isinstance(_mock, Mock):
original = self.__dict__.get("_original")
if original:
# name may be unicode but pypy demands dict keys to be str
name = str(_getattr(self, "_name"))
if hasattr(_mock, "__dict__") and name in _mock.__dict__ and self._local_override:
delattr(_mock, name)
elif (
hasattr(_mock, "__dict__")
and name in _mock.__dict__
and isinstance(_mock.__dict__, dict)
):
_mock.__dict__[name] = original
else:
setattr(_mock, name, original)
del self
class FlexmockContainer:
"""Holds global hash of object/expectation mappings."""
flexmock_objects: Dict[Mock, List[Expectation]] = {}
properties: Dict[Any, List[str]] = {}
ordered: List[Expectation] = []
last: Optional[Expectation] = None
@classmethod
def reset(cls) -> None:
"""Reset flexmock state."""
cls.ordered = []
cls.last = None
cls.flexmock_objects = {}
cls.properties = {}
@classmethod
def get_flexmock_expectation(
cls, obj: Mock, name: Optional[str] = None, args: Optional[Any] = None
) -> Optional[Expectation]:
"""Retrieves an existing matching expectation."""
if args is None:
args = {"kargs": (), "kwargs": {}}
if not isinstance(args, dict):
args = {"kargs": args, "kwargs": {}}
if not isinstance(args["kargs"], tuple):
args["kargs"] = (args["kargs"],)
if name and obj in cls.flexmock_objects:
found = None
for expectation in reversed(cls.flexmock_objects[obj]):
if expectation._name == name and expectation._match_args(args):
if expectation in cls.ordered or not expectation._ordered and not found:
found = expectation
if found and found._ordered:
cls._verify_call_order(found, args)
return found
return None
@classmethod
def _verify_call_order(cls, expectation: Expectation, args: Dict[str, Any]) -> None:
if not cls.ordered:
next_method = cls.last
else:
next_method = cls.ordered.pop(0)
cls.last = next_method
if expectation is not next_method and next_method is not None:
raise CallOrderError(
f"{_format_args(str(expectation._name), args)} called before "
f"{_format_args(str(next_method._name), next_method._args)}"
)
@classmethod
def add_expectation(cls, obj: Mock, expectation: Expectation) -> None:
"""Add expectation."""
if obj in cls.flexmock_objects:
cls.flexmock_objects[obj].append(expectation)
else:
cls.flexmock_objects[obj] = [expectation]
@classmethod
def get_expectations_with_name(cls, obj: Mock, name: str) -> List[Expectation]:
"""Get all expectations for given name."""
return [x for x in FlexmockContainer.flexmock_objects.get(obj, []) if x._name == name]
@classmethod
def add_teardown_property(cls, obj: Any, name: str) -> None:
"""Add teardown property."""
if obj in cls.properties:
cls.properties[obj].append(name)
else:
cls.properties[obj] = [name]
@classmethod
def teardown_properties(cls) -> None:
"""Teardown properties."""
for obj, names in cls.properties.items():
for name in names:
delattr(obj, name)
def flexmock(spec: Optional[Any] = None, **kwargs: Any) -> Mock:
"""Main entry point into the flexmock API.
This function is used to either generate a new fake object or take
an existing object (or class or module) and use it as a basis for
a partial mock. In case of a partial mock, the passed in object
is modified to support basic Mock class functionality making
it unnecessary to make successive flexmock() calls on the same
objects to generate new expectations.
Examples:
>>> flexmock(SomeClass)
>>> SomeClass.should_receive('some_method')
NOTE: it's safe to call flexmock() on the same object, it will detect
when an object has already been partially mocked and return it each time.
Args:
- spec: object (or class or module) to mock
- kwargs: method/return_value pairs to attach to the object
Returns:
Mock object if no spec is provided. Otherwise return the spec object.
"""
if spec is not None:
return _create_partial_mock(spec, **kwargs)
# use this intermediate class to attach properties
klass = type("MockClass", (Mock,), {})
return klass(**kwargs) # type: ignore
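# Usage sketch (illustrative, not part of the library source; SomeClass is a
# placeholder for any user-defined class):
#
#     fake = flexmock(name="fake", connect=lambda: True)  # a pure fake object
#     flexmock(SomeClass).should_receive("some_method").and_return("value").once()
#     SomeClass().some_method()  # -> "value"
#     flexmock_teardown()  # verifies the once() expectation and restores SomeClass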
def _getattr(obj: object, name: str) -> Any:
"""Convenience wrapper to work around custom __getattribute__."""
return object.__getattribute__(obj, name)
def _arg_to_str(arg: Any) -> str:
if isinstance(arg, RE_TYPE):
return f"/{arg.pattern}/"
if isinstance(arg, str):
return f'"{arg}"'
return f"{arg}"
def _format_args(name: str, arguments: Optional[Dict[str, Any]]) -> str:
if arguments is None:
arguments = {"kargs": (), "kwargs": {}}
kargs = ", ".join(_arg_to_str(arg) for arg in arguments["kargs"])
kwargs = ", ".join(f"{k}={_arg_to_str(v)}" for k, v in arguments["kwargs"].items())
if kargs and kwargs:
args = f"{kargs}, {kwargs}"
else:
args = f"{kargs}{kwargs}"
return f"{name}({args})"
def _create_partial_mock(obj_or_class: Any, **kwargs: Any) -> Mock:
"""Create partial mock."""
matches = [x for x in FlexmockContainer.flexmock_objects if x._object is obj_or_class]
if matches:
mock = matches[0]
else:
mock = Mock()
mock._object = obj_or_class
for name, return_value in kwargs.items():
if hasattr(return_value, "__call__"):
mock.should_receive(name).replace_with(return_value)
else:
mock.should_receive(name).and_return(return_value)
if not matches:
FlexmockContainer.add_expectation(mock, Expectation(obj_or_class))
if _attach_flexmock_methods(mock, Mock, obj_or_class) and not inspect.isclass(mock._object):
mock = mock._object
return mock
def _attach_flexmock_methods(mock: Mock, flexmock_class: Type[Mock], obj: Any) -> bool:
try:
for attr in UPDATED_ATTRS:
if hasattr(obj, attr):
if getattr(obj, attr).__code__ is not getattr(flexmock_class, attr).__code__:
return False
for attr in UPDATED_ATTRS:
_setattr(obj, attr, getattr(mock, attr))
except TypeError as exc:
raise MockBuiltinError(
"Python does not allow you to mock builtin objects or modules. "
"Consider wrapping it in a class you can mock instead"
) from exc
except AttributeError as exc:
raise MockBuiltinError(
"Python does not allow you to mock instances of builtin objects. "
"Consider wrapping it in a class you can mock instead"
) from exc
return True
def _arguments_match(arg: Any, expected_arg: Any) -> bool:
if expected_arg == arg:
return True
if inspect.isclass(expected_arg) and isinstance(arg, expected_arg):
return True
if isinstance(expected_arg, RE_TYPE) and expected_arg.search(arg):
return True
return False
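# Matching semantics in brief (illustrative):
#
#     _arguments_match(1, 1)                    # True: plain equality
#     _arguments_match(1, int)                  # True: expected class, instance given
#     _arguments_match("abc", re.compile("b"))  # True: expected regex searches the string
#     _arguments_match("abc", re.compile("z"))  # False: no rule matches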
def _setattr(obj: Any, name: str, value: Any) -> bool:
"""Ensure we use local __dict__ where possible."""
local_override = False
if hasattr(obj, "__dict__") and isinstance(obj.__dict__, dict):
if name not in obj.__dict__:
# Overriding attribute locally on an instance.
local_override = True
obj.__dict__[name] = value
else:
if inspect.isclass(obj) and not vars(obj).get(name):
# Overriding derived attribute locally on a child class.
local_override = True
setattr(obj, name, value)
return local_override
def _isproperty(obj: Any, name: str) -> bool:
if isinstance(obj, Mock):
return False
if not inspect.isclass(obj) and hasattr(obj, "__dict__") and name not in obj.__dict__:
attr = getattr(obj.__class__, name)
if isinstance(attr, property):
return True
elif inspect.isclass(obj):
attr = getattr(obj, name)
if isinstance(attr, property):
return True
return False
def _is_class_method(method: Callable[..., Any], name: str) -> bool:
"""Check if a method is a classmethod.
This function checks all the classes in the class method resolution in order
to get the correct result for derived methods as well.
"""
bound_to = getattr(method, "__self__", None)
if not inspect.isclass(bound_to):
return False
for cls in inspect.getmro(bound_to):
descriptor = vars(cls).get(name)
if descriptor is not None:
return isinstance(descriptor, classmethod)
return False
def _is_static_method(obj: Any, name: str) -> bool:
try:
return isinstance(inspect.getattr_static(obj, name), staticmethod)
except AttributeError:
# AttributeError is raised when mocking a proxied object
if hasattr(obj, "__mro__"):
for cls in inspect.getmro(obj):
descriptor = vars(cls).get(name)
if descriptor is not None:
return isinstance(descriptor, staticmethod)
return False
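# A minimal usage sketch, assuming the public flexmock() entry point above
# ("some_object" is a hypothetical target):
#
#   mock = flexmock()                          # stand-alone Mock instance
#   flexmock(some_object, greet="hello")       # partial mock: greet() returns "hello"
#   flexmock(some_object, greet=lambda n: n)   # callables replace the method instead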
|
python
|
import bs4
import re
from common import config
# Regular expression definitions
is_well_former_link = re.compile(r'^https?://.+$')
is_root_path = re.compile(r'^/.+$')
def _build_link(host, link):
if is_well_former_link.match(link):
return link
elif is_root_path.match(link):
return '{}{}'.format(host, link)
else:
return '{host}/{uri}'.format(host=host, uri=link)
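# Illustrative behaviour, assuming host = 'https://example.com':
#   _build_link(host, 'https://other.com/a') -> 'https://other.com/a'
#   _build_link(host, '/section/a')          -> 'https://example.com/section/a'
#   _build_link(host, 'section/a')           -> 'https://example.com/section/a'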
class NewsPage:
def __init__(self, news_site_uid):
self._config = config()['news_sites'][news_site_uid]
self._queries = self._config['queries']
self._url = self._config['url']
self._html = None
def _select(self, query_string):
return self._html.select(query_string)
def _select_list(self, query_string_list):
results = []
for query_string in query_string_list:
results = results + self._html.select(query_string)
return results
@property
def url_csv(self):
return self._url
async def visit(self, session):
async with session.get(self._url) as response:
text = await response.text()
self._html = bs4.BeautifulSoup(text, 'html.parser')
class HomePage(NewsPage):
def __init__(self, news_site_uid):
super().__init__(news_site_uid)
@property
def article_links(self):
link_list = []
for link in self._select_list(self._queries['homepage_article_links']):
if link and link.has_attr('href'):
link_list.append(link)
return set(link['href'] for link in link_list)
class ArticlePage(NewsPage):
def __init__(self, news_site_uid, article_url):
super().__init__(news_site_uid)
self._url = _build_link(self._url, article_url)
@property
def body_csv(self):
results = self._select(self._queries['article_body'])
text = ''
for result in results:
text += result.text
return text
@property
def title_csv(self):
result = self._select(self._queries['article_title'])
return result[0].text if len(result) else ''
|
python
|
'''
Created on Apr 4, 2016
@author: Noe
'''
class MyClass(object):
'''
classdocs
'''
def __init__(self, params):
'''
Constructor
'''
|
python
|
#!/usr/bin/python
from __future__ import absolute_import, division, print_function, unicode_literals
import pi3d
import ConfigParser
from PIL import Image
import sys
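# Illustrative config.ini layout assumed by the reads below (values are examples):
#   [client]
#   x_offset = 0
#   y_offset = 0
#   x_virtual = 1920
#   y_virtual = 1080
#   default_image = default.png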
#read config
Config = ConfigParser.ConfigParser()
Config.read("config.ini")
xloc = int(Config.get("client",'x_offset'))
yloc = int(Config.get("client",'y_offset'))
x_virtual = int(Config.get("client",'x_virtual'))
y_virtual = int(Config.get("client",'y_virtual'))
ifile = Config.get("client","default_image")
im = Image.open(ifile)
xsize,ysize = im.size
zindex = 5
DISPLAY = pi3d.Display.create(x=0, y=0)
DISPLAY.set_background(0,0,0,0) #black
xloc = xloc + (x_virtual - DISPLAY.width) / 2
yloc = yloc - (y_virtual - DISPLAY.height) / 2
##print("foo %d " % DISPLAY.width)
#sys.exit
shader = pi3d.Shader("uv_flat")
CAMERA = pi3d.Camera(is_3d=False)
mykeys = pi3d.Keyboard()
sprite = pi3d.ImageSprite(ifile, shader, w=xsize, h=ysize, z=zindex)
while DISPLAY.loop_running():
sprite.position(xloc, yloc, zindex)
sprite.draw()
if mykeys.read() == 27:
mykeys.close()
DISPLAY.destroy()
break
|
python
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
def solve(s):
open_p = ('[', '{', '(')
close_p = (']', '}', ')')
pair = dict(zip(close_p, open_p)) # key: close_p
stack = list()
for c in s:
if c in open_p:
stack.append(c)
if c in close_p:
if len(stack) == 0:
print('NO')
return
top = stack.pop()
if top != pair[c]:
print('NO')
return
if len(stack) != 0:
print('NO')
return
print('YES')
return
num_tc = int(sys.stdin.readline())
for _ in range(num_tc):
s = sys.stdin.readline().strip()
solve(s)
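# Example session (assumed stdin):
#   3
#   {[()]}
#   {[(])}
#   {{[[(())]]}}
# prints:
#   YES
#   NO
#   YES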
|
python
|
import aiohttp
import os
import pytest
from tokki.travis import TravisClient
from tokki.enums import Status
TOKEN = os.environ["TRAVISCI_TOKEN"]
AGENT = "Tests for Tokki +(https://github.com/ChomusukeBot/Tokki)"
@pytest.mark.asyncio
async def test_no_login():
with pytest.raises(TypeError, match=r": 'token'"):
TravisClient()
@pytest.mark.asyncio
async def test_no_agent():
with pytest.raises(TypeError, match=r": 'useragent'"):
TravisClient(TOKEN)
@pytest.mark.asyncio
async def test_not_found():
with pytest.raises(aiohttp.ClientResponseError) as exception:
client = TravisClient(TOKEN, AGENT)
await client.get_repo("ChomusukeBot/ThisIsAnInvalidRepo")
assert exception.value.status == 404
@pytest.mark.asyncio
async def test_repo():
client = TravisClient(TOKEN, AGENT)
repo = await client.get_repo("ChomusukeBot/TestRepo")
assert repo.name == "TestRepo"
assert repo.site_slug == "ChomusukeBot/TestRepo"
assert repo.repo_slug == "ChomusukeBot/TestRepo"
assert repo.owner == "ChomusukeBot"
assert repo.default_branch == "master"
@pytest.mark.asyncio
async def test_trigger_build():
client = TravisClient(TOKEN, AGENT)
repo = await client.get_repo("ChomusukeBot/TestRepo")
await repo.trigger_build(branch="master", message="Run from Tokki's tests")
@pytest.mark.asyncio
async def test_get_builds():
client = TravisClient(TOKEN, AGENT)
repo = await client.get_repo("ChomusukeBot/TestRepo")
builds = await repo.get_builds(quantity=5)
assert len(builds) == 5
for build in builds:
assert type(build.id) is int
assert type(build.version) is str
assert type(build.status) is Status
assert type(build.branch) is str
|
python
|
import argparse
parse = argparse.ArgumentParser(description="test")
parse.add_argument('count', action='store', type=int)
parse.add_argument('units', action='store')
parse.add_argument('priseperunit', action='store')
print(parse.parse_args())
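# Example invocation (hypothetical):
#   $ python test.py 3 apples 1.50
#   Namespace(count=3, priseperunit='1.50', units='apples')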
|
python
|
#!/usr/bin/env python3
import numpy
import cv2
import math
from entities.image import Image
from entities.interfaces.scene_interface import SceneInterface
from entities.aligned.aligned_band import AlignedBand
from entities.aligned.aligned_image import AlignedImage
from entities.aligned.aligned_true_color import AlignedTrueColor
from entities.motion_vectors import MotionVectors, MotionVectorsArrows
from entities.ndsi import NDSI
from entities.motion_predicted_ndsi import MotionPredictedNDSI, MotionPredictedNDSIOverlay
from utils.utils import debug_trace
from utils import logging
logger = logging.getLogger(__name__)
class AlignedScene(SceneInterface):
MATCHES_INCLUDED_PERCENT = 0.25
ALLOWED_SHIFTING_DISTANCE = 200
def __init__(self, scene, reference_scene, previous_scene):
SceneInterface.__init__(self)
self.__scene = scene
self.__reference_scene = reference_scene
self.__affine_transform_matrix = None
self.__matches = None
self._red_band = AlignedBand(scene.red_band(), reference_scene, self)
self._green_band = AlignedBand(scene.green_band(), reference_scene, self)
self._blue_band = AlignedBand(scene.blue_band(), reference_scene, self)
self._nir_band = AlignedBand(scene.nir_band(), reference_scene, self)
self._swir1_band = AlignedBand(scene.swir1_band(), reference_scene, self)
self.__bands = [
self._red_band,
self._green_band,
self._blue_band,
self._nir_band,
self._swir1_band,
]
self.__ndsi = NDSI(self._green_band, self._swir1_band)
self.__bands.append(self.__ndsi)
self.__drawn_matches_image = DrawnMatchesImage(scene, reference_scene, self)
self.__bands.append(self.__drawn_matches_image)
self.__true_color = AlignedTrueColor(scene.true_color(), reference_scene, self)
self.__bands.append(self.__true_color)
if previous_scene is not None:
self.__motion_vectors = MotionVectors(previous_scene.ndsi(), self.__ndsi)
self.__bands.append(self.__motion_vectors)
self.__motion_vectors_arrows = MotionVectorsArrows(self.__motion_vectors,
previous_scene.ndsi(),
self.__ndsi)
self.__bands.append(self.__motion_vectors_arrows)
self.__motion_predicted_ndsi = MotionPredictedNDSI(self.__motion_vectors, self.ndsi())
self.__bands.append(self.__motion_predicted_ndsi)
self.__motion_predicted_overlay_ndsi = \
MotionPredictedNDSIOverlay(self.__motion_predicted_ndsi, self.ndsi())
self.__bands.append(self.__motion_predicted_overlay_ndsi)
else:
self.__motion_vectors = None
self.__motion_predicted_ndsi = None
def clear(self):
for b in self.__bands:
b.clear()
def affine_transform_matrix(self) -> numpy.ndarray:
if self.__affine_transform_matrix is None:
self.__calculate_affine_transform_matrix()
return self.__affine_transform_matrix
def __calculate_affine_transform_matrix(self) -> None:
self.__matches = self.__match_descriptors()
self.__prune_low_score_matches()
reference_points, image_points = self.__prune_matches_by_euclidean_distance()
        if any(element is None for element in [image_points, reference_points]):
            logger.error("Affine transformation matrix could not be computed due to "
                         "insufficient valid matches.")
            self.__affine_transform_matrix = None
            return
try:
affine_transform_matrix, inliers = cv2.estimateAffine2D(image_points,
reference_points,
None,
cv2.RANSAC)
self.__affine_transform_matrix = affine_transform_matrix
logger.notice("Affine transformation matrix for scene {} with reference {}\n{}"
.format(self.__scene, self.__reference_scene, affine_transform_matrix))
except Exception as e:
logger.error("Affine transformation failed.\n{}".format(e))
def __match_descriptors(self) -> list:
descriptor_match = cv2.DescriptorMatcher_create(cv2.DESCRIPTOR_MATCHER_BRUTEFORCE_HAMMING)
reference_descriptors = self.__reference_scene.descriptors()
image_descriptors = self.__scene.descriptors()
matches = descriptor_match.match(reference_descriptors, image_descriptors)
return matches
def __prune_low_score_matches(self) -> None:
self.__matches.sort(key=lambda x: x.distance, reverse=False)
matches_count = len(self.__matches)
pruned_matches_count = int(matches_count * self.MATCHES_INCLUDED_PERCENT)
self.__matches = self.__matches[:pruned_matches_count]
def __prune_matches_by_euclidean_distance(self) -> tuple:
pruned_matches = []
reference_points = []
image_points = []
for match in self.__matches:
reference_point = self.__reference_scene.keypoints()[match.queryIdx].pt
image_point = self.__scene.keypoints()[match.trainIdx].pt
if self.__valid_shifting_distance(reference_point, image_point):
reference_points.append(reference_point)
image_points.append(image_point)
pruned_matches.append(match)
self.__matches = pruned_matches
reference_points = numpy.array(reference_points)
image_points = numpy.array(image_points)
return reference_points, image_points
def __valid_shifting_distance(self, reference_point, image_point) -> bool:
euclidean_distance = self.__euclidean_distance(reference_point, image_point)
if euclidean_distance < AlignedScene.ALLOWED_SHIFTING_DISTANCE:
return True
else:
return False
@staticmethod
def __euclidean_distance(image_point, reference_point) -> float:
x_distance = abs(reference_point[0] - image_point[0])
y_distance = abs(reference_point[1] - image_point[1])
distance = math.sqrt(math.pow(x_distance, 2) + (math.pow(y_distance, 2)))
return distance
def scene_id(self) -> str:
return self.__scene.scene_id()
def scene_path(self) -> str:
return self.__scene.scene_path()
def bands(self) -> list:
return self.__bands
def thumbnail(self) -> AlignedBand:
return self.true_color()
def true_color(self) -> AlignedImage:
return self.__true_color
def ndsi(self) -> NDSI:
return self.__ndsi
def matches(self):
if self.__matches is None:
self.affine_transform_matrix()
return self.__matches
def motion_predicted_ndsi(self) -> NDSI:
return self.__motion_predicted_ndsi
    def __str__(self):
        return "AlignedScene[{}]".format(self.scene_id())
    def iterate_over_all(self):
        logger.notice(str(self))
for b in self.__bands:
if b.name() == "Motion Vectros":
continue
b.raw_data()
# Make sure we don't fill the RAM
self.__bands = None
self.__ndsi = None
self.__motion_vectors = None
self.__motion_predicted_ndsi = None
self._red_band = None
self._green_band = None
self._blue_band = None
self._nir_band = None
self._swir1_band = None
class DrawnMatchesImage(Image):
NAME = "Drawn Matches"
def __init__(self, scene, reference_scene, aligned_scene):
self.__reference_scene = reference_scene
self.__scene = scene
self.__aligned_scene = aligned_scene
def name(self):
return self.NAME
def scene_name(self):
return self.__scene.scene_id().scene_id()
def raw_data(self):
pass
def clear(self):
pass
def visual_data(self):
return self.__matches_from_reference_to_image()
def __matches_from_reference_to_image(self):
reference_green_band_8bit = (self.__reference_scene.green_band().visual_data() >> 8).astype(numpy.uint8)
green_band_8bit = (self.__scene.green_band().visual_data() >> 8).astype(numpy.uint8)
drawn_matches_image = cv2.drawMatches(reference_green_band_8bit,
self.__reference_scene.keypoints(),
green_band_8bit,
self.__scene.keypoints(),
self.__aligned_scene.matches(),
None, matchColor=(0, 255, 255),
singlePointColor=(100, 0, 0),
flags=cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
return drawn_matches_image
|
python
|
import os
import json
import scipy.io
import pandas
import itertools
import numpy as np
from PIL import Image
from collections import OrderedDict
info = OrderedDict(description = "Testset extracted from put-in-context paper (experiment H)")
licenses = OrderedDict()
catgs = ['airplane','apple','backpack','banana','baseball bat','baseball glove','bench','bicycle','bird','boat','book','bottle','bowl','bus','cake','car','carrot','cell phone','chair','clock','cow','cup','dog','donut','fire hydrant','fork','frisbee','horse','kite','knife','motorcycle','mouse','orange','parking meter','potted plant','remote','sheep','sink','skateboard','skis','snowboard','spoon','sports ball','stop sign','suitcase','surfboard','tennis racket','tie','toothbrush','traffic light','train','truck','umbrella','vase','wine glass']
#imagedir_ori = '/home/mengmi/Projects/Proj_context2/Datasets/MSCOCO/trainColor_oriimg'
#imagedir_bin = '/home/mengmi/Projects/Proj_context2/Datasets/MSCOCO/trainColor_binimg'
imagedir_ori = '/home/mengmi/Projects/Proj_context2/Matlab/Stimulus/keyframe_expH'
imagedir_bin = '/home/mengmi/Projects/Proj_context2/Matlab/Stimulus/keyframe_expA'
#object_data = pandas.read_csv('/home/mengmi/Projects/Proj_context2/Datalist/trainColor_oriimg.txt', header=-1)
#binary_data = pandas.read_csv('/home/mengmi/Projects/Proj_context2/Datalist/trainColor_binimg.txt', header=-1)
#labels = pandas.read_csv('/home/mengmi/Projects/Proj_context2/Datalist/trainColor_label.txt', header=-1)
object_data = pandas.read_csv('/home/dimitar/experiments_I_and_J/expIJ/test_expJ_Color_oriimg.txt', header=None)
binary_data = pandas.read_csv('/home/dimitar/experiments_I_and_J/expIJ/test_expJ_Color_binimg.txt', header=None)
labels = pandas.read_csv('/home/dimitar/experiments_I_and_J/expIJ/test_expJ_Color_label.txt', header=None)
image_cnt = 0
images = [] # fill this list with image annotations
categories = [] # fill this list with category annotations
annotations = [] # fill this list with object annotations
for (_, s), (_, s1), (_, label) in zip(object_data.iterrows(), binary_data.iterrows(), labels.iterrows()):
image = Image.open(os.path.join(imagedir_ori, s[0]))
bin_mask = np.array(Image.open(os.path.join(imagedir_bin, s1[0])))
A = np.argwhere(bin_mask >= 200)
top, left = A[0]
bottom, right = A[-1]
    if bottom < A[-2][0] or right < A[-2][1]:
        bottom, right = A[-2]
images.append(OrderedDict(file_name = s[0], height = image.height, width = image.width, id = image_cnt))
annotations.append(OrderedDict(area = (bottom-top)*(right-left), iscrowd = 0, image_id = image_cnt, bbox = [left, top, right - left, bottom - top], category_id = label[0], id = image_cnt))
image_cnt += 1
for i in range(1, 56):
categories.append(OrderedDict(id = i, name = catgs[i-1]))
cocoannotations = OrderedDict(info = info, licenses = licenses, images = images, annotations = annotations, categories = categories)
# save annotations
with open("annotations/test_annotations_exp_J.json", "w") as f:
json.dump(cocoannotations, f)
|
python
|
# See https://michaelgoerz.net/notes/extending-sphinx-napoleon-docstring-sections.html
# # -- Fixing bug with google docs showing attributes-------------
from sphinx.ext.napoleon.docstring import GoogleDocstring
# first, we define new methods for any new sections and add them to the class
def parse_keys_section(self, section):
return self._format_fields('Keys', self._consume_fields())
GoogleDocstring._parse_keys_section = parse_keys_section
def parse_attributes_section(self, section):
return self._format_fields('Attributes', self._consume_fields())
GoogleDocstring._parse_attributes_section = parse_attributes_section
def parse_class_attributes_section(self, section):
return self._format_fields('Class Attributes', self._consume_fields())
GoogleDocstring._parse_class_attributes_section = parse_class_attributes_section
# we now patch the parse method to guarantee that the above methods are
# assigned to the _sections dict
def patched_parse(self):
self._sections['keys'] = self._parse_keys_section
self._sections['class attributes'] = self._parse_class_attributes_section
self._unpatched_parse()
GoogleDocstring._unpatched_parse = GoogleDocstring._parse
GoogleDocstring._parse = patched_parse
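# With the patch in place, a Google-style docstring can use the new sections,
# e.g. (illustrative):
#
#   def load_config():
#       """Load settings.
#
#       Keys:
#           host (str): Hostname to bind.
#           port (int): Port number.
#       """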
|
python
|
import re
import random
import string
from django import template
from django.template import Context
from django.template.loader import get_template
from django.contrib.auth.models import Group
from django.core.exceptions import PermissionDenied
from crm.models import Person
from cedar_settings.models import GeneralSetting
from cedar.utils.misc_utils import get_back_url_from_context
register = template.Library()
@register.inclusion_tag('cedar/react.html')
def react():
pass
@register.inclusion_tag('cedar/react-dom.html')
def react_dom():
pass
@register.inclusion_tag('cedar/griddle.html')
def griddle():
pass
@register.inclusion_tag('cedar/spinner.html')
def spinner():
pass
@register.inclusion_tag('cedar/back-arrow-link.html')
def back_arrow(div_classes="col s1"):
return {
'div_classes': div_classes
}
@register.inclusion_tag('cedar/user-menu.html', takes_context=True)
def user_menu(context, *args, **kwargs):
# Requires a kwarg: "user_menu_id".
user_menu_id = kwargs.get('user_menu_id')
try:
if context['user'].is_authenticated():
person = Person.objects.get(user_account=context['user'])
else:
raise PermissionDenied
except Person.DoesNotExist:
person = None
# except
return {
'person': person,
'user_menu_id': user_menu_id,
'context': context,
}
@register.inclusion_tag('cedar/messages.html', takes_context=True)
def messages(context, *args, **kwargs):
return {'context': context, }
# is_choice_selected:
# For use when rebuilding modelmultiplechoice fields manually,
# trying to figure out which are selected.
@register.filter()
def is_choice_selected(choice, field_values):
if not field_values:
return ""
# choice id is an int:
if str(choice[0]) in field_values:
return "selected"
else:
return ""
# is_disabled:
# takes a user object and a permission string and checks if the
# user has that permission. If he/she doesn't, it returns the string "disabled"
# which can be used in a materializecss button class.
@register.filter()
def is_disabled(user, permission):
if user.has_perm(permission):
return ""
else:
return "disabled"
# Use this to see if you are in a CREATEVIEW or an UPDATEVIEW.
# useful when re-using a model form for updates and creates:
# Usage:
# {% is_update_view "Update Project" "Create Project" as submit_value %}
@register.assignment_tag(takes_context=True)
def is_update_view(context, text_if_true, text_if_false):
try:
object = context.get('object')
        int(object.pk)  # This should fail if a normal object w/ pk wasn't supplied.
return text_if_true
except AttributeError as e:
return text_if_false
@register.assignment_tag()
def get_dict_val(dictionary, key):
try:
return dictionary[key]
    except (KeyError, TypeError):
return None
@register.assignment_tag()
def dict_has_key(dictionary, key):
if key in dictionary:
return True
else:
return False
@register.filter()
def replace_highlight_tags(text, span_class):
return text.replace("<em>", "<span class=\"{}\">".format(span_class)).replace("</em>", "</span>")
@register.assignment_tag(takes_context=True)
def chunkify_search_text(context, search_result, chunk_length):
    # Split the search result text into fixed-length chunks.
    t = search_result.text
    return [t[i:i + chunk_length] for i in range(0, len(t), chunk_length)]
@register.assignment_tag
def sanitize_old(text, repl_char, query):
# Get list of interview participant initials:
participants = Person.objects.filter(roles__name__contains="Participant")
# initials = [participant.initials for participant in participants]
for p in participants:
# Redact initials:
if len(p.initials) > 1: # Skip bad or weird initials
# text = text.replace(p.initials, repl_char * len(p.initials))
initials_str = p.initials.strip()
text = re.sub(r'\b{}\b'.format(initials_str), repl_char * len(initials_str), text)
# Redact names - 5 variations:
# # "Fname Lname"
# name_str = "{} {}".format(p.name_first, p.name_last).strip()
# text = text.replace(name_str, repl_char * len(name_str))
#
# # "FnameLname"
# name_str = "{}{}".format(p.name_first, p.name_last).strip()
# text = text.replace(name_str, repl_char * len(name_str))
# "Fname"
if p.name_first:
name_str = p.name_first.strip()
text = re.sub(r'\b{}\b'.format(name_str), repl_char * len(name_str), text)
# "Lname"
if p.name_first:
name_str = p.name_last.strip()
text = re.sub(r'\b{}\b'.format(name_str), repl_char * len(name_str), text)
# "Indigenous"
if p.indigenous_name:
name_str = p.indigenous_name.strip()
text = text.replace(name_str, repl_char * len(name_str))
return text
@register.filter()
def concat(val1, val2):
return str(val1) + str(val2)
@register.assignment_tag()
def get_model_class(obj):
return obj.__class__
@register.assignment_tag()
def get_model_class_name(obj):
return obj.__class__.__name__
@register.filter()
def get_subclass_model_class_name(obj):
model = obj.__class__
return model.objects.get_subclass(id=obj.id).__class__.__name__
@register.assignment_tag()
def get_model_subclass(obj):
model = obj.__class__
return model.objects.get_subclass(id=obj.id)
@register.assignment_tag()
def is_submodel(obj1, obj2):
return issubclass(obj1.__class__, obj2.__class__)
# -------------------------------------------
# DEPRECATED. See Readme for implementing permissions.
# To use: wrap any html elements with:
# {% if request.user|can_view_sensitive %} {% endif %}
# and they will be filtered out based on user role.
# Currently, "Explorers" are the only restricted group,
# any other role will be able to see stuff.
# -------------------------------------------
@register.filter
def can_view_sensitive(user):
try:
if Group.objects.get(name='Explorer') in user.groups.all():
return False
else:
return True
except Exception as err:
return False
@register.inclusion_tag('cedar/back_button.html', takes_context=True)
def back_button(context, extra=None):
'''
Tries to set a button anchor with the http referer url. Disables
button if no url present
:param context:
:param extra: something to append on to the end of the url
:return:
'''
back_url = get_back_url_from_context(context)
if back_url:
if extra:
# add ending slash if not present
if back_url[-1] != "/":
back_url += "/"
back_url += extra
return {'BACK_URL': back_url}
else:
return {'BACK_URL': False}
@register.inclusion_tag('cedar/cancel_button.html', takes_context=True)
def cancel_button(context, extra=None):
'''
Tries to set a button anchor with the http referer url. Disables
button if no url present.
This actually just called back_button()
:param context:
:param extra: something to append on to the end of the url
:return:
'''
return back_button(context, extra)
@register.inclusion_tag('cedar/edit_submit_button.html', takes_context=True)
def edit_submit_button(context, form_selector, action_text=None):
'''
:param context:
:param form_selector: jquery selector string to get the form
:param action_text: button text. if None, will try to decide if it's a New or Update form
:return:
'''
if not action_text:
action_text = is_update_view(context, "Update", "Create")
return {
'form_selector': form_selector,
'action_text': action_text
}
@register.inclusion_tag('cedar/edit_delete_button.html', takes_context=True)
def edit_delete_button(context, delete_url_string, perm=None):
'''
:param context:
:param delete_url_string: if I call it "delete_url" it would conflict with the template var "delete_url"
:param perm: permission to check, if user doesn't have perm the button will be disabled. Can be None for no check.
:return:
'''
return {
'delete_url': delete_url_string,
        'disabled_css': '' if not perm else is_disabled(context['request'].user, perm)
}
@register.inclusion_tag('cedar/edit_cancel_button.html', takes_context=True)
def edit_cancel_button(context, cancel_url_string):
'''
What's that, a THIRD cancel button tag? Yes, yes it is.
:param context:
:param cancel_url_string
:return:
'''
return {
'cancel_url': cancel_url_string,
}
@register.assignment_tag()
def get_background_url():
url_obj = GeneralSetting.objects.get('cedar__default_splash_page_background_img')
if isinstance(url_obj, str):
return url_obj
else:
return url_obj.file.url
@register.filter()
def render_boolean(value):
bool_template = get_template("cedar/boolean_template.html")
return bool_template.render(Context({'value': value}))
@register.assignment_tag()
def random_string(num_chars=4):
return ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(num_chars))
|
python
|
import os
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.http import HttpResponseRedirect, HttpResponse, HttpResponseForbidden, Http404
from django.core.urlresolvers import reverse
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.template.loader import select_template
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
else:
notification = None
from threadedcomments.models import ThreadedComment
from topics.forms import TopicForm
from topics.models import Topic
class ContentApp(object):
def __init__(self, group_model, content_app_name):
self.group_model = group_model
self.content_app_name = content_app_name
def render(self, template_name, context, context_instance=None):
ctype = ContentType.objects.get_for_model(self.group_model)
return render_to_response([
'%s/%s/%s' % (ctype.app_label, self.content_app_name, template_name),
'%s/%s' % (self.content_app_name, template_name),
], context, context_instance=context_instance)
def get_group(self, slug):
return self.group_model._default_manager.get(slug=slug)
def topics(request, group_slug=None, form_class=TopicForm, template_name="topics.html", app=None):
try:
group = app.get_group(group_slug)
except ObjectDoesNotExist:
raise Http404
is_member = request.user.is_authenticated() and group.user_is_member(request.user) or False
if request.method == "POST":
if request.user.is_authenticated():
if is_member:
topic_form = form_class(request.POST)
if topic_form.is_valid():
topic = topic_form.save(commit=False)
topic.group = group
topic.creator = request.user
topic.save()
request.user.message_set.create(message="You have started the topic %s" % topic.title)
topic_form = form_class() # @@@ is this the right way to reset it?
else:
request.user.message_set.create(message="You are not a member and so cannot start a new topic")
topic_form = form_class()
else:
return HttpResponseForbidden()
else:
topic_form = form_class()
topics = group.get_related_objects(Topic)
return app.render(template_name, {
"group": group,
"topic_form": topic_form,
"is_member": is_member,
"topics": topics,
}, context_instance=RequestContext(request))
def topic(request, topic_id, edit=False, template_name="topic.html", app=None):
topic = get_object_or_404(Topic, id=topic_id)
if request.method == "POST" and edit == True and \
(request.user == topic.creator or request.user == topic.group.creator):
topic.body = request.POST["body"]
topic.save()
return HttpResponseRedirect(topic.get_absolute_url())
return app.render(template_name, {
'topic': topic,
'edit': edit,
}, context_instance=RequestContext(request))
def topic_delete(request, pk, app=None):
topic = Topic.objects.get(pk=pk)
if request.method == "POST" and (request.user == topic.creator or \
request.user == topic.group.creator):
        # delete any attached comments before removing the topic
        ThreadedComment.objects.all_for_object(topic).delete()
topic.delete()
return HttpResponseRedirect(request.POST["next"])
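# Illustrative wiring for the app pattern above (the Group model and URL
# patterns are hypothetical; url() is the API contemporary with this code):
#
#   from django.conf.urls import url
#   from groups.models import Group
#
#   app = ContentApp(Group, "topics")
#   urlpatterns = [
#       url(r'^(?P<group_slug>[-\w]+)/topics/$', topics, {'app': app}),
#       url(r'^topic/(?P<topic_id>\d+)/$', topic, {'app': app}),
#   ]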
|
python
|
'''
Do a parcel analysis of the sounding and plot the parcel temperature
'''
from __future__ import print_function, division
from SkewTplus.skewT import figure
from SkewTplus.sounding import sounding
from SkewTplus.thermodynamics import parcelAnalysis, liftParcel
#Load the sounding data
mySounding = sounding("./exampleSounding.txt")
pressure, temperature, dewPointTemperature = mySounding.getCleanSounding()
# Perform a parcel analysis
# The full parcel analysis field is returned
# Most Unstable parcel : method=0
# Start looking for the most unstable parcel from the first level (initialLevel=0)
# Use at maximum 20 iterations in the bisection method to find the LCL
# Since the sounding temperature and pressure are expressed in Celsius and hPa
# we set the corresponding keywords
myParcelAnalysis = parcelAnalysis(pressure,
temperature,
dewPointTemperature,
hPa=True,
celsius=True,
fullFields=1,
method=0,
initialLevel=0,
tolerance=0.1,
maxIterations=20)
# Print the contents of the dictionary
for key,value in myParcelAnalysis.items():
if isinstance(value, float) :
print("%s = %.1f"%(key,value))
else:
print("%s = %s"%(key,str(value)))
#Plot the parcel trajectory in the SkewT diagram
# First we lift the parcel adiabatically
initialLevel = myParcelAnalysis['initialLevel']
parcelTemperature = liftParcel(temperature[initialLevel],
pressure,
myParcelAnalysis['pressureAtLCL'],
initialLevel=initialLevel,
hPa=True,
celsius=True)
# Create a Figure Manager
mySkewT_Figure = figure()
# Add an Skew-T axes to the Figure
mySkewT_Axes = mySkewT_Figure.add_subplot(111, projection='skewx')
# Plot the parcel temperature
mySkewT_Axes.plot(parcelTemperature, pressure, linewidth=3, color='r' )
# Add a marker for the LCL and the LFC
mySkewT_Axes.plot(myParcelAnalysis['temperatureAtLCL'], myParcelAnalysis['pressureAtLCL'],
marker='o', color='b' , label='LCL')
mySkewT_Axes.plot(myParcelAnalysis['temperatureAtLFC'], myParcelAnalysis['pressureAtLFC'],
marker='o', color='g' , label='LFC')
# Add a legend
mySkewT_Axes.legend(loc='center right')
mySkewT_Axes.set_title("Single Parcel Lifted adiabatically")
mySkewT_Figure.show_plot()
|
python
|
from cmath import exp, pi, sin
import matplotlib.pyplot as mplt
def FFT(P):
n = len(P)
if n == 1:
return P
else:
w = exp((2.0 * pi * 1.0j) / n)
Pe = []
Po = []
for i in range(0, n, 2):
Pe.append(P[ i ])
for i in range(1, n, 2):
Po.append(P[ i ])
ye = FFT(Pe)
yo = FFT(Po)
y = [0.0] * n
for q in range(int(n * 0.5)):
y[q] = ye[q] + (w**q)*yo[q]
y[q + int(n/2)] = ye[q] - (w**q)*yo[q]
return y
def iFFT(P):
n = len(P)
if n == 1:
return P
else:
w = exp((-2.0 * pi * 1.0j) / n)
Pe = []
Po = []
for i in range(0, n, 2):
Pe.append(P[ i ])
for i in range(1, n, 2):
Po.append(P[ i ])
ye = iFFT(Pe)
yo = iFFT(Po)
y = [0.0] * n
for q in range(int(n * 0.5)):
y[q] = ye[q] + (w**q)*yo[q]
y[q + int(n/2)] = ye[q] - (w**q)*yo[q]
return y
#must be a power of 2
size = 256
testData = []
SAMPLERATE = 44100.0
dt = 1.0/SAMPLERATE
f = 1.0/(size/SAMPLERATE)
time = 0.0
for i in range(size):
testData.append( sin(2.0 * pi * 2.0 * f * time).real + 0.5 * sin(2.0 * pi * 8.0 * f * time).real )
time += dt
fftData = FFT(testData)
##### DO SOMETHING WITH FFT DATA #####
##### DO SOMETHING WITH FFT DATA #####
ifftData = iFFT(fftData)
for q in range(len(ifftData)):
    ifftData[q] = (ifftData[q] / size).real  # normalize and drop the residual imaginary part
fig, (ax1, ax2, ax3) = mplt.subplots(3)
ax1.plot( testData, label = 'original' )
ax2.plot( ifftData, label = 'reconstructed' )
ax3.plot( [abs(x) for x in fftData], label = 'FFT magnitude' )
ax1.legend( bbox_to_anchor = (1.0, 1), loc = 'upper right' )
ax2.legend( bbox_to_anchor = (1.0, 1), loc = 'upper right' )
ax3.legend( bbox_to_anchor = (1.0, 1), loc = 'upper right' )
mplt.show()
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019-04-15 18:21
# @Author : erwin
import pandas as pd
import numpy as np
from common.util_function import *
'''
Handling missing values
1. Fill missing values with the mean / most frequent value. For a numeric column, use the column mean.
2. For a non-numeric column (e.g. strings), assign the most frequently occurring value to the missing entries.
3. Drop missing values.
http://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.fillna.html
http://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.dropna.html
'''
raw_data = {'name': ['Jason', 'Molly', np.nan, np.nan, np.nan],
'nationality': ['USA', 'USA', 'France', 'UK', np.nan],
'age': [42, 52, 36, 24, np.nan],
'none': [np.nan, np.nan, np.nan, np.nan, np.nan],
}
df = pd.DataFrame(raw_data, columns=['name', 'nationality', 'age', 'none'])
print_line("原始数据")
print_br(df)
print_line("检查空值 NaN")
print_br(pd.isnull(df))
print_br(pd.isnull(df.name))
print_line("填充固定值")
print_br(df.fillna(value=5))
print_br(df.none.fillna(value=5))
print_line("填充均值/中位数/众数")
# inplace=True 表示在原来的 dataframe 上修改,inplace=False 表示返回新的 dataframe。
df_tmp = df['age'].fillna(df['age'].mean(), inplace=True)
print_br(df_tmp)
df_tmp = df['age'].fillna(df['age'].median(), inplace=False)
print_br(df_tmp)
df_tmp = df['nationality'].fillna(df['nationality'].mode()[0], inplace=False)
print_br(df_tmp)
print_line("删除全部为NaN值的行/列")
print_br(df.dropna(axis=0, how='all'))
print_br(df.dropna(axis=1, how='all'))
print_line("删除任一为NaN值的行/列")
df = df.drop('none', axis=1).drop(4, axis=0)
print_br(df)
print_br(df.dropna(axis=0, how='any'))
print_br(df.dropna(axis=1, how='any'))
|
python
|
"""
The sys command to manage the cmd5 distribution
"""
import glob
import os
import shutil
from cloudmesh.common.util import path_expand
from cloudmesh.shell.command import PluginCommand
from cloudmesh.shell.command import command
from cloudmesh.sys.manage import Command, Git, Version
class SysCommand(PluginCommand):
"""
The system command
"""
# noinspection PyUnusedLocal
@command
def do_sys(self, args, arguments):
"""
::
Usage:
sys upload
sys commit MESSAGE
sys command generate NAME [.]
sys generate command NAME [.]
sys version VERSION
This command does some useful things.
Arguments:
MESSAGE the message to commit
NAME the command to generate
VERSION the version number
Options:
-f specify the file
Description:
cms sys command generate NAME
When you execute this command it
will generate a directory tree for a command
with the name
cloudmesh-NAME
To install the command you need to
cd cloudmesh-NAME
pip install -e .
or
pip install .
cms sys generate command NAME .
cms sys command generate NAME .
the code will be installed in the current directory. This is
helpful, if you already are in a directory fof the name
cloudmesh-NAME, e.g. if you already created it in github and
like to add a command in that github directory.
The commands 'version', 'commit' and 'upload'
are only to be used by Gregor.
cms version
The version command adds a new version to the
VERSION file for cmd5, common, and sys.
This helps to keep the versions aligned across
these modules.
cms commit
The commit command adds a new version and commits
cms upload
The upload command uploads the new version to pypi
"""
print(arguments)
dot = arguments["."]
if arguments.commit:
msg = arguments.MESSAGE
Git.commit(msg)
elif arguments.upload:
Git.upload()
elif arguments.command and arguments.generate:
name = arguments.NAME
Command.generate(name)
if dot:
for file in ["LICENSE",
".bumpversion.cfg",
".gitignore",
"requirements.txt",
"Makefile"]:
try:
os.remove(file)
except:
pass
for entry in glob.glob("cloudmesh-{name}/**".format(name=name)):
shutil.move(entry, path_expand("."))
for entry in glob.glob("cloudmesh-{name}/.*".format(name=name)):
shutil.move(entry, path_expand("."))
shutil.rmtree("cloudmesh-{name}".format(name=name))
elif arguments.version:
version = arguments.VERSION
Version.set(version)
|
python
|
import numpy as np
from pypadre.pod.app import PadreApp
from sklearn.datasets import load_iris
from pypadre.examples.base_example import example_app
# create example app
padre_app = example_app()
def create_experiment1(app: PadreApp, name="", project="", auto_main=True):
@app.dataset(name="iris",
columns=['sepal length (cm)', 'sepal width (cm)', 'petal length (cm)',
'petal width (cm)', 'class'], target_features='class')
def dataset():
data = load_iris().data
target = load_iris().target.reshape(-1, 1)
return np.append(data, target, axis=1)
@app.preprocessing(reference_git=__file__)
def preprocessing(dataset, **kwargs):
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
scaler.fit(dataset.features())
_features = scaler.transform(dataset.features())
targets = dataset.targets()
new_data = np.hstack((_features, targets))
return new_data
@app.experiment(dataset=dataset, reference_git=__file__, preprocessing_fn=preprocessing,
experiment_name=name, seed=1, project_name=project, auto_main=auto_main)
def experiment():
from sklearn.pipeline import Pipeline
from sklearn.svm import SVC
estimators = [('SVC', SVC(probability=True, C=1.0))]
return Pipeline(estimators)
return experiment
def create_experiment2(app: PadreApp, name="", project="", auto_main=True):
@app.dataset(name="iris",
columns=['sepal length (cm)', 'sepal width (cm)', 'petal length (cm)',
'petal width (cm)', 'class'], target_features='class')
def dataset():
data = load_iris().data
target = load_iris().target.reshape(-1, 1)
return np.append(data, target, axis=1)
@app.custom_splitter(reference_git=__file__)
def custom_splitter(dataset, **kwargs):
idx = np.arange(dataset.size[0])
cutoff = int(len(idx) / 2)
return idx[:cutoff], idx[cutoff:], None
@app.experiment(dataset=dataset, reference_git=__file__, splitting=custom_splitter,
experiment_name=name, seed=1, project_name=project, auto_main=auto_main)
def experiment():
from sklearn.pipeline import Pipeline
from sklearn.svm import SVC
from sklearn.decomposition import PCA
estimators = [('PCA',PCA()),('SVC', SVC(probability=True, C=1.0))]
return Pipeline(estimators)
return experiment
experiment1 = create_experiment1(app=padre_app, name="Iris SVC - preprocessing", project="Iris - experiments")
experiment2 = create_experiment2(app=padre_app, name="Iris SVC - custom_splitting", project="Iris - experiments")
metadata, pipelines = experiment1.compare(experiment2)
print("Experiments metadata: ")
print(metadata)
print("Experiments pipelines: ")
print(pipelines)
|
python
|
import socket
import pickle
import struct
import argparse
def send_msg(sock, msg):
msg_pickle = pickle.dumps(msg)
sock.sendall(struct.pack(">I", len(msg_pickle)))
sock.sendall(msg_pickle)
print(msg[0], 'sent to', sock.getpeername())
def recv_msg(sock, expect_msg_type = None):
msg_len = struct.unpack(">I", sock.recv(4))[0]
msg = sock.recv(msg_len, socket.MSG_WAITALL)
msg = pickle.loads(msg)
print(msg[0], 'received from', sock.getpeername())
if (expect_msg_type is not None) and (msg[0] != expect_msg_type):
raise Exception("Expected " + expect_msg_type + " but received " + msg[0])
return msg
def args_parser():
parser = argparse.ArgumentParser()
parser.add_argument('-ip', type=str, default='localhost', help='Server IP address')
parser.add_argument('-port', type=int, default=51018, help='Server port')
parser.add_argument('-size', type=int, default=132863336, help='Number of floating point parameters in message')
parser.add_argument('-sim', type=int, default=10, help='Number of simulation rounds')
args = parser.parse_args()
return args
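# Minimal usage sketch; assumes a peer speaking the same length-prefixed
# pickle protocol is listening on args.ip:args.port, and that it answers a
# hypothetical 'MSG_HELLO' with a 'MSG_HELLO_ACK'.
if __name__ == '__main__':
    args = args_parser()
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((args.ip, args.port))
    send_msg(sock, ('MSG_HELLO', list(range(3))))  # any picklable payload works
    reply = recv_msg(sock, expect_msg_type='MSG_HELLO_ACK')
    sock.close()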
|
python
|
"""
NetCDF Builder
This is currently a test script and will eventually be made into a module
"""
#==============================================================================
__title__ = "netCDF maker"
__author__ = "Arden Burrell (Manon's original code modified)"
__version__ = "v1.0(02.03.2018)"
__email__ = "[email protected]"
#==============================================================================
# Set to go up two levels to TSSRESTREND folder
import os
os.chdir('../../')
#==============================================================================
# load modules for netcdf
import scipy.io.netcdf as nc
import collections
import datetime
# Load modules for the files
import numpy as np
from collections import OrderedDict
# Load modules for debugging
import pdb
# +++++ Import plotting and colorpackages +++++
import matplotlib.pyplot as plt
import matplotlib.colors as mpc
import matplotlib as mpl
import palettable
#==============================================================================
def main():
# Create a blank object to hold my info
ncinfo = netCDF_info() #call the class
# =========== load the numpy array ===========
DEMarray = np.load("./Input_data/DEM/GMTED/data/Global_DEM_at_GIMMS.npy")
# plot the data
plt.style.use('classic')
cmap = mpc.ListedColormap(
palettable.matplotlib.Viridis_20.mpl_colors
)
plt.imshow(DEMarray, vmin=0, vmax=5000, cmap=cmap)
plt.colorbar()
plt.show()
# =========== Expand the DIMS ===========
DEMarray3d = np.expand_dims(DEMarray, axis=0)
# =========== Grab lats and lons from an exising netcdf ===========
# NOTE: this netcdf is the exact shape i want to make
file_name = './Input_data/DEM/GMTED/data/10N000E_20101117_gmted_mea075_at_GIMMS.nc'
lat_arr, lon_array = nc_getLatsandLons(file_name)
# =========== Add info ===========
# The data i want to save
ncinfo.data = DEMarray3d
# File name to save into
ncinfo.fname = "./Input_data/DEM/GMTED/data/Global_DEM_GMTED_at_GIMMS.nc"
# The name of the variable to be savesd
ncinfo.var_name = "DEM"
ncinfo.var_lname = "Height_Above_Mean_Sea_Level"
# Number of lats
ncinfo.lat = 2160
# number of lons
ncinfo.lon = 4320
# Fill value, really important for CDO
ncinfo.fill = -99999.0
# Units of my variable (Meters above sea level in this case)
ncinfo.units = "m"
# The dates (This needs work)
ncinfo.dates = datetime.datetime.strptime('20100101','%Y%m%d')
# Array of the latitudes
ncinfo.latitudes = lat_arr
# Array of the longitudes
ncinfo.longitudes = lon_array
# Add Description
ncinfo.description = "Global DEM regrided from the GMTED2012 2010 250m data using CDO remapcon2"
# Add the history (This needs work)
ncinfo.history = "Created " + datetime.datetime.today().strftime("%y/%m/%d")
# =========== Create the netcdf file ===========
write_netcdf(ncinfo)
#==============================================================================
def nc_getLatsandLons(fn):
"""
    This takes a netcdf file and pulls out the lat and lon arrays
var:
fn, The name of a file to open
return:
lats, np array of the latitude
lons, np array of the longitude
"""
from netCDF4 import Dataset
# load the netcdf file
ncf1 = Dataset(fn, mode='r')
# Pull out the lon and lat data
lats = ncf1.variables["lat"][:]
lons = ncf1.variables["lon"][:]
return lats, lons
class netCDF_info(object):
"""
    A class to store the netcdf information.
    The goal is to move this class to its own script in the
    nc module once I have it working.
"""
def __init__(self): #(self, arg)
# self.arg = arg
        # These are None; later I will add ways to automatically fill this data
self.data = None
self.fname = None
self.var_name = None
self.var_lname = None
self.lat = None
self.lon = None
self.fill = None
self.units = None
self.dates = None
self.latitudes = None
self.longitudes = None
self.description = None
self.history = None
def date_range(start_date, end_date):
# define time vector
start_date=datetime.datetime.strptime(start_date,'%Y%m%d.%f')
end_date=datetime.datetime.strptime(end_date,'%Y%m%d.%f')
current=[start_date+datetime.timedelta(days=x) for x in range((end_date-start_date).days+1)]
current=[t.strftime('%Y%m%d.%f') for t in current]
return current
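# e.g. date_range('20100101.5', '20100103.5')
#   -> ['20100101.500000', '20100102.500000', '20100103.500000']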
def write_netcdf(ncinfo):
""" setup and save a netcdf file
var:
object of my created class netCDF_info
"""
# ========== Create new netcdf ==========
NAME=nc.netcdf_file(ncinfo.fname,'w')
# ========== Set up the Dimensions ==========
    NAME.createDimension('time', None)  # None makes the time dimension unlimited
# NAME.createDimension('lev',11)
NAME.createDimension('lat',ncinfo.lat)
NAME.createDimension('lon',ncinfo.lon)
# ========== Setup the Variables ==========
time=NAME.createVariable('time',np.float64,('time',))
# lev=NAME.createVariable('lev',np.int32,('lev',))
lat=NAME.createVariable('lat',np.float64,('lat',))
lon=NAME.createVariable('lon',np.float64,('lon',))
# VAR=NAME.createVariable(str(VAR),np.float64,('time','lev','lat','lon'),)
VAR=NAME.createVariable(ncinfo.var_name,np.float64,('time','lat','lon'),)
# setting the missing value is super important for the file to be cdo readable
setattr(VAR,'missing_value',ncinfo.fill)
setattr(VAR, 'standard_name', ncinfo.var_lname)
# ========== Set the units ==========
time.units= 'day as %Y%m%d'
# lev.units = '-'
lat.units = 'degrees_north'
lon.units = 'degrees_east'
VAR.units = ncinfo.units
# ========== Add data ==========
# creates time vector using the date_range function
# time[:]=[t for t in date_range('20110101.5','20111231.5')]
# lev[:]=PFT_vector
lat[:] = ncinfo.latitudes
lon[:] = ncinfo.longitudes
    # This is a bodge for single-variable data
VAR[:] = ncinfo.data
#Add global attributes
NAME.description = ncinfo.description
NAME.history = ncinfo.history
# WHATS MISSING
# metadata a whole bunch of metadata
# the standard_name and long_name of the variables
# ========== Close the netcdf ==========
NAME.close()
#==============================================================================
if __name__ == '__main__':
main()
|
python
|
lista = enumerate('zero um dois três quatro cinco seis sete oito nove'.split())
numero_string=dict(lista)
string_numero={valor:chave for chave,valor in numero_string.items()}
print(numero_string)
print(string_numero)
def para_numeral(n):
numeros=[]
for digito in str(n):
numeros.append(numero_string[int(digito)])
return ", ".join(numeros)
assert "um" == para_numeral(1)
assert "um, dois" == para_numeral(12)
assert "um, um" == para_numeral(11)
def para_inteiro(string_n):
string=""
lista=string_n.split(", ")
for digito in lista:
string+=str(string_numero[digito])
return int(string)
assert 1 == para_inteiro('um')
assert 12 == para_inteiro('um, dois')
|
python
|
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 20 00:59:05 2020
@author: Leonardo Saccotelli
"""
import numpy as np
import AlgoritmiAlgebraLineare as al
#------------------- TEST OF THE GAUSS ELIMINATION METHOD
#Size of the matrix
n = 5000
#Coefficient matrix
matrix = np.random.random((n, n)).astype(float)
#Solution vector
xSol = np.array([i for i in range(1,n+1)])
#Right-hand-side vector
b = np.dot(matrix, xSol)
# ------ APPLY THE ALGORITHMS to matrix and b
#Build the upper triangular system
matrix, b = al.GaussElimination(matrix, b)
#Compute the solutions via backwardSubstition
xFind = al.backwardSubstition(matrix, b)
#Compute the error on the solution
#using the 2-norm
xError = np.linalg.norm((xSol - xFind), 2)
#Compute the condition number (of the triangularized matrix)
conditionNumber = np.linalg.cond(matrix, 1)
#Print the computed and exact solutions
print(' Gaussian elimination')
print(' ------------------------------------------------------------')
for i in range(n):
print(' xFind[%2d] = %18.16f xSol[%2d] = %5.3f' % (i, xFind[i], i, xSol[i]))
print(' ------------------------------------------------------------')
print(' Difference ||x-xsol|| = %e\n' %xError)
print(' Matrix condition number = %e' %conditionNumber )
|
python
|
"""Lists out the inbuilt plugins in Example"""
from src.example_reporter import ExampleReporter
from src.example_tool import ExampleTool
def get_reporters() -> dict:
"""Return the reporters in plugin"""
return {
"example-reporter": ExampleReporter,
}
def get_tools() -> dict:
"""Return the tools in plugin"""
return {
"example-tool": ExampleTool,
}
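# A host application might discover these plugins like so (illustrative; the
# importing module name "plugin_list" is an assumption):
#
#   from plugin_list import get_reporters, get_tools
#   reporter_cls = get_reporters()["example-reporter"]
#   tool_cls = get_tools()["example-tool"]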
|
python
|