prompt (large_string, lengths 72–9.34k) | completion (large_string, lengths 0–7.61k) |
---|---|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from rest_framework import generics, permissions, views, response, status
from .models import Account
from .serializers import AccountCreateSerializer, AccountSerializer, AuthenticateSerializer, \
UpdateAccountSerializer, AccountRetrieveSerializer
# Create your views here.
class AccountCreateView(generics.CreateAPIView):
queryset = Account.objects.all()
serializer_class = AccountCreateSerializer
permission_classes = [permissions.AllowAny]
class AccountListView(generics.ListAPIView):
queryset = Account.objects.all()
serializer_class = AccountSerializer
permission_classes = [permissions.IsAuthenticated]
class AccountRetrieveView(generics.RetrieveAPIView):
queryset = Account.objects.all()
serializer_class = AccountRetrieveSerializer
class UpdateAccountView(generics.UpdateAPIView):
queryset = Account.objects.all()
serializer_class = UpdateAccountSerializer
# permission_classes = [permissions.IsAuthenticated]
class AccountAuthenticationView(views.APIView):
queryset = Account.objects.all()
serializer_class = AuthenticateSerializer
def <|fim_middle|>(self, request):
data = request.data
serializer = AuthenticateSerializer(data=data)
if serializer.is_valid(raise_exception=True):
new_data = serializer.data
return response.Response(new_data, status=status.HTTP_200_OK)
return response.Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)<|fim▁end|> | post |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
<|fim▁hole|> The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()<|fim▁end|> | The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
|
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
<|fim_middle|>
<|fim▁end|> | """NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict() |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
<|fim_middle|>
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | """IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
<|fim_middle|>
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | """Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
<|fim_middle|>
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | """Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
<|fim_middle|>
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | """Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
<|fim_middle|>
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | """Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
<|fim_middle|>
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | """Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
<|fim_middle|>
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | """Returns the string representation of the model"""
return pprint.pformat(self.to_dict()) |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
<|fim_middle|>
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | """For `print` and `pprint`"""
return self.to_str() |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
<|fim_middle|>
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | """Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict() |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
<|fim_middle|>
<|fim▁end|> | """Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict() |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
<|fim_middle|>
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | local_vars_configuration = Configuration() |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
<|fim_middle|>
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501 |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
<|fim_middle|>
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501 |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
<|fim_middle|>
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
)) |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
<|fim_middle|>
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | result[attr] = value.to_dict() |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
<|fim_middle|>
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
)) |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
<|fim_middle|>
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | result[attr] = value |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
<|fim_middle|>
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | return False |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
<|fim_middle|>
return self.to_dict() != other.to_dict()
<|fim▁end|> | return True |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def <|fim_middle|>(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | __init__ |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def <|fim_middle|>(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | integration_id |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def <|fim_middle|>(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | integration_id |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def <|fim_middle|>(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | created |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def <|fim_middle|>(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | created |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def <|fim_middle|>(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | to_dict |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def <|fim_middle|>(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | to_str |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def <|fim_middle|>(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | __repr__ |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def <|fim_middle|>(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | __eq__ |
<|file_name|>integration_entity.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def <|fim_middle|>(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
<|fim▁end|> | __ne__ |
<|file_name|>jsonconfig.py<|end_file_name|><|fim▁begin|>"""
Read a dictionary from a JSON file,
and add its contents to a Python dictionary.
"""
import json
import types
from instmakelib import rtimport
INSTMAKE_SITE_DIR = "instmakesite"
# These are the supported field names
# ===================================
# The name of the plugin (without ".py") for logging
# usage of instmake
CONFIG_USAGE_LOGGER = "usage-logger"
# The name of the plugin (without ".py") for normalizing
# path names in the clidiff report.
CONFIG_CLIDIFF_NORMPATH = "clidiff-normpath"
def update(caller_config, json_filename):
# This will throw errors
fh = open(json_filename)
file_config = json.load(fh)
fh.close()
assert type(file_config) == types.DictType
caller_config.update(file_config)<|fim▁hole|> """Import a plugin from the instmakesite directory.
The import can throw exceptions that the caller has to
catch."""
plugin_name = INSTMAKE_SITE_DIR + "." + name
return rtimport.rtimport(plugin_name)<|fim▁end|> |
def load_site_plugin(name): |
<|file_name|>jsonconfig.py<|end_file_name|><|fim▁begin|>"""
Read a dictionary from a JSON file,
and add its contents to a Python dictionary.
"""
import json
import types
from instmakelib import rtimport
INSTMAKE_SITE_DIR = "instmakesite"
# These are the supported field names
# ===================================
# The name of the plugin (without ".py") for logging
# usage of instmake
CONFIG_USAGE_LOGGER = "usage-logger"
# The name of the plugin (without ".py") for normalizing
# path names in the clidiff report.
CONFIG_CLIDIFF_NORMPATH = "clidiff-normpath"
def update(caller_config, json_filename):
# This will throw errors
<|fim_middle|>
def load_site_plugin(name):
"""Import a plugin from the instmakesite directory.
The import can throw exceptions that the caller has to
catch."""
plugin_name = INSTMAKE_SITE_DIR + "." + name
return rtimport.rtimport(plugin_name)
<|fim▁end|> | fh = open(json_filename)
file_config = json.load(fh)
fh.close()
assert type(file_config) == types.DictType
caller_config.update(file_config) |
<|file_name|>jsonconfig.py<|end_file_name|><|fim▁begin|>"""
Read a dictionary from a JSON file,
and add its contents to a Python dictionary.
"""
import json
import types
from instmakelib import rtimport
INSTMAKE_SITE_DIR = "instmakesite"
# These are the supported field names
# ===================================
# The name of the plugin (without ".py") for logging
# usage of instmake
CONFIG_USAGE_LOGGER = "usage-logger"
# The name of the plugin (without ".py") for normalizing
# path names in the clidiff report.
CONFIG_CLIDIFF_NORMPATH = "clidiff-normpath"
def update(caller_config, json_filename):
# This will throw errors
fh = open(json_filename)
file_config = json.load(fh)
fh.close()
assert type(file_config) == types.DictType
caller_config.update(file_config)
def load_site_plugin(name):
<|fim_middle|>
<|fim▁end|> | """Import a plugin from the instmakesite directory.
The import can throw exceptions that the caller has to
catch."""
plugin_name = INSTMAKE_SITE_DIR + "." + name
return rtimport.rtimport(plugin_name) |
<|file_name|>jsonconfig.py<|end_file_name|><|fim▁begin|>"""
Read a dictionary from a JSON file,
and add its contents to a Python dictionary.
"""
import json
import types
from instmakelib import rtimport
INSTMAKE_SITE_DIR = "instmakesite"
# These are the supported field names
# ===================================
# The name of the plugin (without ".py") for logging
# usage of instmake
CONFIG_USAGE_LOGGER = "usage-logger"
# The name of the plugin (without ".py") for normalizing
# path names in the clidiff report.
CONFIG_CLIDIFF_NORMPATH = "clidiff-normpath"
def <|fim_middle|>(caller_config, json_filename):
# This will throw errors
fh = open(json_filename)
file_config = json.load(fh)
fh.close()
assert type(file_config) == types.DictType
caller_config.update(file_config)
def load_site_plugin(name):
"""Import a plugin from the instmakesite directory.
The import can throw exceptions that the caller has to
catch."""
plugin_name = INSTMAKE_SITE_DIR + "." + name
return rtimport.rtimport(plugin_name)
<|fim▁end|> | update |
<|file_name|>jsonconfig.py<|end_file_name|><|fim▁begin|>"""
Read a dictionary from a JSON file,
and add its contents to a Python dictionary.
"""
import json
import types
from instmakelib import rtimport
INSTMAKE_SITE_DIR = "instmakesite"
# These are the supported field names
# ===================================
# The name of the plugin (without ".py") for logging
# usage of instmake
CONFIG_USAGE_LOGGER = "usage-logger"
# The name of the plugin (without ".py") for normalizing
# path names in the clidiff report.
CONFIG_CLIDIFF_NORMPATH = "clidiff-normpath"
def update(caller_config, json_filename):
# This will throw errors
fh = open(json_filename)
file_config = json.load(fh)
fh.close()
assert type(file_config) == types.DictType
caller_config.update(file_config)
def <|fim_middle|>(name):
"""Import a plugin from the instmakesite directory.
The import can throw exceptions that the caller has to
catch."""
plugin_name = INSTMAKE_SITE_DIR + "." + name
return rtimport.rtimport(plugin_name)
<|fim▁end|> | load_site_plugin |
<|file_name|>Movie2KTo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from ..internal.DeadCrypter import DeadCrypter
class Movie2KTo(DeadCrypter):
__name__ = "Movie2KTo"
__type__ = "crypter"
__version__ = "0.56"
__status__ = "stable"
__pattern__ = r'http://(?:www\.)?movie2k\.to/(.+)\.html'
__config__ = [("activated", "bool", "Activated", True)]
__description__ = """Movie2k.to decrypter plugin"""<|fim▁hole|> __authors__ = [("4Christopher", "[email protected]")]<|fim▁end|> | __license__ = "GPLv3" |
<|file_name|>Movie2KTo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from ..internal.DeadCrypter import DeadCrypter
class Movie2KTo(DeadCrypter):
<|fim_middle|>
<|fim▁end|> | __name__ = "Movie2KTo"
__type__ = "crypter"
__version__ = "0.56"
__status__ = "stable"
__pattern__ = r'http://(?:www\.)?movie2k\.to/(.+)\.html'
__config__ = [("activated", "bool", "Activated", True)]
__description__ = """Movie2k.to decrypter plugin"""
__license__ = "GPLv3"
__authors__ = [("4Christopher", "[email protected]")] |
<|file_name|>0076_articlepage_video_document.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals<|fim▁hole|>
class Migration(migrations.Migration):
dependencies = [
('wagtaildocs', '0003_add_verbose_names'),
('articles', '0075_auto_20151015_2022'),
]
operations = [
migrations.AddField(
model_name='articlepage',
name='video_document',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', null=True),
),
]<|fim▁end|> |
import django.db.models.deletion
from django.db import migrations, models |
<|file_name|>0076_articlepage_video_document.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
<|fim_middle|>
<|fim▁end|> | dependencies = [
('wagtaildocs', '0003_add_verbose_names'),
('articles', '0075_auto_20151015_2022'),
]
operations = [
migrations.AddField(
model_name='articlepage',
name='video_document',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', null=True),
),
] |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
<|fim▁hole|># This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")<|fim▁end|> |
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns. |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
<|fim_middle|>
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0) |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
<|fim_middle|>
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1) |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
<|fim_middle|>
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0) |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
<|fim_middle|>
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1) |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
<|fim_middle|>
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3) |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
<|fim_middle|>
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test") |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
<|fim_middle|>
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | raise ValueError(message) |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
<|fim_middle|>
<|fim▁end|> | def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn") |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
<|fim_middle|>
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1) |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
<|fim_middle|>
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | warnings.warn("yo")
return 3 |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
<|fim_middle|>
<|fim▁end|> | def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn") |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
<|fim_middle|>
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | warnings.warn("yo", DeprecationWarning) |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
<|fim_middle|>
<|fim▁end|> | raise AssertionError("wrong warning caught by assert_warn") |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def <|fim_middle|>():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | test_assert_less |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def <|fim_middle|>():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | test_assert_greater |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def <|fim_middle|>():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | test_assert_less_equal |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def <|fim_middle|>():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | test_assert_greater_equal |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def <|fim_middle|>():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | test_set_random_state |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def <|fim_middle|>():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | test_assert_raise_message |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def <|fim_middle|>(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | _raise_ValueError |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def <|fim_middle|>(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | test_warn |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def <|fim_middle|>():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | f |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def <|fim_middle|>(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | test_warn_wrong_warning |
<|file_name|>test_testing.py<|end_file_name|><|fim▁begin|>import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
    # Check that the nose implementation of assert_greater gives the
# same thing as the scikit's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def <|fim_middle|>():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
<|fim▁end|> | f |
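The two test rows above exercise assert_warns and assert_no_warnings as pass-through wrappers: each returns the wrapped callable's result once the warning check succeeds. A minimal usage sketch, assuming the same gplearn.skutils.testing module imported in the example above:
import warnings
from gplearn.skutils.testing import assert_warns, assert_no_warnings
def noisy():
    warnings.warn("heads up")  # warnings.warn defaults to UserWarning
    return 3
def quiet(x):
    return x
# assert_warns verifies the warning class and passes the return value through
assert assert_warns(UserWarning, noisy) == 3
# assert_no_warnings forwards positional arguments and returns the result
assert assert_no_warnings(quiet, 1) == 1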
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | def tryprint():
return ('it will be oke') |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>def tryprint():
<|fim_middle|>
<|fim▁end|> | return ('it will be oke') |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>def <|fim_middle|>():
return ('it will be oke')<|fim▁end|> | tryprint |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:<|fim▁hole|> # END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes<|fim▁end|> | index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache() |
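As a quick illustration of the sm_section/sm_name pair shown in this prompt: the section title is just the quoted name, and sm_name strips the 11-character 'submodule "' prefix and the closing quote. A minimal round-trip sketch, inlining the two one-liners rather than importing them, since the prompt only names the file util.py:
# Round-trip between a submodule name and its .gitmodules section title.
name = "vendor/somelib"
section = f'submodule "{name}"'        # what sm_section(name) builds
recovered = section.strip()[11:-1]     # what sm_name(section) recovers
assert recovered == name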
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
<|fim_middle|>
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | """:return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"' |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
<|fim_middle|>
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | """:return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1] |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
<|fim_middle|>
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | """:return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path)) |
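mkhead in the row above only wraps git.Head around the fully qualified ref path; it does not create or check out anything. A small sketch, assuming GitPython is installed and the code runs inside an existing repository:
import git
repo = git.Repo(".")                                        # any existing repository
head = git.Head(repo, git.Head.to_full_path("feature/x"))   # what mkhead(repo, "feature/x") builds
print(head.path)                                            # refs/heads/feature/x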
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
<|fim_middle|>
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | """Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name) |
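find_first_remote_branch above simply probes each remote's reference collection and raises once every remote has been tried. A hedged usage sketch; the git.objects.submodule.util import path follows GitPython's layout and is an assumption, since the prompt only names the file util.py:
import git
from git.exc import InvalidGitRepositoryError
from git.objects.submodule.util import find_first_remote_branch  # assumed module path
repo = git.Repo(".")  # assumes a clone with at least one remote configured
try:
    ref = find_first_remote_branch(repo.remotes, "main")
    print("found", ref.name)       # e.g. "origin/main"
except InvalidGitRepositoryError:
    print("no remote publishes a 'main' branch")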
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
<|fim_middle|>
#} END classes
<|fim▁end|> | """
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
<|fim_middle|>
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs) |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
<|fim_middle|>
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | """Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule) |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
<|fim_middle|>
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | """Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
<|fim_middle|>
# END overridden methods
#} END classes
<|fim▁end|> | rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
<|fim_middle|>
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
<|fim_middle|>
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache() |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
<|fim_middle|>
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | index = sm.repo.index |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def <|fim_middle|>(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | sm_section |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def <|fim_middle|>(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | sm_name |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def <|fim_middle|>(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | mkhead |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def <|fim_middle|>(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | find_first_remote_branch |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def <|fim_middle|>(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | __init__ |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def <|fim_middle|>(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | set_submodule |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def <|fim_middle|>(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | flush_to_index |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref
# typing -----------------------------------------------------------------------
from typing import Any, Sequence, TYPE_CHECKING, Union
from git.types import PathLike
if TYPE_CHECKING:
from .base import Submodule
from weakref import ReferenceType
from git.repo import Repo
from git.refs import Head
from git import Remote
from git.refs import RemoteReference
__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
'SubmoduleConfigParser')
#{ Utilities
def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"'
def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
#} END utilities
#{ Classes
class SubmoduleConfigParser(GitConfigParser):
"""
Catches calls to _write, and updates the .gitmodules blob in the index
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
Please note that no mutating method will work in bare mode
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
#{ Interface
def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref
#} END interface
#{ Overridden Methods
def <|fim_middle|>(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
# END overridden methods
#} END classes
<|fim▁end|> | write |
<|file_name|>inhibit.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2011 Christoph Reiter <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import dbus
from quodlibet import _
from quodlibet import app
from quodlibet.qltk import Icons
from quodlibet.plugins.events import EventPlugin
def get_toplevel_xid():
if app.window.get_window():
try:
return app.window.get_window().get_xid()
except AttributeError: # non x11
pass
return 0
class InhibitFlags(object):
LOGOUT = 1
USERSWITCH = 1 << 1
SUSPEND = 1 << 2
IDLE = 1 << 3
class SessionInhibit(EventPlugin):
PLUGIN_ID = "screensaver_inhibit"
PLUGIN_NAME = _("Inhibit Screensaver")
PLUGIN_DESC = _("Prevents the GNOME screensaver from activating while"
" a song is playing.")
PLUGIN_ICON = Icons.PREFERENCES_DESKTOP_SCREENSAVER
DBUS_NAME = "org.gnome.SessionManager"
DBUS_INTERFACE = "org.gnome.SessionManager"
DBUS_PATH = "/org/gnome/SessionManager"
APPLICATION_ID = "quodlibet"
INHIBIT_REASON = _("Music is playing")
__cookie = None
def enabled(self):
if not app.player.paused:<|fim▁hole|> self.plugin_on_paused()
def plugin_on_unpaused(self):
xid = dbus.UInt32(get_toplevel_xid())
flags = dbus.UInt32(InhibitFlags.IDLE)
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
self.__cookie = iface.Inhibit(
self.APPLICATION_ID, xid, self.INHIBIT_REASON, flags)
except dbus.DBusException:
pass
def plugin_on_paused(self):
if self.__cookie is None:
return
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
iface.Uninhibit(self.__cookie)
self.__cookie = None
except dbus.DBusException:
pass<|fim▁end|> | self.plugin_on_unpaused()
def disabled(self):
if not app.player.paused: |
<|file_name|>inhibit.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2011 Christoph Reiter <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import dbus
from quodlibet import _
from quodlibet import app
from quodlibet.qltk import Icons
from quodlibet.plugins.events import EventPlugin
def get_toplevel_xid():
<|fim_middle|>
class InhibitFlags(object):
LOGOUT = 1
USERSWITCH = 1 << 1
SUSPEND = 1 << 2
IDLE = 1 << 3
class SessionInhibit(EventPlugin):
PLUGIN_ID = "screensaver_inhibit"
PLUGIN_NAME = _("Inhibit Screensaver")
PLUGIN_DESC = _("Prevents the GNOME screensaver from activating while"
" a song is playing.")
PLUGIN_ICON = Icons.PREFERENCES_DESKTOP_SCREENSAVER
DBUS_NAME = "org.gnome.SessionManager"
DBUS_INTERFACE = "org.gnome.SessionManager"
DBUS_PATH = "/org/gnome/SessionManager"
APPLICATION_ID = "quodlibet"
INHIBIT_REASON = _("Music is playing")
__cookie = None
def enabled(self):
if not app.player.paused:
self.plugin_on_unpaused()
def disabled(self):
if not app.player.paused:
self.plugin_on_paused()
def plugin_on_unpaused(self):
xid = dbus.UInt32(get_toplevel_xid())
flags = dbus.UInt32(InhibitFlags.IDLE)
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
self.__cookie = iface.Inhibit(
self.APPLICATION_ID, xid, self.INHIBIT_REASON, flags)
except dbus.DBusException:
pass
def plugin_on_paused(self):
if self.__cookie is None:
return
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
iface.Uninhibit(self.__cookie)
self.__cookie = None
except dbus.DBusException:
pass
<|fim▁end|> | if app.window.get_window():
try:
return app.window.get_window().get_xid()
except AttributeError: # non x11
pass
return 0 |
<|file_name|>inhibit.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2011 Christoph Reiter <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import dbus
from quodlibet import _
from quodlibet import app
from quodlibet.qltk import Icons
from quodlibet.plugins.events import EventPlugin
def get_toplevel_xid():
if app.window.get_window():
try:
return app.window.get_window().get_xid()
except AttributeError: # non x11
pass
return 0
class InhibitFlags(object):
<|fim_middle|>
class SessionInhibit(EventPlugin):
PLUGIN_ID = "screensaver_inhibit"
PLUGIN_NAME = _("Inhibit Screensaver")
PLUGIN_DESC = _("Prevents the GNOME screensaver from activating while"
" a song is playing.")
PLUGIN_ICON = Icons.PREFERENCES_DESKTOP_SCREENSAVER
DBUS_NAME = "org.gnome.SessionManager"
DBUS_INTERFACE = "org.gnome.SessionManager"
DBUS_PATH = "/org/gnome/SessionManager"
APPLICATION_ID = "quodlibet"
INHIBIT_REASON = _("Music is playing")
__cookie = None
def enabled(self):
if not app.player.paused:
self.plugin_on_unpaused()
def disabled(self):
if not app.player.paused:
self.plugin_on_paused()
def plugin_on_unpaused(self):
xid = dbus.UInt32(get_toplevel_xid())
flags = dbus.UInt32(InhibitFlags.IDLE)
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
self.__cookie = iface.Inhibit(
self.APPLICATION_ID, xid, self.INHIBIT_REASON, flags)
except dbus.DBusException:
pass
def plugin_on_paused(self):
if self.__cookie is None:
return
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
iface.Uninhibit(self.__cookie)
self.__cookie = None
except dbus.DBusException:
pass
<|fim▁end|> | LOGOUT = 1
USERSWITCH = 1 << 1
SUSPEND = 1 << 2
IDLE = 1 << 3 |
<|file_name|>inhibit.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2011 Christoph Reiter <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import dbus
from quodlibet import _
from quodlibet import app
from quodlibet.qltk import Icons
from quodlibet.plugins.events import EventPlugin
def get_toplevel_xid():
if app.window.get_window():
try:
return app.window.get_window().get_xid()
except AttributeError: # non x11
pass
return 0
class InhibitFlags(object):
LOGOUT = 1
USERSWITCH = 1 << 1
SUSPEND = 1 << 2
IDLE = 1 << 3
class SessionInhibit(EventPlugin):
<|fim_middle|>
<|fim▁end|> | PLUGIN_ID = "screensaver_inhibit"
PLUGIN_NAME = _("Inhibit Screensaver")
PLUGIN_DESC = _("Prevents the GNOME screensaver from activating while"
" a song is playing.")
PLUGIN_ICON = Icons.PREFERENCES_DESKTOP_SCREENSAVER
DBUS_NAME = "org.gnome.SessionManager"
DBUS_INTERFACE = "org.gnome.SessionManager"
DBUS_PATH = "/org/gnome/SessionManager"
APPLICATION_ID = "quodlibet"
INHIBIT_REASON = _("Music is playing")
__cookie = None
def enabled(self):
if not app.player.paused:
self.plugin_on_unpaused()
def disabled(self):
if not app.player.paused:
self.plugin_on_paused()
def plugin_on_unpaused(self):
xid = dbus.UInt32(get_toplevel_xid())
flags = dbus.UInt32(InhibitFlags.IDLE)
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
self.__cookie = iface.Inhibit(
self.APPLICATION_ID, xid, self.INHIBIT_REASON, flags)
except dbus.DBusException:
pass
def plugin_on_paused(self):
if self.__cookie is None:
return
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
iface.Uninhibit(self.__cookie)
self.__cookie = None
except dbus.DBusException:
pass |
<|file_name|>inhibit.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2011 Christoph Reiter <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import dbus
from quodlibet import _
from quodlibet import app
from quodlibet.qltk import Icons
from quodlibet.plugins.events import EventPlugin
def get_toplevel_xid():
if app.window.get_window():
try:
return app.window.get_window().get_xid()
except AttributeError: # non x11
pass
return 0
class InhibitFlags(object):
LOGOUT = 1
USERSWITCH = 1 << 1
SUSPEND = 1 << 2
IDLE = 1 << 3
class SessionInhibit(EventPlugin):
PLUGIN_ID = "screensaver_inhibit"
PLUGIN_NAME = _("Inhibit Screensaver")
PLUGIN_DESC = _("Prevents the GNOME screensaver from activating while"
" a song is playing.")
PLUGIN_ICON = Icons.PREFERENCES_DESKTOP_SCREENSAVER
DBUS_NAME = "org.gnome.SessionManager"
DBUS_INTERFACE = "org.gnome.SessionManager"
DBUS_PATH = "/org/gnome/SessionManager"
APPLICATION_ID = "quodlibet"
INHIBIT_REASON = _("Music is playing")
__cookie = None
def enabled(self):
<|fim_middle|>
def disabled(self):
if not app.player.paused:
self.plugin_on_paused()
def plugin_on_unpaused(self):
xid = dbus.UInt32(get_toplevel_xid())
flags = dbus.UInt32(InhibitFlags.IDLE)
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
self.__cookie = iface.Inhibit(
self.APPLICATION_ID, xid, self.INHIBIT_REASON, flags)
except dbus.DBusException:
pass
def plugin_on_paused(self):
if self.__cookie is None:
return
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
iface.Uninhibit(self.__cookie)
self.__cookie = None
except dbus.DBusException:
pass
<|fim▁end|> | if not app.player.paused:
self.plugin_on_unpaused() |
<|file_name|>inhibit.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2011 Christoph Reiter <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import dbus
from quodlibet import _
from quodlibet import app
from quodlibet.qltk import Icons
from quodlibet.plugins.events import EventPlugin
def get_toplevel_xid():
if app.window.get_window():
try:
return app.window.get_window().get_xid()
except AttributeError: # non x11
pass
return 0
class InhibitFlags(object):
LOGOUT = 1
USERSWITCH = 1 << 1
SUSPEND = 1 << 2
IDLE = 1 << 3
class SessionInhibit(EventPlugin):
PLUGIN_ID = "screensaver_inhibit"
PLUGIN_NAME = _("Inhibit Screensaver")
PLUGIN_DESC = _("Prevents the GNOME screensaver from activating while"
" a song is playing.")
PLUGIN_ICON = Icons.PREFERENCES_DESKTOP_SCREENSAVER
DBUS_NAME = "org.gnome.SessionManager"
DBUS_INTERFACE = "org.gnome.SessionManager"
DBUS_PATH = "/org/gnome/SessionManager"
APPLICATION_ID = "quodlibet"
INHIBIT_REASON = _("Music is playing")
__cookie = None
def enabled(self):
if not app.player.paused:
self.plugin_on_unpaused()
def disabled(self):
<|fim_middle|>
def plugin_on_unpaused(self):
xid = dbus.UInt32(get_toplevel_xid())
flags = dbus.UInt32(InhibitFlags.IDLE)
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
self.__cookie = iface.Inhibit(
self.APPLICATION_ID, xid, self.INHIBIT_REASON, flags)
except dbus.DBusException:
pass
def plugin_on_paused(self):
if self.__cookie is None:
return
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
iface.Uninhibit(self.__cookie)
self.__cookie = None
except dbus.DBusException:
pass
<|fim▁end|> | if not app.player.paused:
self.plugin_on_paused() |
<|file_name|>inhibit.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2011 Christoph Reiter <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import dbus
from quodlibet import _
from quodlibet import app
from quodlibet.qltk import Icons
from quodlibet.plugins.events import EventPlugin
def get_toplevel_xid():
if app.window.get_window():
try:
return app.window.get_window().get_xid()
except AttributeError: # non x11
pass
return 0
class InhibitFlags(object):
LOGOUT = 1
USERSWITCH = 1 << 1
SUSPEND = 1 << 2
IDLE = 1 << 3
class SessionInhibit(EventPlugin):
PLUGIN_ID = "screensaver_inhibit"
PLUGIN_NAME = _("Inhibit Screensaver")
PLUGIN_DESC = _("Prevents the GNOME screensaver from activating while"
" a song is playing.")
PLUGIN_ICON = Icons.PREFERENCES_DESKTOP_SCREENSAVER
DBUS_NAME = "org.gnome.SessionManager"
DBUS_INTERFACE = "org.gnome.SessionManager"
DBUS_PATH = "/org/gnome/SessionManager"
APPLICATION_ID = "quodlibet"
INHIBIT_REASON = _("Music is playing")
__cookie = None
def enabled(self):
if not app.player.paused:
self.plugin_on_unpaused()
def disabled(self):
if not app.player.paused:
self.plugin_on_paused()
def plugin_on_unpaused(self):
<|fim_middle|>
def plugin_on_paused(self):
if self.__cookie is None:
return
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
iface.Uninhibit(self.__cookie)
self.__cookie = None
except dbus.DBusException:
pass
<|fim▁end|> | xid = dbus.UInt32(get_toplevel_xid())
flags = dbus.UInt32(InhibitFlags.IDLE)
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
self.__cookie = iface.Inhibit(
self.APPLICATION_ID, xid, self.INHIBIT_REASON, flags)
except dbus.DBusException:
pass |
<|file_name|>inhibit.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2011 Christoph Reiter <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import dbus
from quodlibet import _
from quodlibet import app
from quodlibet.qltk import Icons
from quodlibet.plugins.events import EventPlugin
def get_toplevel_xid():
if app.window.get_window():
try:
return app.window.get_window().get_xid()
except AttributeError: # non x11
pass
return 0
class InhibitFlags(object):
LOGOUT = 1
USERSWITCH = 1 << 1
SUSPEND = 1 << 2
IDLE = 1 << 3
class SessionInhibit(EventPlugin):
PLUGIN_ID = "screensaver_inhibit"
PLUGIN_NAME = _("Inhibit Screensaver")
PLUGIN_DESC = _("Prevents the GNOME screensaver from activating while"
" a song is playing.")
PLUGIN_ICON = Icons.PREFERENCES_DESKTOP_SCREENSAVER
DBUS_NAME = "org.gnome.SessionManager"
DBUS_INTERFACE = "org.gnome.SessionManager"
DBUS_PATH = "/org/gnome/SessionManager"
APPLICATION_ID = "quodlibet"
INHIBIT_REASON = _("Music is playing")
__cookie = None
def enabled(self):
if not app.player.paused:
self.plugin_on_unpaused()
def disabled(self):
if not app.player.paused:
self.plugin_on_paused()
def plugin_on_unpaused(self):
xid = dbus.UInt32(get_toplevel_xid())
flags = dbus.UInt32(InhibitFlags.IDLE)
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
self.__cookie = iface.Inhibit(
self.APPLICATION_ID, xid, self.INHIBIT_REASON, flags)
except dbus.DBusException:
pass
def plugin_on_paused(self):
<|fim_middle|>
<|fim▁end|> | if self.__cookie is None:
return
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
iface.Uninhibit(self.__cookie)
self.__cookie = None
except dbus.DBusException:
pass |
<|file_name|>inhibit.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2011 Christoph Reiter <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
if os.name == "nt" or sys.platform == "darwin":
<|fim_middle|>
import dbus
from quodlibet import _
from quodlibet import app
from quodlibet.qltk import Icons
from quodlibet.plugins.events import EventPlugin
def get_toplevel_xid():
if app.window.get_window():
try:
return app.window.get_window().get_xid()
except AttributeError: # non x11
pass
return 0
class InhibitFlags(object):
LOGOUT = 1
USERSWITCH = 1 << 1
SUSPEND = 1 << 2
IDLE = 1 << 3
class SessionInhibit(EventPlugin):
PLUGIN_ID = "screensaver_inhibit"
PLUGIN_NAME = _("Inhibit Screensaver")
PLUGIN_DESC = _("Prevents the GNOME screensaver from activating while"
" a song is playing.")
PLUGIN_ICON = Icons.PREFERENCES_DESKTOP_SCREENSAVER
DBUS_NAME = "org.gnome.SessionManager"
DBUS_INTERFACE = "org.gnome.SessionManager"
DBUS_PATH = "/org/gnome/SessionManager"
APPLICATION_ID = "quodlibet"
INHIBIT_REASON = _("Music is playing")
__cookie = None
def enabled(self):
if not app.player.paused:
self.plugin_on_unpaused()
def disabled(self):
if not app.player.paused:
self.plugin_on_paused()
def plugin_on_unpaused(self):
xid = dbus.UInt32(get_toplevel_xid())
flags = dbus.UInt32(InhibitFlags.IDLE)
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
self.__cookie = iface.Inhibit(
self.APPLICATION_ID, xid, self.INHIBIT_REASON, flags)
except dbus.DBusException:
pass
def plugin_on_paused(self):
if self.__cookie is None:
return
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
iface.Uninhibit(self.__cookie)
self.__cookie = None
except dbus.DBusException:
pass
<|fim▁end|> | from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError |
<|file_name|>inhibit.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2011 Christoph Reiter <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import dbus
from quodlibet import _
from quodlibet import app
from quodlibet.qltk import Icons
from quodlibet.plugins.events import EventPlugin
def get_toplevel_xid():
if app.window.get_window():
<|fim_middle|>
return 0
class InhibitFlags(object):
LOGOUT = 1
USERSWITCH = 1 << 1
SUSPEND = 1 << 2
IDLE = 1 << 3
class SessionInhibit(EventPlugin):
PLUGIN_ID = "screensaver_inhibit"
PLUGIN_NAME = _("Inhibit Screensaver")
PLUGIN_DESC = _("Prevents the GNOME screensaver from activating while"
" a song is playing.")
PLUGIN_ICON = Icons.PREFERENCES_DESKTOP_SCREENSAVER
DBUS_NAME = "org.gnome.SessionManager"
DBUS_INTERFACE = "org.gnome.SessionManager"
DBUS_PATH = "/org/gnome/SessionManager"
APPLICATION_ID = "quodlibet"
INHIBIT_REASON = _("Music is playing")
__cookie = None
def enabled(self):
if not app.player.paused:
self.plugin_on_unpaused()
def disabled(self):
if not app.player.paused:
self.plugin_on_paused()
def plugin_on_unpaused(self):
xid = dbus.UInt32(get_toplevel_xid())
flags = dbus.UInt32(InhibitFlags.IDLE)
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
self.__cookie = iface.Inhibit(
self.APPLICATION_ID, xid, self.INHIBIT_REASON, flags)
except dbus.DBusException:
pass
def plugin_on_paused(self):
if self.__cookie is None:
return
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
iface.Uninhibit(self.__cookie)
self.__cookie = None
except dbus.DBusException:
pass
<|fim▁end|> | try:
return app.window.get_window().get_xid()
except AttributeError: # non x11
pass |