code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
parsed = self.download_parsed(days=days) return parsed.account.statement
def statement(self, days=60)
Download the :py:class:`ofxparse.Statement` for the given time range :param days: Number of days to look back at :type days: integer :rtype: :py:class:`ofxparse.Statement`
19.381643
13.455626
1.440412
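A minimal usage sketch for the statement() call above, assuming a configured ofxclient Institution; the id, org, URL, and credentials below are placeholders, not values from the source.

from ofxclient import Institution

institution = Institution(
    id='3101',            # OFX FID (placeholder)
    org='EXAMPLEBANK',    # OFX ORG (placeholder)
    url='https://ofx.examplebank.test/ofx.cgi',
    username='my_username',
    password='my_password',
)
account = institution.accounts()[0]
statement = account.statement(days=30)   # an ofxparse.Statement
for txn in statement.transactions:
    print(txn.date, txn.amount, txn.payee)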
data = { 'local_id': self.local_id(), 'institution': self.institution.serialize(), 'number': self.number, 'description': self.description } if hasattr(self, 'broker_id'): data['broker_id'] = self.broker_id elif hasattr(self, 'routing_number'): data['routing_number'] = self.routing_number data['account_type'] = self.account_type return data
def serialize(self)
Serialize predictably for use in configuration storage. Output looks like this:: { 'local_id': 'string', 'number': 'account num', 'description': 'descr', 'broker_id': 'may be missing - type dependent', 'routing_number': 'may be missing - type dependent', 'account_type': 'may be missing - type dependent', 'institution': { # ... see :py:meth:`ofxclient.Institution.serialize` } } :rtype: nested dictionary
2.884085
2.000792
1.441472
from ofxclient.institution import Institution institution = Institution.deserialize(raw['institution']) del raw['institution'] del raw['local_id'] if 'broker_id' in raw: a = BrokerageAccount(institution=institution, **raw) elif 'routing_number' in raw: a = BankAccount(institution=institution, **raw) else: a = CreditCardAccount(institution=institution, **raw) return a
def deserialize(raw)
Instantiate a :py:class:`ofxclient.Account` subclass from a dictionary :param raw: serialized Account :type raw: dict as given by :py:meth:`~ofxclient.Account.serialize` :rtype: subclass of :py:class:`ofxclient.Account`
3.860161
3.506489
1.100862
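A small round-trip sketch tying serialize() and deserialize() together, assuming deserialize() is exposed on ofxclient.account.Account and an account obtained as in the previous example.

from ofxclient.account import Account

data = account.serialize()            # plain nested dict, safe to persist in config
restored = Account.deserialize(data)  # rebuilds the matching Account subclass
assert restored.number == account.number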
description = data.desc if hasattr(data, 'desc') else None if data.type == AccountType.Bank: return BankAccount( institution=institution, number=data.account_id, routing_number=data.routing_number, account_type=data.account_type, description=description) elif data.type == AccountType.CreditCard: return CreditCardAccount( institution=institution, number=data.account_id, description=description) elif data.type == AccountType.Investment: return BrokerageAccount( institution=institution, number=data.account_id, broker_id=data.brokerid, description=description) raise ValueError("unknown account type: %s" % data.type)
def from_ofxparse(data, institution)
Instantiate :py:class:`ofxclient.Account` subclass from ofxparse module :param data: an ofxparse account :type data: An :py:class:`ofxparse.Account` object :param institution: The parent institution of the account :type institution: :py:class:`ofxclient.Institution` object
2.247069
2.30091
0.9766
c = self.institution.client() q = c.brokerage_account_query( number=self.number, date=as_of, broker_id=self.broker_id) return q
def _download_query(self, as_of)
Formulate the specific query needed for download Not intended to be called by developers directly. :param as_of: Date in 'YYYYMMDD' format :type as_of: string
9.34608
9.382393
0.99613
c = self.institution.client() q = c.bank_account_query( number=self.number, date=as_of, account_type=self.account_type, bank_id=self.routing_number) return q
def _download_query(self, as_of)
Formulate the specific query needed for download Not intended to be called by developers directly. :param as_of: Date in 'YYYYMMDD' format :type as_of: string
6.724686
6.738343
0.997973
c = self.institution.client() q = c.credit_card_account_query(number=self.number, date=as_of) return q
def _download_query(self, as_of)
Formulate the specific query needed for download Not intended to be called by developers directly. :param as_of: Date in 'YYYYMMDD' format :type as_of: string
12.833717
12.518276
1.025198
client = Client(institution=None) out_file = StringIO() out_file.write(client.header()) out_file.write('<OFX>') for a in accounts: ofx = a.download(days=days).read() stripped = ofx.partition('<OFX>')[2].partition('</OFX>')[0] out_file.write(stripped) out_file.write("</OFX>") out_file.seek(0) return out_file
def combined_download(accounts, days=60)
Download OFX files and combine them into one It expects an 'accounts' list of ofxclient.Account objects as well as an optional 'days' specifier which defaults to 60
3.672906
3.255335
1.128273
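A hedged usage sketch for combined_download(), assuming it lives in ofxclient.util and given accounts from an Institution as above; it returns a StringIO holding one merged OFX document.

from ofxclient.util import combined_download  # assumed module path

accounts = institution.accounts()
combined = combined_download(accounts, days=30)
with open('combined.ofx', 'w') as fh:
    fh.write(combined.read())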
u = username or self.institution.username p = password or self.institution.password contents = ['OFX', self._signOn(username=u, password=p)] if with_message: contents.append(with_message) return LINE_ENDING.join([self.header(), _tag(*contents)])
def authenticated_query( self, with_message=None, username=None, password=None )
Authenticated query. If you pass a 'with_message' payload, that query is wrapped inside the authenticated request; otherwise this is just an authentication probe query.
8.743397
8.837606
0.98934
return self.authenticated_query( self._bareq(number, date, account_type, bank_id) )
def bank_account_query(self, number, date, account_type, bank_id)
Bank account statement request
9.179267
10.237205
0.896658
return self.authenticated_query(self._ccreq(number, date))
def credit_card_account_query(self, number, date)
Credit card account statement request
21.163551
20.642334
1.02525
res, response = self._do_post(query) cookies = res.getheader('Set-Cookie', None) if len(response) == 0 and cookies is not None and res.status == 200: logging.debug('Got 0-length 200 response with Set-Cookies header; ' 'retrying request with cookies') _, response = self._do_post(query, [('Cookie', cookies)]) return response
def post(self, query)
Wrapper around ``_do_post()`` to handle accounts that require sending back session cookies (``self.set_cookies`` True).
4.813127
4.399481
1.094021
i = self.institution logging.debug('posting data to %s' % i.url) garbage, path = splittype(i.url) host, selector = splithost(path) h = HTTPSConnection(host, timeout=60) # Discover requires a particular ordering of headers, so send the # request step by step. h.putrequest('POST', selector, skip_host=True, skip_accept_encoding=True) headers = [ ('Content-Type', 'application/x-ofx'), ('Host', host), ('Content-Length', len(query)), ('Connection', 'Keep-Alive') ] if self.accept: headers.append(('Accept', self.accept)) if self.user_agent: headers.append(('User-Agent', self.user_agent)) for ehname, ehval in extra_headers: headers.append((ehname, ehval)) logging.debug('---- request headers ----') for hname, hval in headers: logging.debug('%s: %s', hname, hval) h.putheader(hname, hval) logging.debug('---- request body (query) ----') logging.debug(query) h.endheaders(query.encode()) res = h.getresponse() response = res.read().decode('ascii', 'ignore') logging.debug('---- response ----') logging.debug(res.__dict__) logging.debug('Headers: %s', res.getheaders()) logging.debug(response) res.close() return res, response
def _do_post(self, query, extra_headers=[])
Do a POST to the Institution. :param query: Body content to POST (OFX Query) :type query: str :param extra_headers: Extra headers to send with the request, as a list of (Name, Value) header 2-tuples. :type extra_headers: list :return: 2-tuple of (HTTPResponse, str response body) :rtype: tuple
3.18171
2.871487
1.108035
raw_headers = [] for k, v in headers.items(): raw_headers.append((k.encode('utf8'), v.encode('utf8'))) return tuple(raw_headers)
def _build_raw_headers(self, headers: Dict) -> Tuple
Convert a dict of headers to a tuple of tuples Mimics the format of ClientResponse.
2.401368
2.214988
1.084145
url = normalize_url(merge_params(url, kwargs.get('params'))) url_str = str(url) for prefix in self._passthrough: if url_str.startswith(prefix): return (await self.patcher.temp_original( orig_self, method, url, *args, **kwargs )) response = await self.match(method, url, **kwargs) if response is None: raise ClientConnectionError( 'Connection refused: {} {}'.format(method, url) ) self._responses.append(response) key = (method, url) self.requests.setdefault(key, []) self.requests[key].append(RequestCall(args, kwargs)) return response
async def _request_mock(self, orig_self: ClientSession, method: str, url: 'Union[URL, str]', *args: Tuple, **kwargs: Dict) -> 'ClientResponse'
Return mocked response object or raise connection error.
4.081458
4.100137
0.995444
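The mock above is what aioresponses patches into aiohttp's ClientSession; a typical test looks roughly like this (the URL and payload are illustrative).

import asyncio
import aiohttp
from aioresponses import aioresponses

async def fetch():
    async with aiohttp.ClientSession() as session:
        async with session.get('http://example.test/api') as resp:
            return await resp.json()

with aioresponses() as mocked:
    mocked.get('http://example.test/api', payload={'ok': True})
    assert asyncio.run(fetch()) == {'ok': True}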
url = URL(url) return url.with_query(urlencode(sorted(parse_qsl(url.query_string))))
def normalize_url(url: 'Union[URL, str]') -> 'URL'
Normalize url to make comparisons.
5.31702
4.844633
1.097507
case = Case(title=title, description=description, **kwargs) response = self._thehive.create_case(case) # Check for failed authentication if response.status_code == requests.codes.unauthorized: raise TheHiveException("Authentication failed") if self.status_ok(response.status_code): return self(response.json()['id']) else: raise CaseException("Server returned {}: {}".format(response.status_code, response.text))
def create(self, title, description, **kwargs)
Create a case on the TheHive server. :param title: Case title. :param description: Case description. :param kwargs: Additional case attributes. :return: The created instance.
4.184309
4.087798
1.02361
response = self._thehive.do_patch("/api/case/{}".format(case_id), **attributes) if response.status_code == requests.codes.unauthorized: raise TheHiveException("Authentication failed") if self.status_ok(response.status_code): return self(response.json()['id']) else: raise CaseException("Server returned {}: {}".format(response.status_code, response.text))
def update(self, case_id, **attributes)
Update a case. :param case_id: The ID of the case to update :param attributes: key=value pairs of case attributes to update (field=new_value) :return: The updated instance.
4.179743
4.086085
1.022921
req = self.url + find_url # Add range and sort parameters params = { "range": attributes.get("range", "all"), "sort": attributes.get("sort", []) } # Add body data = { "query": attributes.get("query", {}) } try: return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException as e: raise TheHiveException("Error: {}".format(e))
def __find_rows(self, find_url, **attributes)
:param find_url: URL of the find api :type find_url: string :return: The Response returned by requests including the list of documents based on find_url :rtype: Response object
3.467861
3.409859
1.01701
req = self.url + "/api/case" data = case.jsonify() try: return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException as e: raise CaseException("Case create error: {}".format(e))
def create_case(self, case)
:param case: The case details :type case: Case defined in models.py :return: TheHive case :rtype: json
3.373746
3.453436
0.976924
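A usage sketch for the case-creation call above via thehive4py's public API; the server URL and API key are placeholders.

from thehive4py.api import TheHiveApi
from thehive4py.models import Case

api = TheHiveApi('http://127.0.0.1:9000', 'API_KEY')  # placeholders
case = Case(title='Suspicious login',
            description='Logins from a new ASN need review',
            tlp=2, severity=2, tags=['login', 'review'])
response = api.create_case(case)
if response.status_code == 201:
    case_id = response.json()['id']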
req = self.url + "/api/case/{}".format(case.id) # Choose which attributes to send update_keys = [ 'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus', 'impactStatus', 'summary', 'endDate', 'metrics', 'customFields' ] data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)} try: return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException: raise CaseException("Case update error: {}".format(e))
def update_case(self, case, fields=[])
Update a case. :param case: The case to update. The case's `id` determines which case to update. :param fields: Optional parameter, an array of fields names, the ones we want to update :return:
3.900077
4.089692
0.953636
req = self.url + "/api/case/{}/task".format(case_id) data = case_task.jsonify() try: return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException as e: raise CaseTaskException("Case task create error: {}".format(e))
def create_case_task(self, case_id, case_task)
:param case_id: Case identifier :param case_task: TheHive task :type case_task: CaseTask defined in models.py :return: TheHive task :rtype: json
3.030594
3.196902
0.947978
req = self.url + "/api/case/task/{}".format(task.id) # Choose which attributes to send update_keys = [ 'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate' ] data = {k: v for k, v in task.__dict__.items() if k in update_keys} try: return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException as e: raise CaseTaskException("Case task update error: {}".format(e))
def update_case_task(self, task)
Update a TheHive task. :param task: The task to update. The task's `id` determines which task to update. :return:
3.60858
3.815563
0.945753
req = self.url + "/api/case/task/{}/log".format(task_id) data = {'_json': json.dumps({"message":case_task_log.message})} if case_task_log.file: f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))} try: return requests.post(req, data=data,files=f, proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException as e: raise CaseTaskException("Case task log create error: {}".format(e)) else: try: return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message':case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException as e: raise CaseTaskException("Case task log create error: {}".format(e))
def create_task_log(self, task_id, case_task_log)
:param task_id: Task identifier :param case_task_log: TheHive log :type case_task_log: CaseTaskLog defined in models.py :return: TheHive log :rtype: json
2.124512
2.170583
0.978775
req = self.url + "/api/case/{}/artifact".format(case_id) if case_observable.dataType == 'file': try: mesg = json.dumps({ "dataType": case_observable.dataType, "message": case_observable.message, "tlp": case_observable.tlp, "tags": case_observable.tags, "ioc": case_observable.ioc }) data = {"_json": mesg} return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException as e: raise CaseObservableException("Case observable create error: {}".format(e)) else: try: return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException as e: raise CaseObservableException("Case observable create error: {}".format(e))
def create_case_observable(self, case_id, case_observable)
:param case_id: Case identifier :param case_observable: TheHive observable :type case_observable: CaseObservable defined in models.py :return: TheHive observable :rtype: json
2.651867
2.716405
0.976241
req = self.url + "/api/case/{}/links".format(case_id) try: return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException as e: raise CaseException("Linked cases fetch error: {}".format(e))
def get_linked_cases(self, case_id)
:param case_id: Case identifier :return: TheHive case(s) :rtype: json
3.874538
4.092735
0.946687
req = self.url + "/api/case/template/_search" data = { "query": And(Eq("name", name), Eq("status", "Ok")) } try: response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert) json_response = response.json() if response.status_code == 200 and len(json_response) > 0: return response.json()[0] else: raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name)) except requests.exceptions.RequestException as e: raise CaseTemplateException("Case template fetch error: {}".format(e))
def get_case_template(self, name)
:param name: Case template name :return: TheHive case template :rtype: json
3.075448
3.17199
0.969564
req = self.url + "/api/case/task/{}/log".format(taskId) try: return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException as e: raise CaseTaskException("Case task logs search error: {}".format(e))
def get_task_logs(self, taskId)
:param taskId: Task identifier :type taskId: string :return: TheHive logs :rtype: json
4.265142
3.982129
1.071071
req = self.url + "/api/alert" data = alert.jsonify() try: return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException as e: raise AlertException("Alert create error: {}".format(e))
def create_alert(self, alert)
:param alert: TheHive alert :type alert: Alert defined in models.py :return: TheHive alert :rtype: json
3.18874
3.346525
0.952851
req = self.url + "/api/alert/{}/markAsRead".format(alert_id) try: return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException: raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_read(self, alert_id)
Mark an alert as read. :param alert_id: The ID of the alert to mark as read. :return:
3.463047
3.947988
0.877167
req = self.url + "/api/alert/{}".format(alert_id) # update only the alert attributes that are not read-only update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description'] data = {k: v for k, v in alert.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)} if hasattr(alert, 'artifacts'): data['artifacts'] = [a.__dict__ for a in alert.artifacts] try: return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException: raise AlertException("Alert update error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[])
Update an alert. :param alert_id: The ID of the alert to update. :param alert: The alert object carrying the new values. :param fields: Optional parameter, an array of field names, the ones we want to update :return:
3.591426
3.843248
0.934477
req = self.url + "/api/alert/{}".format(alert_id) try: return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException as e: raise AlertException("Alert fetch error: {}".format(e))
def get_alert(self, alert_id)
:param alert_id: Alert identifier :return: TheHive Alert :rtype: json
3.296924
3.61532
0.911931
req = self.url + "/api/alert/{}/createCase".format(alert_id) try: return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert, data=json.dumps({})) except requests.exceptions.RequestException as the_exception: raise AlertException("Couldn't promote alert to case: {}".format(the_exception)) return None
def promote_alert_to_case(self, alert_id)
This uses the TheHiveAPI to promote an alert to a case :param alert_id: Alert identifier :return: TheHive Case :rtype: json
3.836169
3.827634
1.00223
req = self.url + "/api/connector/cortex/job" try: data = json.dumps({ "cortexId": cortex_id, "artifactId": artifact_id, "analyzerId": analyzer_id }) return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert) except requests.exceptions.RequestException as e: raise TheHiveException("Analyzer run error: {}".format(e))
def run_analyzer(self, cortex_id, artifact_id, analyzer_id)
:param cortex_id: identifier of the Cortex server :param artifact_id: identifier of the artifact as found with an artifact search :param analyzer_id: name of the analyzer used by the job :rtype: json
3.369132
3.581812
0.940622
return self.transport.forward_request( method='GET', path='/', headers=headers)
def info(self, headers=None)
Retrieves information of the node being connected to via the root endpoint ``'/'``. Args: headers (dict): Optional headers to pass to the request. Returns: dict: Details of the node that this instance is connected to. Some information that may be interesting: * the server version and * an overview of all the endpoints Note: Currently limited to one node, and will be expanded to return information for each node that this instance is connected to.
11.970894
15.483246
0.773151
return self.transport.forward_request( method='GET', path=self.api_prefix, headers=headers, )
def api_info(self, headers=None)
Retrieves information provided by the API root endpoint ``'/api/v1'``. Args: headers (dict): Optional headers to pass to the request. Returns: dict: Details of the HTTP API provided by the BigchainDB server.
7.72451
9.12994
0.846064
return prepare_transaction( operation=operation, signers=signers, recipients=recipients, asset=asset, metadata=metadata, inputs=inputs, )
def prepare(*, operation='CREATE', signers=None, recipients=None, asset=None, metadata=None, inputs=None)
Prepares a transaction payload, ready to be fulfilled. Args: operation (str): The operation to perform. Must be ``'CREATE'`` or ``'TRANSFER'``. Case insensitive. Defaults to ``'CREATE'``. signers (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the issuer(s) of the asset being created. Only applies for ``'CREATE'`` operations. Defaults to ``None``. recipients (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the new recipients(s) of the asset being created or transferred. Defaults to ``None``. asset (:obj:`dict`, optional): The asset to be created or transferred. MUST be supplied for ``'TRANSFER'`` operations. Defaults to ``None``. metadata (:obj:`dict`, optional): Metadata associated with the transaction. Defaults to ``None``. inputs (:obj:`dict` | :obj:`list` | :obj:`tuple`, optional): One or more inputs holding the condition(s) that this transaction intends to fulfill. Each input is expected to be a :obj:`dict`. Only applies to, and MUST be supplied for, ``'TRANSFER'`` operations. Returns: dict: The prepared transaction. Raises: :class:`~.exceptions.BigchaindbException`: If ``operation`` is not ``'CREATE'`` or ``'TRANSFER'``. .. important:: **CREATE operations** * ``signers`` MUST be set. * ``recipients``, ``asset``, and ``metadata`` MAY be set. * If ``asset`` is set, it MUST be in the form of:: { 'data': { ... } } * The argument ``inputs`` is ignored. * If ``recipients`` is not given, or evaluates to ``False``, it will be set equal to ``signers``:: if not recipients: recipients = signers **TRANSFER operations** * ``recipients``, ``asset``, and ``inputs`` MUST be set. * ``asset`` MUST be in the form of:: { 'id': '<Asset ID (i.e. TX ID of its CREATE transaction)>' } * ``metadata`` MAY be set. * The argument ``signers`` is ignored.
2.122493
3.326246
0.638105
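For orientation, a minimal CREATE flow with the driver that ties prepare() to the fulfil and send steps documented below; the node URL and asset payload are placeholders.

from bigchaindb_driver import BigchainDB
from bigchaindb_driver.crypto import generate_keypair

bdb = BigchainDB('https://node.example:9984')  # placeholder node
alice = generate_keypair()

prepared = bdb.transactions.prepare(
    operation='CREATE',
    signers=alice.public_key,
    asset={'data': {'serial': 'abc123'}},
)
fulfilled = bdb.transactions.fulfill(prepared, private_keys=alice.private_key)
bdb.transactions.send_async(fulfilled)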
return self.transport.forward_request( method='GET', path=self.path, params={'asset_id': asset_id, 'operation': operation}, headers=headers, )
def get(self, *, asset_id, operation=None, headers=None)
Given an asset id, get its list of transactions (and optionally filter for only ``'CREATE'`` or ``'TRANSFER'`` transactions). Args: asset_id (str): Id of the asset. operation (str): The type of operation the transaction should be. Either ``'CREATE'`` or ``'TRANSFER'``. Defaults to ``None``. headers (dict): Optional headers to pass to the request. Note: Please note that the id of an asset in BigchainDB is actually the id of the transaction which created the asset. In other words, when querying for an asset id with the operation set to ``'CREATE'``, only one transaction should be expected. This transaction will be the transaction in which the asset was created, and the transaction id will be equal to the given asset id. Hence, the following calls to :meth:`.retrieve` and :meth:`.get` should return the same transaction. >>> bdb = BigchainDB() >>> bdb.transactions.retrieve('foo') >>> bdb.transactions.get(asset_id='foo', operation='CREATE') Since :meth:`.get` returns a list of transactions, it may be more efficient to use :meth:`.retrieve` instead, if one is only interested in the ``'CREATE'`` operation. Returns: list: List of transactions.
3.876838
4.810462
0.805918
return self.transport.forward_request( method='POST', path=self.path, json=transaction, params={'mode': 'async'}, headers=headers)
def send_async(self, transaction, headers=None)
Submit a transaction to the Federation with the mode `async`. Args: transaction (dict): the transaction to be sent to the Federation node(s). headers (dict): Optional headers to pass to the request. Returns: dict: The transaction sent to the Federation node(s).
6.633468
5.375552
1.234007
path = self.path + txid return self.transport.forward_request( method='GET', path=path, headers=headers)
def retrieve(self, txid, headers=None)
Retrieves the transaction with the given id. Args: txid (str): Id of the transaction to retrieve. headers (dict): Optional headers to pass to the request. Returns: dict: The transaction with the given id.
7.931345
11.269026
0.703818
return self.transport.forward_request( method='GET', path=self.path, params={'public_key': public_key, 'spent': spent}, headers=headers, )
def get(self, public_key, spent=None, headers=None)
Get transaction outputs by public key. The public_key parameter must be a base58 encoded ed25519 public key associated with transaction output ownership. Args: public_key (str): Public key for which unfulfilled conditions are sought. spent (bool): Indicate if the result set should include only spent or only unspent outputs. If not specified (``None``) the result includes all the outputs (both spent and unspent) associated with the public key. headers (dict): Optional headers to pass to the request. Returns: :obj:`list` of :obj:`str`: List of unfulfilled conditions. Example: Given a transaction with `id` ``da1b64a907ba54`` having an `ed25519` condition (at index ``0``) with alice's public key:: >>> bdb = BigchainDB() >>> bdb.outputs.get(alice_pubkey) ... ['../transactions/da1b64a907ba54/conditions/0']
3.518342
5.640317
0.623784
block_list = self.transport.forward_request( method='GET', path=self.path, params={'transaction_id': txid}, headers=headers, ) return block_list[0] if len(block_list) else None
def get(self, *, txid, headers=None)
Get the block that contains the given transaction id (``txid``), else return ``None``. Args: txid (str): Transaction id. headers (dict): Optional headers to pass to the request. Returns: int: The height of the block containing the transaction, or ``None`` if no block contains it.
4.369015
4.753367
0.919141
path = self.path + block_height return self.transport.forward_request( method='GET', path=path, headers=headers)
def retrieve(self, block_height, headers=None)
Retrieves the block with the given ``block_height``. Args: block_height (str): height of the block to retrieve. headers (dict): Optional headers to pass to the request. Returns: dict: The block with the given ``block_height``.
9.02853
11.851788
0.761786
return self.transport.forward_request( method='GET', path=self.path, params={'search': search, 'limit': limit}, headers=headers )
def get(self, *, search, limit=0, headers=None)
Retrieves the assets that match a given text search string. Args: search (str): Text search string. limit (int): Limit the number of returned documents. Defaults to zero meaning that it returns all the matching assets. headers (dict): Optional headers to pass to the request. Returns: :obj:`list` of :obj:`dict`: List of assets that match the query.
4.137309
5.397295
0.766552
operation = _normalize_operation(operation) return _prepare_transaction( operation, signers=signers, recipients=recipients, asset=asset, metadata=metadata, inputs=inputs, )
def prepare_transaction(*, operation='CREATE', signers=None, recipients=None, asset=None, metadata=None, inputs=None)
Prepares a transaction payload, ready to be fulfilled. Depending on the value of ``operation``, simply dispatches to either :func:`~.prepare_create_transaction` or :func:`~.prepare_transfer_transaction`. Args: operation (str): The operation to perform. Must be ``'CREATE'`` or ``'TRANSFER'``. Case insensitive. Defaults to ``'CREATE'``. signers (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the issuer(s) of the asset being created. Only applies for ``'CREATE'`` operations. Defaults to ``None``. recipients (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the new recipients(s) of the asset being created or transferred. Defaults to ``None``. asset (:obj:`dict`, optional): The asset to be created or transferred. MUST be supplied for ``'TRANSFER'`` operations. Defaults to ``None``. metadata (:obj:`dict`, optional): Metadata associated with the transaction. Defaults to ``None``. inputs (:obj:`dict` | :obj:`list` | :obj:`tuple`, optional): One or more inputs holding the condition(s) that this transaction intends to fulfill. Each input is expected to be a :obj:`dict`. Only applies to, and MUST be supplied for, ``'TRANSFER'`` operations. Returns: dict: The prepared transaction. Raises: :class:`~.exceptions.BigchaindbException`: If ``operation`` is not ``'CREATE'`` or ``'TRANSFER'``. .. important:: **CREATE operations** * ``signers`` MUST be set. * ``recipients``, ``asset``, and ``metadata`` MAY be set. * If ``asset`` is set, it MUST be in the form of:: { 'data': { ... } } * The argument ``inputs`` is ignored. * If ``recipients`` is not given, or evaluates to ``False``, it will be set equal to ``signers``:: if not recipients: recipients = signers **TRANSFER operations** * ``recipients``, ``asset``, and ``inputs`` MUST be set. * ``asset`` MUST be in the form of:: { 'id': '<Asset ID (i.e. TX ID of its CREATE transaction)>' } * ``metadata`` MAY be set. * The argument ``signers`` is ignored.
2.614086
3.658768
0.714472
if not isinstance(signers, (list, tuple)): signers = [signers] # NOTE: Needed for the time being. See # https://github.com/bigchaindb/bigchaindb/issues/797 elif isinstance(signers, tuple): signers = list(signers) if not recipients: recipients = [(signers, 1)] elif not isinstance(recipients, (list, tuple)): recipients = [([recipients], 1)] # NOTE: Needed for the time being. See # https://github.com/bigchaindb/bigchaindb/issues/797 elif isinstance(recipients, tuple): recipients = [(list(recipients), 1)] transaction = Transaction.create( signers, recipients, metadata=metadata, asset=asset['data'] if asset else None, ) return transaction.to_dict()
def prepare_create_transaction(*, signers, recipients=None, asset=None, metadata=None)
Prepares a ``"CREATE"`` transaction payload, ready to be fulfilled. Args: signers (:obj:`list` | :obj:`tuple` | :obj:`str`): One or more public keys representing the issuer(s) of the asset being created. recipients (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the new recipients(s) of the asset being created. Defaults to ``None``. asset (:obj:`dict`, optional): The asset to be created. Defaults to ``None``. metadata (:obj:`dict`, optional): Metadata associated with the transaction. Defaults to ``None``. Returns: dict: The prepared ``"CREATE"`` transaction. .. important:: * If ``asset`` is set, it MUST be in the form of:: { 'data': { ... } } * If ``recipients`` is not given, or evaluates to ``False``, it will be set equal to ``signers``:: if not recipients: recipients = signers
2.348346
2.392156
0.981686
if not isinstance(private_keys, (list, tuple)): private_keys = [private_keys] # NOTE: Needed for the time being. See # https://github.com/bigchaindb/bigchaindb/issues/797 if isinstance(private_keys, tuple): private_keys = list(private_keys) transaction_obj = Transaction.from_dict(transaction) try: signed_transaction = transaction_obj.sign(private_keys) except KeypairMismatchException as exc: raise MissingPrivateKeyError('A private key is missing!') from exc return signed_transaction.to_dict()
def fulfill_transaction(transaction, *, private_keys)
Fulfills the given transaction. Args: transaction (dict): The transaction to be fulfilled. private_keys (:obj:`str` | :obj:`list` | :obj:`tuple`): One or more private keys to be used for fulfilling the transaction. Returns: dict: The fulfilled transaction payload, ready to be sent to a BigchainDB federation. Raises: :exc:`~.exceptions.MissingPrivateKeyError`: If a private key is missing.
3.182621
2.886733
1.102499
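These offchain helpers also work without a node connection; a sketch of preparing and fulfilling a CREATE transaction locally (the asset payload is a placeholder).

from bigchaindb_driver.crypto import generate_keypair
from bigchaindb_driver.offchain import fulfill_transaction, prepare_transaction

alice = generate_keypair()
prepared = prepare_transaction(
    operation='CREATE',
    signers=alice.public_key,
    asset={'data': {'serial': 'abc123'}},
)
fulfilled = fulfill_transaction(prepared, private_keys=alice.private_key)
# `fulfilled` is a plain dict, ready to be POSTed to a BigchainDB node.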
try: operation = operation.upper() except AttributeError: pass try: operation = ops_map[operation]() except KeyError: pass return operation
def _normalize_operation(operation)
Normalizes the given operation string. For now, this simply means converting the given string to uppercase, looking it up in :attr:`~.ops_map`, and returning the corresponding class if present. Args: operation (str): The operation string to convert. Returns: The class corresponding to the given string, :class:`~.CreateOperation` or :class:`~.TransferOperation`. .. important:: If the :meth:`str.upper` step or the :attr:`~.ops_map` lookup fails, the given ``operation`` argument is returned.
4.146572
3.395866
1.221065
if not node: node = DEFAULT_NODE elif '://' not in node: node = '//{}'.format(node) parts = urlparse(node, scheme='http', allow_fragments=False) port = parts.port if parts.port else _get_default_port(parts.scheme) netloc = '{}:{}'.format(parts.hostname, port) return urlunparse((parts.scheme, netloc, parts.path, '', '', ''))
def normalize_url(node)
Normalizes the given node url
2.401195
2.439099
0.98446
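Illustrative expected behaviour of normalize_url(), assuming it is importable from bigchaindb_driver.utils and that the driver defaults to port 9984 for http and 443 for https.

from bigchaindb_driver.utils import normalize_url  # assumed import path

print(normalize_url(None))                    # expected: 'http://localhost:9984'
print(normalize_url('localhost'))             # expected: 'http://localhost:9984'
print(normalize_url('https://node.example'))  # expected: 'https://node.example:443'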
headers = {} if headers is None else headers if isinstance(node, str): url = normalize_url(node) return {'endpoint': url, 'headers': headers} url = normalize_url(node['endpoint']) node_headers = node.get('headers', {}) return {'endpoint': url, 'headers': {**headers, **node_headers}}
def normalize_node(node, headers=None)
Normalizes given node as str or dict with headers
2.462664
2.280366
1.079942
if not nodes: return (normalize_node(DEFAULT_NODE, headers),) normalized_nodes = () for node in nodes: normalized_nodes += (normalize_node(node, headers),) return normalized_nodes
def normalize_nodes(*nodes, headers=None)
Normalizes given dict or array of driver nodes
3.284498
3.348733
0.980818
backoff_timedelta = self.get_backoff_timedelta() if timeout is not None and timeout < backoff_timedelta: raise TimeoutError if backoff_timedelta > 0: time.sleep(backoff_timedelta) connExc = None timeout = timeout if timeout is None else timeout - backoff_timedelta try: response = self._request( method=method, timeout=timeout, url=self.node_url + path if path else self.node_url, json=json, params=params, headers=headers, **kwargs, ) except ConnectionError as err: connExc = err raise err finally: self.update_backoff_time(success=connExc is None, backoff_cap=backoff_cap) return response
def request(self, method, *, path=None, json=None, params=None, headers=None, timeout=None, backoff_cap=None, **kwargs)
Performs an HTTP request with the given parameters. Implements exponential backoff. If `ConnectionError` occurs, a timestamp equal to now + the default delay (`BACKOFF_DELAY`) is assigned to the object. The timestamp is in UTC. Next time the function is called, it either waits till the timestamp is passed or raises `TimeoutError`. If `ConnectionError` occurs two or more times in a row, the retry count is incremented and the new timestamp is calculated as now + the default delay multiplied by two to the power of the number of retries. If a request is successful, the backoff timestamp is removed, the retry count is back to zero. Args: method (str): HTTP method (e.g.: ``'GET'``). path (str): API endpoint path (e.g.: ``'/transactions'``). json (dict): JSON data to send along with the request. params (dict): Dictionary of URL (query) parameters. headers (dict): Optional headers to pass to the request. timeout (int): Optional timeout in seconds. backoff_cap (int): The maximal allowed backoff delay in seconds to be assigned to a node. kwargs: Optional keyword arguments.
3.10321
3.168085
0.979522
if len(connections) == 1: return connections[0] def key(conn): return (datetime.min if conn.backoff_time is None else conn.backoff_time) return min(*connections, key=key)
def pick(self, connections)
Picks a connection with the earliest backoff time. As a result, the first connection is picked for as long as it has no backoff time. Otherwise, the connections are tried in a round robin fashion. Args: connections (:obj:list): List of :class:`~bigchaindb_driver.connection.Connection` instances.
4.700172
3.951192
1.189558
error_trace = [] timeout = self.timeout backoff_cap = NO_TIMEOUT_BACKOFF_CAP if timeout is None \ else timeout / 2 while timeout is None or timeout > 0: connection = self.connection_pool.get_connection() start = time() try: response = connection.request( method=method, path=path, params=params, json=json, headers=headers, timeout=timeout, backoff_cap=backoff_cap, ) except ConnectionError as err: error_trace.append(err) continue else: return response.data finally: elapsed = time() - start if timeout is not None: timeout -= elapsed raise TimeoutError(error_trace)
def forward_request(self, method, path=None, json=None, params=None, headers=None)
Makes HTTP requests to the configured nodes. Retries connection errors (e.g. DNS failures, refused connection, etc). A user may choose to retry other errors by catching the corresponding exceptions and retrying `forward_request`. Exponential backoff is implemented individually for each node. Backoff delays are expressed as timestamps stored on the object and they are not reset in between multiple function calls. Times out when `self.timeout` is expired, if not `None`. Args: method (str): HTTP method name (e.g.: ``'GET'``). path (str): Path to be appended to the base url of a node. E.g.: ``'/transactions'``). json (dict): Payload to be sent with the HTTP request. params (dict)): Dictionary of URL (query) parameters. headers (dict): Optional headers to pass to the request. Returns: dict: Result of :meth:`requests.models.Response.json`
2.964531
3.229153
0.918052
for key, value in obj.items(): validation_fun(obj_name, key) if isinstance(value, dict): validate_all_keys(obj_name, value, validation_fun)
def validate_all_keys(obj_name, obj, validation_fun)
Validate all (nested) keys in `obj` by using `validation_fun`. Args: obj_name (str): name for `obj` being validated. obj (dict): dictionary object. validation_fun (function): function used to validate the value of `key`. Returns: None: indicates validation successful Raises: ValidationError: `validation_fun` will raise this error on failure
1.858907
2.556607
0.727099
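To make the recursion concrete, a self-contained sketch; the function body mirrors the one above, and ValueError stands in for the library's ValidationError.

def validate_all_keys(obj_name, obj, validation_fun):
    for key, value in obj.items():
        validation_fun(obj_name, key)
        if isinstance(value, dict):
            validate_all_keys(obj_name, value, validation_fun)

def no_dots(obj_name, key):
    # Illustrative validator: reject keys containing a dot.
    if '.' in key:
        raise ValueError('invalid key {!r} in {}'.format(key, obj_name))

validate_all_keys('metadata', {'ok': 1, 'nested': {'bad.key': 2}}, no_dots)  # raises ValueError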
for vkey, value in obj.items(): if vkey == key: validation_fun(value) elif isinstance(value, dict): validate_all_values_for_key(value, key, validation_fun)
def validate_all_values_for_key(obj, key, validation_fun)
Validate the value for every (nested) occurrence of `key` in `obj` using `validation_fun`. Args: obj (dict): dictionary object. key (str): key whose value is to be validated. validation_fun (function): function used to validate the value of `key`. Raises: ValidationError: `validation_fun` will raise this error on failure
1.991806
2.606729
0.764102
if self.operation == Transaction.CREATE: self._asset_id = self._id elif self.operation == Transaction.TRANSFER: self._asset_id = self.asset['id'] return (UnspentOutput( transaction_id=self._id, output_index=output_index, amount=output.amount, asset_id=self._asset_id, condition_uri=output.fulfillment.condition_uri, ) for output_index, output in enumerate(self.outputs))
def unspent_outputs(self)
UnspentOutput: The outputs of this transaction, in a data structure containing relevant information for storing them in a UTXO set, and performing validation.
3.567407
3.399639
1.049349
if not isinstance(inputs, list): raise TypeError('`inputs` must be a list instance') if len(inputs) == 0: raise ValueError('`inputs` must contain at least one item') if not isinstance(recipients, list): raise TypeError('`recipients` must be a list instance') if len(recipients) == 0: raise ValueError('`recipients` list cannot be empty') outputs = [] for recipient in recipients: if not isinstance(recipient, tuple) or len(recipient) != 2: raise ValueError(('Each `recipient` in the list must be a' ' tuple of `([<list of public keys>],' ' <amount>)`')) pub_keys, amount = recipient outputs.append(Output.generate(pub_keys, amount)) if not isinstance(asset_id, str): raise TypeError('`asset_id` must be a string') inputs = deepcopy(inputs) return cls(cls.TRANSFER, {'id': asset_id}, inputs, outputs, metadata)
def transfer(cls, inputs, recipients, asset_id, metadata=None)
A simple way to generate a `TRANSFER` transaction. Note: Different cases for threshold conditions: Combining multiple `inputs` with an arbitrary number of `recipients` can yield interesting cases for the creation of threshold conditions we'd like to support. The following notation is proposed: 1. The index of a `recipient` corresponds to the index of an input: e.g. `transfer([input1], [a])`, means `input1` would now be owned by user `a`. 2. `recipients` can (almost) get arbitrary deeply nested, creating various complex threshold conditions: e.g. `transfer([inp1, inp2], [[a, [b, c]], d])`, means `a`'s signature would have a 50% weight on `inp1` compared to `b` and `c` that share 25% of the leftover weight respectively. `inp2` is owned completely by `d`. Args: inputs (:obj:`list` of :class:`~bigchaindb.common.transaction. Input`): Converted `Output`s, intended to be used as inputs in the transfer to generate. recipients (:obj:`list` of :obj:`tuple`): A list of ([keys],amount) that represent the recipients of this Transaction. asset_id (str): The asset ID of the asset to be transferred in this Transaction. metadata (dict): Python dictionary to be stored along with the Transaction. Returns: :class:`~bigchaindb.common.transaction.Transaction`
2.461699
2.358459
1.043774
if self.operation == Transaction.CREATE: # NOTE: Since in the case of a `CREATE`-transaction we do not have # to check for outputs, we're just submitting dummy # values to the actual method. This simplifies its logic # greatly, as we do not have to check against `None` values. return self._inputs_valid(['dummyvalue' for _ in self.inputs]) elif self.operation == Transaction.TRANSFER: return self._inputs_valid([output.fulfillment.condition_uri for output in outputs]) else: allowed_ops = ', '.join(self.__class__.ALLOWED_OPERATIONS) raise TypeError('`operation` must be one of {}' .format(allowed_ops))
def inputs_valid(self, outputs=None)
Validates the Inputs in the Transaction against given Outputs. Note: Given a `CREATE` Transaction is passed, dummy values for Outputs are submitted for validation that evaluate parts of the validation-checks to `True`. Args: outputs (:obj:`list` of :class:`~bigchaindb.common. transaction.Output`): A list of Outputs to check the Inputs against. Returns: bool: If all Inputs are valid.
6.830989
5.517805
1.23799
ccffill = input_.fulfillment try: parsed_ffill = Fulfillment.from_uri(ccffill.serialize_uri()) except (TypeError, ValueError, ParsingError, ASN1DecodeError, ASN1EncodeError): return False if operation == Transaction.CREATE: # NOTE: In the case of a `CREATE` transaction, the # output is always valid. output_valid = True else: output_valid = output_condition_uri == ccffill.condition_uri message = sha3_256(message.encode()) if input_.fulfills: message.update('{}{}'.format( input_.fulfills.txid, input_.fulfills.output).encode()) # NOTE: We pass a timestamp to `.validate`, as in case of a timeout # condition we'll have to validate against it # cryptoconditions makes no assumptions of the encoding of the # message to sign or verify. It only accepts bytestrings ffill_valid = parsed_ffill.validate(message=message.digest()) return output_valid and ffill_valid
def _input_valid(input_, operation, message, output_condition_uri=None)
Validates a single Input against a single Output. Note: In case of a `CREATE` Transaction, this method does not validate against `output_condition_uri`. Args: input_ (:class:`~bigchaindb.common.transaction. Input`) The Input to be signed. operation (str): The type of Transaction. message (str): The fulfillment message. output_condition_uri (str, optional): An Output to check the Input against. Returns: bool: If the Input is valid.
7.503
6.28827
1.193174
# NOTE: Remove reference to avoid side effects tx_body = deepcopy(tx_body) try: proposed_tx_id = tx_body['id'] except KeyError: raise InvalidHash('No transaction id found!') tx_body['id'] = None tx_body_serialized = Transaction._to_str(tx_body) valid_tx_id = Transaction._to_hash(tx_body_serialized) if proposed_tx_id != valid_tx_id: err_msg = ("The transaction's id '{}' isn't equal to " "the hash of its body, i.e. it's not valid.") raise InvalidHash(err_msg.format(proposed_tx_id))
def validate_id(tx_body)
Validate the transaction ID of a transaction Args: tx_body (dict): The Transaction whose ID is to be validated.
3.980195
4.052019
0.982275
inputs = [Input.from_dict(input_) for input_ in tx['inputs']] outputs = [Output.from_dict(output) for output in tx['outputs']] return cls(tx['operation'], tx['asset'], inputs, outputs, tx['metadata'], tx['version'], hash_id=tx['id'])
def from_dict(cls, tx)
Transforms a Python dictionary into a Transaction object. Args: tx (dict): The Transaction to be transformed. Returns: :class:`~bigchaindb.common.transaction.Transaction`
3.495541
3.793565
0.921439
query = [] encoders = {dict: _dictionary_encoder} for k, v in dictionary.iteritems(): if v.__class__ in encoders: nested_query = encoders[v.__class__](k, v) query += nested_query else: key = to_utf8(k) value = to_utf8(v) query.append('{}={}'.format(key, value)) return '&'.join(query)
def dict2query(dictionary)
We want post vars of form: {'foo': 'bar', 'nested': {'a': 'b', 'c': 'd'}} to become: foo=bar&nested[a]=b&nested[c]=d
3.230972
2.966619
1.089109
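A quick Python 3 illustration of the intended encoding; the original above is Python 2 (iteritems) and relies on helper encoders that are not shown, so this is a sketch of the behaviour, not the library code.

def dict2query_sketch(d, prefix=None):
    parts = []
    for k, v in d.items():
        key = '{}[{}]'.format(prefix, k) if prefix else str(k)
        if isinstance(v, dict):
            parts.append(dict2query_sketch(v, key))
        else:
            parts.append('{}={}'.format(key, v))
    return '&'.join(parts)

print(dict2query_sketch({'foo': 'bar', 'nested': {'a': 'b', 'c': 'd'}}))
# foo=bar&nested[a]=b&nested[c]=d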
self.found_visible = False is_multi_quote_header = self.MULTI_QUOTE_HDR_REGEX_MULTILINE.search(self.text) if is_multi_quote_header: self.text = self.MULTI_QUOTE_HDR_REGEX.sub(is_multi_quote_header.groups()[0].replace('\n', ''), self.text) # Fix any outlook style replies, with the reply immediately above the signature boundary line # See email_2_2.txt for an example self.text = re.sub('([^\n])(?=\n ?[_-]{7,})', '\\1\n', self.text, re.MULTILINE) self.lines = self.text.split('\n') self.lines.reverse() for line in self.lines: self._scan_line(line) self._finish_fragment() self.fragments.reverse() return self
def read(self)
Creates new fragment for each line and labels as a signature, quote, or hidden. Returns EmailMessage instance
5.951473
5.442143
1.09359
reply = [] for f in self.fragments: if not (f.hidden or f.quoted): reply.append(f.content) return '\n'.join(reply)
def reply(self)
Captures reply message within email
5.039784
5.047954
0.998382
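Typical use through the package's public helpers, assuming the email_reply_parser package; the output comments show the expected result for this sample message.

from email_reply_parser import EmailReplyParser

raw = ("Thanks, that fixed it!\n\n"
       "On Tue, Mar 3, 2020 at 9:00 AM Support <help@example.com> wrote:\n"
       "> Could you try clearing the cache?\n")

print(EmailReplyParser.parse_reply(raw))  # expected: 'Thanks, that fixed it!'

message = EmailReplyParser.read(raw)      # EmailMessage with labelled fragments
for fragment in message.fragments:
    print(fragment.quoted, fragment.hidden, fragment.content[:40])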
is_quote_header = self.QUOTE_HDR_REGEX.match(line) is not None is_quoted = self.QUOTED_REGEX.match(line) is not None is_header = is_quote_header or self.HEADER_REGEX.match(line) is not None if self.fragment and len(line.strip()) == 0: if self.SIG_REGEX.match(self.fragment.lines[-1].strip()): self.fragment.signature = True self._finish_fragment() if self.fragment \ and ((self.fragment.headers == is_header and self.fragment.quoted == is_quoted) or (self.fragment.quoted and (is_quote_header or len(line.strip()) == 0))): self.fragment.lines.append(line) else: self._finish_fragment() self.fragment = Fragment(is_quoted, line, headers=is_header)
def _scan_line(self, line)
Reviews each line in email message and determines fragment type line - a row of text from an email message
3.240917
3.080254
1.052159
if self.fragment: self.fragment.finish() if self.fragment.headers: # Regardless of what's been seen to this point, if we encounter a headers fragment, # all the previous fragments should be marked hidden and found_visible set to False. self.found_visible = False for f in self.fragments: f.hidden = True if not self.found_visible: if self.fragment.quoted \ or self.fragment.headers \ or self.fragment.signature \ or (len(self.fragment.content.strip()) == 0): self.fragment.hidden = True else: self.found_visible = True self.fragments.append(self.fragment) self.fragment = None
def _finish_fragment(self)
Finalizes the current fragment, marks it hidden if appropriate, and appends it to the list of fragments.
4.879507
4.712088
1.03553
self.lines.reverse() self._content = '\n'.join(self.lines) self.lines = None
def finish(self)
Creates block of content with lines belonging to fragment.
6.237521
4.464978
1.396988
if not src_tstamp_str: return False res = src_tstamp_str if src_format and dst_format: try: # dt_value needs to be a datetime.datetime object\ # (so notime.struct_time or mx.DateTime.DateTime here!) dt_value = datetime.datetime.strptime(src_tstamp_str, src_format) if context.get('tz', False): try: import pytz src_tz = pytz.timezone(context['tz']) dst_tz = pytz.timezone('UTC') src_dt = src_tz.localize(dt_value, is_dst=True) dt_value = src_dt.astimezone(dst_tz) except Exception: pass res = dt_value.strftime(dst_format) except Exception: # Normal ways to end up here are if strptime or strftime failed if not ignore_unparsable_time: return False pass return res
def _offset_format_timestamp1(src_tstamp_str, src_format, dst_format, ignore_unparsable_time=True, context=None)
Convert a source timeStamp string into a destination timeStamp string, attempting to apply the correct offset if both the server and local timeZone are recognized,or no offset at all if they aren't or if tz_offset is false (i.e. assuming they are both in the same TZ). @param src_tstamp_str: the STR value containing the timeStamp. @param src_format: the format to use when parsing the local timeStamp. @param dst_format: the format to use when formatting the resulting timeStamp. @param server_to_client: specify timeZone offset direction (server=src and client=dest if True, or client=src and server=dest if False) @param ignore_unparsable_time: if True, return False if src_tstamp_str cannot be parsed using src_format or formatted using dst_format. @return: destination formatted timestamp, expressed in the destination timezone if possible and if tz_offset is true, or src_tstamp_str if timezone offset could not be determined.
3.373677
3.444784
0.979358
''' Based on isroom, status will be updated. ---------------------------------------- @param self: object pointer ''' if self.isroom is False: self.status = 'occupied' if self.isroom is True: self.status = 'available'
def isroom_change(self)
Based on isroom, status will be updated. ---------------------------------------- @param self: object pointer
6.723926
2.529268
2.658447
if 'isroom' in vals and vals['isroom'] is False: vals.update({'color': 2, 'status': 'occupied'}) if 'isroom' in vals and vals['isroom'] is True: vals.update({'color': 5, 'status': 'available'}) ret_val = super(HotelRoom, self).write(vals) return ret_val
def write(self, vals)
Overrides orm write method. @param self: The object pointer @param vals: dictionary of fields value.
3.92313
3.790363
1.035028
''' This method is used to validate the room_lines. ------------------------------------------------ @param self: object pointer @return: raise warning depending on the validation ''' folio_rooms = [] for room in self[0].room_lines: if room.product_id.id in folio_rooms: raise ValidationError(_('You Cannot Take Same Room Twice')) folio_rooms.append(room.product_id.id)
def folio_room_lines(self)
This method is used to validate the room_lines. ------------------------------------------------ @param self: object pointer @return: raise warning depending on the validation
6.811605
3.524737
1.932515
''' This method gives the duration between check in and checkout if customer will leave only for some hour it would be considers as a whole day.If customer will check in checkout for more or equal hours, which configured in company as additional hours than it would be consider as full days -------------------------------------------------------------------- @param self: object pointer @return: Duration and checkout_date ''' configured_addition_hours = 0 wid = self.warehouse_id whouse_com_id = wid or wid.company_id if whouse_com_id: configured_addition_hours = wid.company_id.additional_hours myduration = 0 chckin = self.checkin_date chckout = self.checkout_date if chckin and chckout: server_dt = DEFAULT_SERVER_DATETIME_FORMAT chkin_dt = datetime.datetime.strptime(chckin, server_dt) chkout_dt = datetime.datetime.strptime(chckout, server_dt) dur = chkout_dt - chkin_dt sec_dur = dur.seconds if (not dur.days and not sec_dur) or (dur.days and not sec_dur): myduration = dur.days else: myduration = dur.days + 1 # To calculate additional hours in hotel room as per minutes if configured_addition_hours > 0: additional_hours = abs((dur.seconds / 60) / 60) if additional_hours >= configured_addition_hours: myduration += 1 self.duration = myduration self.duration_dummy = self.duration
def onchange_dates(self)
This method gives the duration between check-in and check-out. If the customer stays for only a few hours, it is still counted as a whole day. If the stay runs over by at least the additional hours configured on the company, the extra time is counted as a full day as well. -------------------------------------------------------------------- @param self: object pointer @return: Duration and checkout_date
5.967069
2.849905
2.093778
if not 'service_lines' and 'folio_id' in vals: tmp_room_lines = vals.get('room_lines', []) vals['order_policy'] = vals.get('hotel_policy', 'manual') vals.update({'room_lines': []}) folio_id = super(HotelFolio, self).create(vals) for line in (tmp_room_lines): line[2].update({'folio_id': folio_id}) vals.update({'room_lines': tmp_room_lines}) folio_id.write(vals) else: if not vals: vals = {} vals['name'] = self.env['ir.sequence'].next_by_code('hotel.folio') vals['duration'] = vals.get('duration', 0.0) or vals.get('duration_dummy', 0.0) folio_id = super(HotelFolio, self).create(vals) folio_room_line_obj = self.env['folio.room.line'] h_room_obj = self.env['hotel.room'] try: for rec in folio_id: if not rec.reservation_id: for room_rec in rec.room_lines: prod = room_rec.product_id.name room_obj = h_room_obj.search([('name', '=', prod)]) room_obj.write({'isroom': False}) vals = {'room_id': room_obj.id, 'check_in': rec.checkin_date, 'check_out': rec.checkout_date, 'folio_id': rec.id, } folio_room_line_obj.create(vals) except: for rec in folio_id: for room_rec in rec.room_lines: prod = room_rec.product_id.name room_obj = h_room_obj.search([('name', '=', prod)]) room_obj.write({'isroom': False}) vals = {'room_id': room_obj.id, 'check_in': rec.checkin_date, 'check_out': rec.checkout_date, 'folio_id': rec.id, } folio_room_line_obj.create(vals) return folio_id
def create(self, vals, check=True)
Overrides orm create method. @param self: The object pointer @param vals: dictionary of fields value. @return: new record set for hotel folio.
2.447994
2.333352
1.049132
product_obj = self.env['product.product'] h_room_obj = self.env['hotel.room'] folio_room_line_obj = self.env['folio.room.line'] room_lst = [] room_lst1 = [] for rec in self: for res in rec.room_lines: room_lst1.append(res.product_id.id) if vals and vals.get('duration_dummy', False): vals['duration'] = vals.get('duration_dummy', 0.0) else: vals['duration'] = rec.duration for folio_rec in rec.room_lines: room_lst.append(folio_rec.product_id.id) new_rooms = set(room_lst).difference(set(room_lst1)) if len(list(new_rooms)) != 0: room_list = product_obj.browse(list(new_rooms)) for rm in room_list: room_obj = h_room_obj.search([('name', '=', rm.name)]) room_obj.write({'isroom': False}) vals = {'room_id': room_obj.id, 'check_in': rec.checkin_date, 'check_out': rec.checkout_date, 'folio_id': rec.id, } folio_room_line_obj.create(vals) if len(list(new_rooms)) == 0: room_list_obj = product_obj.browse(room_lst1) for rom in room_list_obj: room_obj = h_room_obj.search([('name', '=', rom.name)]) room_obj.write({'isroom': False}) room_vals = {'room_id': room_obj.id, 'check_in': rec.checkin_date, 'check_out': rec.checkout_date, 'folio_id': rec.id, } folio_romline_rec = (folio_room_line_obj.search ([('folio_id', '=', rec.id)])) folio_romline_rec.write(room_vals) return super(HotelFolio, self).write(vals)
def write(self, vals)
Overrides orm write method. @param self: The object pointer @param vals: dictionary of fields value.
2.431879
2.435675
0.998442
''' When you change partner_id it will update the partner_invoice_id, partner_shipping_id and pricelist_id of the hotel folio as well --------------------------------------------------------------- @param self: object pointer ''' if self.partner_id: partner_rec = self.env['res.partner'].browse(self.partner_id.id) order_ids = [folio.order_id.id for folio in self] if not order_ids: self.partner_invoice_id = partner_rec.id self.partner_shipping_id = partner_rec.id self.pricelist_id = partner_rec.property_product_pricelist.id raise _('Not Any Order For %s ' % (partner_rec.name)) else: self.partner_invoice_id = partner_rec.id self.partner_shipping_id = partner_rec.id self.pricelist_id = partner_rec.property_product_pricelist.id
def onchange_partner_id(self)
When you change partner_id it will update the partner_invoice_id, partner_shipping_id and pricelist_id of the hotel folio as well --------------------------------------------------------------- @param self: object pointer
2.962501
1.906669
1.553758
''' This method is used to validate the checkin_date and checkout_date. ------------------------------------------------------------------- @param self: object pointer @return: raise warning depending on the validation ''' if self.checkin_date >= self.checkout_date: raise ValidationError(_('Room line Check In Date Should be \ less than the Check Out Date!')) if self.folio_id.date_order and self.checkin_date: if self.checkin_date <= self.folio_id.date_order: raise ValidationError(_('Room line check in date should be \ greater than the current date.'))
def check_dates(self)
This method is used to validate the checkin_date and checkout_date. ------------------------------------------------------------------- @param self: object pointer @return: raise warning depending on the validation
6.182559
3.686477
1.677091
sale_line_obj = self.env['sale.order.line'] fr_obj = self.env['folio.room.line'] for line in self: if line.order_line_id: sale_unlink_obj = (sale_line_obj.browse ([line.order_line_id.id])) for rec in sale_unlink_obj: room_obj = self.env['hotel.room' ].search([('name', '=', rec.name)]) if room_obj.id: folio_arg = [('folio_id', '=', line.folio_id.id), ('room_id', '=', room_obj.id)] folio_room_line_myobj = fr_obj.search(folio_arg) if folio_room_line_myobj.id: folio_room_line_myobj.unlink() room_obj.write({'isroom': True, 'status': 'available'}) sale_unlink_obj.unlink() return super(HotelFolioLine, self).unlink()
def unlink(self)
Overrides orm unlink method. @param self: The object pointer @return: True/False.
3.856467
4.009447
0.961845
''' - @param self: object pointer - ''' context = dict(self._context) if not context: context = {} if context.get('folio', False): if self.product_id and self.folio_id.partner_id: self.name = self.product_id.name self.price_unit = self.product_id.list_price self.product_uom = self.product_id.uom_id tax_obj = self.env['account.tax'] pr = self.product_id self.price_unit = tax_obj._fix_tax_included_price(pr.price, pr.taxes_id, self.tax_id) else: if not self.product_id: return {'domain': {'product_uom': []}} val = {} pr = self.product_id.with_context( lang=self.folio_id.partner_id.lang, partner=self.folio_id.partner_id.id, quantity=val.get('product_uom_qty') or self.product_uom_qty, date=self.folio_id.date_order, pricelist=self.folio_id.pricelist_id.id, uom=self.product_uom.id ) p = pr.with_context(pricelist=self.order_id.pricelist_id.id).price if self.folio_id.pricelist_id and self.folio_id.partner_id: obj = self.env['account.tax'] val['price_unit'] = obj._fix_tax_included_price(p, pr.taxes_id, self.tax_id)
def product_id_change(self)
@param self: object pointer
2.875836
2.677563
1.07405
''' When you change checkin_date or checkout_date it will checked it and update the qty of hotel folio line ----------------------------------------------------------------- @param self: object pointer ''' configured_addition_hours = 0 fwhouse_id = self.folio_id.warehouse_id fwc_id = fwhouse_id or fwhouse_id.company_id if fwc_id: configured_addition_hours = fwhouse_id.company_id.additional_hours myduration = 0 if not self.checkin_date: self.checkin_date = time.strftime(DEFAULT_SERVER_DATETIME_FORMAT) if not self.checkout_date: self.checkout_date = time.strftime(DEFAULT_SERVER_DATETIME_FORMAT) chckin = self.checkin_date chckout = self.checkout_date if chckin and chckout: server_dt = DEFAULT_SERVER_DATETIME_FORMAT chkin_dt = datetime.datetime.strptime(chckin, server_dt) chkout_dt = datetime.datetime.strptime(chckout, server_dt) dur = chkout_dt - chkin_dt sec_dur = dur.seconds if (not dur.days and not sec_dur) or (dur.days and not sec_dur): myduration = dur.days else: myduration = dur.days + 1 # To calculate additional hours in hotel room as per minutes if configured_addition_hours > 0: additional_hours = abs((dur.seconds / 60) / 60) if additional_hours >= configured_addition_hours: myduration += 1 self.product_uom_qty = myduration hotel_room_obj = self.env['hotel.room'] hotel_room_ids = hotel_room_obj.search([]) avail_prod_ids = [] for room in hotel_room_ids: assigned = False for rm_line in room.room_line_ids: if rm_line.status != 'cancel': if(self.checkin_date <= rm_line.check_in <= self.checkout_date) or (self.checkin_date <= rm_line.check_out <= self.checkout_date): assigned = True elif (rm_line.check_in <= self.checkin_date <= rm_line.check_out) or (rm_line.check_in <= self.checkout_date <= rm_line.check_out): assigned = True if not assigned: avail_prod_ids.append(room.product_id.id) domain = {'product_id': [('id', 'in', avail_prod_ids)]} return {'domain': domain}
def on_change_checkout(self)
When you change checkin_date or checkout_date, the dates are checked and the qty of the hotel folio line is updated ----------------------------------------------------------------- @param self: object pointer
3.07836
2.560023
1.202474
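The availability filter in the method above treats a room as assigned whenever an existing line's stay intersects the requested check-in/check-out window. A minimal sketch of that interval test on plain datetimes; the helper name ranges_overlap and the sample dates are illustrative only and are not part of the module:

from datetime import datetime

def ranges_overlap(a_start, a_end, b_start, b_end):
    # Two closed intervals intersect exactly when each one starts
    # before (or when) the other one ends.
    return a_start <= b_end and b_start <= a_end

# A room already booked 10-12 June is unavailable for a request covering
# 11-14 June, so it would be left out of avail_prod_ids.
booked = (datetime(2023, 6, 10, 14, 0), datetime(2023, 6, 12, 11, 0))
wanted = (datetime(2023, 6, 11, 14, 0), datetime(2023, 6, 14, 11, 0))
print(ranges_overlap(*wanted, *booked))  # True -> room treated as assigned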
if 'folio_id' in vals: folio = self.env['hotel.folio'].browse(vals['folio_id']) vals.update({'order_id': folio.order_id.id}) return super(HotelServiceLine, self).create(vals)
def create(self, vals, check=True)
Overrides orm create method. @param self: The object pointer @param vals: dictionary of field values. @return: new record set for hotel service line.
3.620851
3.044405
1.189346
s_line_obj = self.env['sale.order.line'] for line in self: if line.service_line_id: sale_unlink_obj = s_line_obj.browse([line.service_line_id.id]) sale_unlink_obj.unlink() return super(HotelServiceLine, self).unlink()
def unlink(self)
Overrides orm unlink method. @param self: The object pointer @return: True/False.
4.562912
4.640032
0.983379
''' When you change checkin_date or checkout_date, the dates are checked and the qty of the hotel service line is updated ----------------------------------------------------------------- @param self: object pointer ''' time_a = time.strftime(DEFAULT_SERVER_DATETIME_FORMAT) if not self.ser_checkin_date: self.ser_checkin_date = time_a if not self.ser_checkout_date: self.ser_checkout_date = time_a if self.ser_checkout_date < self.ser_checkin_date: raise ValidationError(_('Checkout must be greater than or equal to the checkin date.')) if self.ser_checkin_date and self.ser_checkout_date: date_a = time.strptime(self.ser_checkout_date, DEFAULT_SERVER_DATETIME_FORMAT)[:5] date_b = time.strptime(self.ser_checkin_date, DEFAULT_SERVER_DATETIME_FORMAT)[:5] diffDate = datetime.datetime(*date_a) - datetime.datetime(*date_b) qty = diffDate.days + 1 self.product_uom_qty = qty
def on_change_checkout(self)
When you change checkin_date or checkout_date, the dates are checked and the qty of the hotel service line is updated ----------------------------------------------------------------- @param self: object pointer
3.509807
2.256855
1.555176
reservation_line_obj = self.env['hotel.room.reservation.line'] room_obj = self.env['hotel.room'] prod_id = vals.get('product_id') or self.product_id.id chkin = vals.get('checkin_date') or self.checkin_date chkout = vals.get('checkout_date') or self.checkout_date is_reserved = self.is_reserved if prod_id and is_reserved: prod_domain = [('product_id', '=', prod_id)] prod_room = room_obj.search(prod_domain, limit=1) if (self.product_id and self.checkin_date and self.checkout_date): old_prd_domain = [('product_id', '=', self.product_id.id)] old_prod_room = room_obj.search(old_prd_domain, limit=1) if prod_room and old_prod_room: # Check for existing room lines. srch_rmline = [('room_id', '=', old_prod_room.id), ('check_in', '=', self.checkin_date), ('check_out', '=', self.checkout_date), ] rm_lines = reservation_line_obj.search(srch_rmline) if rm_lines: rm_line_vals = {'room_id': prod_room.id, 'check_in': chkin, 'check_out': chkout} rm_lines.write(rm_line_vals) return super(HotelFolioLineExt, self).write(vals)
def write(self, vals)
Overrides orm write method. @param self: The object pointer @param vals: dictionary of field values. Updates the hotel room reservation line history.
2.539919
2.35719
1.07752
for reserv_rec in self: if reserv_rec.state != 'draft': raise ValidationError(_('You cannot delete Reservation in %s\ state.') % (reserv_rec.state)) return super(HotelReservation, self).unlink()
def unlink(self)
Overrides orm unlink method. @param self: The object pointer @return: True/False.
5.883389
6.532341
0.900656
''' This method is used to validate the reservation_line. ----------------------------------------------------- @param self: object pointer @return: raise a warning depending on the validation ''' ctx = dict(self._context) or {} for reservation in self: cap = 0 for rec in reservation.reservation_line: if len(rec.reserve) == 0: raise ValidationError(_( 'Please Select Rooms For Reservation.')) for room in rec.reserve: cap += room.capacity if not ctx.get('duplicate'): if (reservation.adults + reservation.children) > cap: raise ValidationError(_( 'Room Capacity Exceeded \n' ' Please Select Rooms According to' ' Members Accommodation.')) if reservation.adults <= 0: raise ValidationError(_('Adults must be more than 0'))
def check_reservation_rooms(self)
This method is used to validate the reservation_line. ----------------------------------------------------- @param self: object pointer @return: raise a warning depending on the validation
5.948163
4.226774
1.407258
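The capacity guard above simply sums the capacity of every selected room and compares it against adults + children. A tiny illustration of that arithmetic with made-up numbers (no Odoo records involved):

# Hypothetical reservation: two rooms with capacities 2 and 3.
room_capacities = [2, 3]
adults, children = 4, 2

cap = sum(room_capacities)       # 5
if adults + children > cap:      # 6 > 5 -> the constraint would raise
    print("Room Capacity Exceeded")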
if self.checkout and self.checkin: if self.checkin < self.date_order: raise ValidationError(_('Check-in date should be greater than \ the current date.')) if self.checkout < self.checkin: raise ValidationError(_('Check-out date should be greater \ than Check-in date.'))
def check_in_out_dates(self)
The check-in date should be greater than date_order, and the check-out date should be greater than the check-in date.
3.363086
2.523886
1.332503
''' When you change checkout or checkin, the dummy field is updated ----------------------------------------------------------- @param self: object pointer @return: updated dummy date ''' checkout_date = self.checkout or time.strftime(dt) checkin_date = self.checkin or time.strftime(dt) if not (checkout_date and checkin_date): return {'value': {}} delta = timedelta(days=1) dat_a = time.strptime(checkout_date, dt)[:5] addDays = datetime(*dat_a) + delta self.dummy = addDays.strftime(dt)
def on_change_checkout(self)
When you change checkout or checkin, the dummy field is updated ----------------------------------------------------------- @param self: object pointer @return: updated dummy date
9.020914
3.997936
2.256393
''' When you change partner_id it will update the partner_invoice_id, partner_shipping_id and pricelist_id of the hotel reservation as well --------------------------------------------------------------------- @param self: object pointer ''' if not self.partner_id: self.partner_invoice_id = False self.partner_shipping_id = False self.partner_order_id = False else: addr = self.partner_id.address_get(['delivery', 'invoice', 'contact']) self.partner_invoice_id = addr['invoice'] self.partner_order_id = addr['contact'] self.partner_shipping_id = addr['delivery'] self.pricelist_id = self.partner_id.property_product_pricelist.id
def onchange_partner_id(self)
When you change partner_id it will update the partner_invoice_id, partner_shipping_id and pricelist_id of the hotel reservation as well --------------------------------------------------------------------- @param self: object pointer
3.205991
1.914947
1.674193
if not vals: vals = {} vals['reservation_no'] = self.env['ir.sequence'].\ next_by_code('hotel.reservation') or 'New' return super(HotelReservation, self).create(vals)
def create(self, vals)
Overrides orm create method. @param self: The object pointer @param vals: dictionary of field values.
4.151074
4.839298
0.857784
reservation_line_obj = self.env['hotel.room.reservation.line'] vals = {} for reservation in self: reserv_checkin = datetime.strptime(reservation.checkin, dt) reserv_checkout = datetime.strptime(reservation.checkout, dt) room_bool = False for line_id in reservation.reservation_line: for room_id in line_id.reserve: if room_id.room_reservation_line_ids: for reserv in room_id.room_reservation_line_ids.\ search([('status', 'in', ('confirm', 'done')), ('room_id', '=', room_id.id)]): check_in = datetime.strptime(reserv.check_in, dt) check_out = datetime.strptime(reserv.check_out, dt) if check_in <= reserv_checkin <= check_out: room_bool = True if check_in <= reserv_checkout <= check_out: room_bool = True if reserv_checkin <= check_in and \ reserv_checkout >= check_out: room_bool = True mytime = "%Y-%m-%d" r_checkin = datetime.strptime(reservation.checkin, dt).date() r_checkin = r_checkin.strftime(mytime) r_checkout = datetime.\ strptime(reservation.checkout, dt).date() r_checkout = r_checkout.strftime(mytime) check_intm = datetime.strptime(reserv.check_in, dt).date() check_outtm = datetime.strptime(reserv.check_out, dt).date() check_intm = check_intm.strftime(mytime) check_outtm = check_outtm.strftime(mytime) range1 = [r_checkin, r_checkout] range2 = [check_intm, check_outtm] overlap_dates = self.check_overlap(*range1) \ & self.check_overlap(*range2) overlap_dates = [datetime.strftime(dates, '%d/%m/%Y') for dates in overlap_dates] if room_bool: raise ValidationError(_('You tried to Confirm ' 'Reservation with room' ' those already ' 'reserved in this ' 'Reservation Period. ' 'Overlap Dates are ' '%s') % overlap_dates) else: self.state = 'confirm' vals = {'room_id': room_id.id, 'check_in': reservation.checkin, 'check_out': reservation.checkout, 'state': 'assigned', 'reservation_id': reservation.id, } room_id.write({'isroom': False, 'status': 'occupied'}) else: self.state = 'confirm' vals = {'room_id': room_id.id, 'check_in': reservation.checkin, 'check_out': reservation.checkout, 'state': 'assigned', 'reservation_id': reservation.id, } room_id.write({'isroom': False, 'status': 'occupied'}) else: self.state = 'confirm' vals = {'room_id': room_id.id, 'check_in': reservation.checkin, 'check_out': reservation.checkout, 'state': 'assigned', 'reservation_id': reservation.id, } room_id.write({'isroom': False, 'status': 'occupied'}) reservation_line_obj.create(vals) return True
def confirmed_reservation(self)
This method creates a new record set for the hotel room reservation line ------------------------------------------------------------------- @param self: The object pointer @return: new record set for hotel room reservation line.
2.191356
2.170888
1.009428
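confirmed_reservation relies on a check_overlap helper that is not shown in this section. Judging from the call sites, it appears to return the set of calendar dates covered by a range, so intersecting two results yields the colliding dates; the sketch below is an assumption based on that usage, not the module's actual implementation:

from datetime import datetime, timedelta

def check_overlap(start_str, end_str, fmt="%Y-%m-%d"):
    # Return the set of calendar dates covered by [start, end], inclusive.
    start = datetime.strptime(start_str, fmt).date()
    end = datetime.strptime(end_str, fmt).date()
    return {start + timedelta(days=i) for i in range((end - start).days + 1)}

# Intersecting the requested range with an existing booking gives the
# overlap dates reported in the ValidationError above.
overlap = check_overlap("2023-06-10", "2023-06-13") & check_overlap("2023-06-12", "2023-06-15")
print(sorted(d.strftime("%d/%m/%Y") for d in overlap))  # ['12/06/2023', '13/06/2023']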
room_res_line_obj = self.env['hotel.room.reservation.line'] hotel_res_line_obj = self.env['hotel_reservation.line'] self.state = 'cancel' room_reservation_line = room_res_line_obj.search([('reservation_id', 'in', self.ids)]) room_reservation_line.write({'state': 'unassigned'}) room_reservation_line.unlink() reservation_lines = hotel_res_line_obj.search([('line_id', 'in', self.ids)]) for reservation_line in reservation_lines: reservation_line.reserve.write({'isroom': True, 'status': 'available'}) return True
def cancel_reservation(self)
This method cancels the record set for the hotel room reservation line ------------------------------------------------------------------ @param self: The object pointer @return: cancelled record set for hotel room reservation line.
3.719143
3.563574
1.043655
''' This function opens a window to compose an email, template message loaded by default. @param self: object pointer ''' assert len(self._ids) == 1, 'This is for a single id at a time.' ir_model_data = self.env['ir.model.data'] try: template_id = (ir_model_data.get_object_reference ('hotel_reservation', 'mail_template_hotel_reservation')[1]) except ValueError: template_id = False try: compose_form_id = (ir_model_data.get_object_reference ('mail', 'email_compose_message_wizard_form')[1]) except ValueError: compose_form_id = False ctx = dict() ctx.update({ 'default_model': 'hotel.reservation', 'default_res_id': self._ids[0], 'default_use_template': bool(template_id), 'default_template_id': template_id, 'default_composition_mode': 'comment', 'force_send': True, 'mark_so_as_sent': True }) return { 'type': 'ir.actions.act_window', 'view_type': 'form', 'view_mode': 'form', 'res_model': 'mail.compose.message', 'views': [(compose_form_id, 'form')], 'view_id': compose_form_id, 'target': 'new', 'context': ctx, 'force_send': True }
def send_reservation_maill(self)
This function opens a window to compose an email, with the template message loaded by default. @param self: object pointer
2.461405
2.000889
1.230155
now_str = time.strftime(dt) now_date = datetime.strptime(now_str, dt) ir_model_data = self.env['ir.model.data'] template_id = (ir_model_data.get_object_reference ('hotel_reservation', 'mail_template_reservation_reminder_24hrs')[1]) template_rec = self.env['mail.template'].browse(template_id) for reserv_rec in self.search([]): checkin_date = (datetime.strptime(reserv_rec.checkin, dt)) difference = relativedelta(now_date, checkin_date) if(difference.days == -1 and reserv_rec.partner_id.email and reserv_rec.state == 'confirm'): template_rec.send_mail(reserv_rec.id, force_send=True) return True
def reservation_reminder_24hrs(self)
This method is for the scheduler: every day the scheduler calls this method to find all of tomorrow's reservations. ---------------------------------------------- @param self: The object pointer @return: send a mail
3.433583
3.411105
1.00659
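The reminder above fires when relativedelta(now_date, checkin_date).days equals -1, i.e. the check-in lies roughly one day ahead. A short standalone illustration with made-up dates (dateutil is assumed to be available, since the method already uses relativedelta):

from datetime import datetime
from dateutil.relativedelta import relativedelta

now_date = datetime(2023, 6, 10, 9, 0)
checkin_date = datetime(2023, 6, 11, 14, 0)   # tomorrow afternoon

difference = relativedelta(now_date, checkin_date)
print(difference.days)  # -1 -> the 24hrs reminder template would be sent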
hotel_folio_obj = self.env['hotel.folio'] room_obj = self.env['hotel.room'] for reservation in self: folio_lines = [] checkin_date = reservation['checkin'] checkout_date = reservation['checkout'] if not self.checkin < self.checkout: raise ValidationError(_('Checkout date should be greater \ than the Check-in date.')) duration_vals = (self.onchange_check_dates (checkin_date=checkin_date, checkout_date=checkout_date, duration=False)) duration = duration_vals.get('duration') or 0.0 folio_vals = { 'date_order': reservation.date_order, 'warehouse_id': reservation.warehouse_id.id, 'partner_id': reservation.partner_id.id, 'pricelist_id': reservation.pricelist_id.id, 'partner_invoice_id': reservation.partner_invoice_id.id, 'partner_shipping_id': reservation.partner_shipping_id.id, 'checkin_date': reservation.checkin, 'checkout_date': reservation.checkout, 'duration': duration, 'reservation_id': reservation.id, 'service_lines': reservation['folio_id'] } for line in reservation.reservation_line: for r in line.reserve: folio_lines.append((0, 0, { 'checkin_date': checkin_date, 'checkout_date': checkout_date, 'product_id': r.product_id and r.product_id.id, 'name': reservation['reservation_no'], 'price_unit': r.list_price, 'product_uom_qty': duration, 'is_reserved': True})) res_obj = room_obj.browse([r.id]) res_obj.write({'status': 'occupied', 'isroom': False}) folio_vals.update({'room_lines': folio_lines}) folio = hotel_folio_obj.create(folio_vals) if folio: for rm_line in folio.room_lines: rm_line.product_id_change() self._cr.execute('insert into hotel_folio_reservation_rel' '(order_id, invoice_id) values (%s,%s)', (reservation.id, folio.id)) self.state = 'done' return True
def create_folio(self)
This method creates a new hotel folio. ----------------------------------------- @param self: The object pointer @return: new record set for hotel folio.
2.907565
2.873461
1.011869
''' This method gives the duration between check in checkout if customer will leave only for some hour it would be considers as a whole day. If customer will checkin checkout for more or equal hours, which configured in company as additional hours than it would be consider as full days -------------------------------------------------------------------- @param self: object pointer @return: Duration and checkout_date ''' value = {} configured_addition_hours = 0 wc_id = self.warehouse_id whcomp_id = wc_id or wc_id.company_id if whcomp_id: configured_addition_hours = wc_id.company_id.additional_hours duration = 0 if checkin_date and checkout_date: chkin_dt = datetime.strptime(checkin_date, dt) chkout_dt = datetime.strptime(checkout_date, dt) dur = chkout_dt - chkin_dt duration = dur.days + 1 if configured_addition_hours > 0: additional_hours = abs((dur.seconds / 60)) if additional_hours <= abs(configured_addition_hours * 60): duration -= 1 value.update({'duration': duration}) return value
def onchange_check_dates(self, checkin_date=False, checkout_date=False, duration=False)
This method gives the duration between check-in and check-out. If the customer leaves after only a few hours, it is still considered a whole day. When additional hours are configured on the company, a partial day is counted as a full day only if it exceeds those additional hours. -------------------------------------------------------------------- @param self: object pointer @return: Duration and checkout_date
6.713411
2.566217
2.616073
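A standalone sketch of the duration rule described above, on plain datetimes: the raw day count is bumped by one for the partial day, and that extra day is dropped again when the leftover time does not exceed the company's configured additional hours. The function name and sample values are illustrative only:

from datetime import datetime

def stay_duration(checkin, checkout, additional_hours=2):
    # Mirrors the onchange_check_dates arithmetic on plain datetimes.
    dur = checkout - checkin
    duration = dur.days + 1              # count the partial day up front
    leftover_minutes = dur.seconds / 60
    if additional_hours > 0 and leftover_minutes <= additional_hours * 60:
        duration -= 1                    # partial day falls inside the grace window
    return duration

print(stay_duration(datetime(2023, 6, 10, 14), datetime(2023, 6, 12, 15)))  # 2 (1h over, within grace)
print(stay_duration(datetime(2023, 6, 10, 14), datetime(2023, 6, 12, 17)))  # 3 (3h over, extra day kept)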
''' When you change categ_id it check checkin and checkout are filled or not if not then raise warning ----------------------------------------------------------- @param self: object pointer ''' hotel_room_obj = self.env['hotel.room'] hotel_room_ids = hotel_room_obj.search([('categ_id', '=', self.categ_id.id)]) room_ids = [] if not self.line_id.checkin: raise ValidationError(_('Before choosing a room,\n You have to \ select a Check in date or a Check out \ date in the reservation form.')) for room in hotel_room_ids: assigned = False for line in room.room_reservation_line_ids: if line.status != 'cancel': if(self.line_id.checkin <= line.check_in <= self.line_id.checkout) or (self.line_id.checkin <= line.check_out <= self.line_id.checkout): assigned = True elif(line.check_in <= self.line_id.checkin <= line.check_out) or (line.check_in <= self.line_id.checkout <= line.check_out): assigned = True for rm_line in room.room_line_ids: if rm_line.status != 'cancel': if(self.line_id.checkin <= rm_line.check_in <= self.line_id.checkout) or (self.line_id.checkin <= rm_line.check_out <= self.line_id.checkout): assigned = True elif(rm_line.check_in <= self.line_id.checkin <= rm_line.check_out) or (rm_line.check_in <= self.line_id.checkout <= rm_line.check_out): assigned = True if not assigned: room_ids.append(room.id) domain = {'reserve': [('id', 'in', room_ids)]} return {'domain': domain}
def on_change_categ(self)
When you change categ_id it checks whether checkin and checkout are filled; if not, a warning is raised ----------------------------------------------------------- @param self: object pointer
2.483078
2.00445
1.238783
hotel_room_reserv_line_obj = self.env['hotel.room.reservation.line'] for reserv_rec in self: for rec in reserv_rec.reserve: hres_arg = [('room_id', '=', rec.id), ('reservation_id', '=', reserv_rec.line_id.id)] myobj = hotel_room_reserv_line_obj.search(hres_arg) if myobj.ids: rec.write({'isroom': True, 'status': 'available'}) myobj.unlink() return super(HotelReservationLine, self).unlink()
def unlink(self)
Overrides orm unlink method. @param self: The object pointer @return: True/False.
4.793241
4.821029
0.994236
for room in self: for reserv_line in room.room_reservation_line_ids: if reserv_line.status == 'confirm': raise ValidationError(_('User is not able to delete the \ room after the room in %s state \ in reservation') % (reserv_line.status)) return super(HotelRoom, self).unlink()
def unlink(self)
Overrides orm unlink method. @param self: The object pointer @return: True/False.
8.007172
8.429746
0.949871
reservation_line_obj = self.env['hotel.room.reservation.line'] folio_room_line_obj = self.env['folio.room.line'] now = datetime.now() curr_date = now.strftime(dt) for room in self.search([]): reserv_line_ids = [reservation_line.id for reservation_line in room.room_reservation_line_ids] reserv_args = [('id', 'in', reserv_line_ids), ('check_in', '<=', curr_date), ('check_out', '>=', curr_date)] reservation_line_ids = reservation_line_obj.search(reserv_args) rooms_ids = [room_line.id for room_line in room.room_line_ids] rom_args = [('id', 'in', rooms_ids), ('check_in', '<=', curr_date), ('check_out', '>=', curr_date)] room_line_ids = folio_room_line_obj.search(rom_args) status = {'isroom': True, 'color': 5} if reservation_line_ids.ids: status = {'isroom': False, 'color': 2} room.write(status) if room_line_ids.ids: status = {'isroom': False, 'color': 2} room.write(status) if reservation_line_ids.ids and room_line_ids.ids: raise ValidationError(_('Please Check Rooms Status \ for %s.' % (room.name))) return True
def cron_room_line(self)
This method is for the scheduler: every minute the scheduler calls this method and checks whether the status of each room is occupied or available -------------------------------------------------------------- @param self: The object pointer @return: update status of hotel room reservation line
2.770008
2.704651
1.024165