prompt: large_string, lengths 72–9.34k
completion: large_string, lengths 0–7.61k
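These two columns read like a fill-in-the-middle (FIM) corpus: each prompt embeds a complete Python file with a hole marked by <|fim_middle|>, and the completion holds the text that fills that hole. As a quick way to inspect such a split, a minimal sketch using the Hugging Face datasets library might look like the following; the dataset path is a placeholder, not a name taken from this dump.

from datasets import load_dataset

# Hypothetical path; replace with the real dataset identifier.
ds = load_dataset("user/fim-python-corpus", split="train")
print(ds.column_names)          # expected: ['prompt', 'completion']
row = ds[0]
print(len(row["prompt"]), len(row["completion"]))
# Every prompt in this dump embeds the same utils.py; only the position of
# the <|fim_middle|> hole changes from row to row.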
utils.py

# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import pyparsing as pp

uninary_operators = ("not", )
binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne",
                   u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥",
                   u"≤", u"like" "in")
multiple_operators = (u"and", u"or", u"∧", u"∨")

operator = pp.Regex(u"|".join(binary_operator))
null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None))
boolean = "False|True|false|true"
boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true")
hex_string = lambda n: pp.Word(pp.hexnums, exact=n)
uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 +
                  "-" + hex_string(12))
number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?"
number = pp.Regex(number).setParseAction(lambda t: float(t[0]))
identifier = pp.Word(pp.alphas, pp.alphanums + "_")
quoted_string = pp.QuotedString('"') | pp.QuotedString("'")
comparison_term = pp.Forward()
in_list = pp.Group(pp.Suppress('[') +
                   pp.Optional(pp.delimitedList(comparison_term)) +
                   pp.Suppress(']'))("list")
comparison_term << (null | boolean | uuid | identifier | number |
                    quoted_string | in_list)
condition = pp.Group(comparison_term + operator + comparison_term)

expr = pp.operatorPrecedence(condition, [
    ("not", 1, pp.opAssoc.RIGHT, ),
    ("and", 2, pp.opAssoc.LEFT, ),
    ("∧", 2, pp.opAssoc.LEFT, ),
    ("or", 2, pp.opAssoc.LEFT, ),
    ("∨", 2, pp.opAssoc.LEFT, ),
])


def _parsed_query2dict(parsed_query):
    result = None
    while parsed_query:
        part = parsed_query.pop()
        if part in binary_operator:
            result = {part: {parsed_query.pop(): result}}
        elif part in multiple_operators:
            if result.get(part):
                result[part].append(
                    _parsed_query2dict(parsed_query.pop()))
            else:
                result = {part: [result]}
        elif part in uninary_operators:
            result = {part: result}
        elif isinstance(part, pp.ParseResults):
            kind = part.getName()
            if kind == "list":
                res = part.asList()
            else:
                res = _parsed_query2dict(part)
            if result is None:
                result = res
            elif isinstance(result, dict):
                list(result.values())[0].append(res)
        else:
            result = part
    return result


def search_query_builder(query):
    parsed_query = expr.parseString(query)[0]
    return _parsed_query2dict(parsed_query)


def list2cols(cols, objs):
    return cols, [tuple([o[k] for k in cols]) for o in objs]


def format_string_list(objs, field):
    objs[field] = ", ".join(objs[field])


def format_dict_list(objs, field):
    objs[field] = "\n".join(
        "- " + ", ".join("%s: %s" % (k, v)
                         for k, v in elem.items())
        for elem in objs[field])


def format_move_dict_to_root(obj, field):
    for attr in obj[field]:
        obj["%s/%s" % (field, attr)] = obj[field][attr]
    del obj[field]


def format_archive_policy(ap):
    format_dict_list(ap, "definition")
    format_string_list(ap, "aggregation_methods")


def dict_from_parsed_args(parsed_args, attrs):
    d = {}
    for attr in attrs:
        value = getattr(parsed_args, attr)
        if value is not None:
            d[attr] = value
    return d


def dict_to_querystring(objs):
    return "&".join(["%s=%s" % (k, v)
                     for k, v in objs.items()
                     if v is not None])
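For orientation, here is a minimal usage sketch of the query helpers above. It assumes the listing is saved as utils.py, a Python 3 interpreter, and a pyparsing version that still provides operatorPrecedence (the 2.x line); the expected values in the comments follow from tracing the grammar rather than from captured output.

import utils

# A single comparison becomes {operator: {field: value}}.
print(utils.search_query_builder("ram >= 512"))
# {'>=': {'ram': 512.0}}

# "and"/"or" collect their operands into a list.
print(utils.search_query_builder("flavor = 'm1.small' and ram >= 512"))
# {'and': [{'>=': {'ram': 512.0}}, {'=': {'flavor': 'm1.small'}}]}

# None values are dropped when building a query string.
print(utils.dict_to_querystring({"limit": 10, "marker": None}))
# limit=10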
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pyparsing as pp uninary_operators = ("not", ) binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥", u"≤", u"like" "in") multiple_operators = (u"and", u"or", u"∧", u"∨") operator = pp.Regex(u"|".join(binary_operator)) null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None)) boolean = "False|True|false|true" boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true") hex_string = lambda n: pp.Word(pp.hexnums, exact=n) uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 + "-" + hex_string(12)) number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?" number = pp.Regex(number).setParseAction(lambda t: float(t[0])) identifier = pp.Word(pp.alphas, pp.alphanums + "_") quoted_string = pp.QuotedString('"') | pp.QuotedString("'") comparison_term = pp.Forward() in_list = pp.Group(pp.Suppress('[') + pp.Optional(pp.delimitedList(comparison_term)) + pp.Suppress(']'))("list") comparison_term << (null | boolean | uuid | identifier | number | quoted_string | in_list) condition = pp.Group(comparison_term + operator + comparison_term) expr = pp.operatorPrecedence(condition, [ ("not", 1, pp.opAssoc.RIGHT, ), ("and", 2, pp.opAssoc.LEFT, ), ("∧", 2, pp.opAssoc.LEFT, ), ("or", 2, pp.opAssoc.LEFT, ), ("∨", 2, pp.opAssoc.LEFT, ), ]) def _parsed_query2dict(parsed_query): result = None while parsed_query: part = parsed_query.pop() if part in binary_operator: result = {part: {parsed_query.pop(): result}} elif part in multiple_operators: if result.get(part): result[part].append( _parsed_query2dict(parsed_query.pop())) else: result = {part: [result]} elif part in uninary_operators: result = {part: result} elif isinstance(part, pp.ParseResults): kind = part.getName() if kind == "list": res = part.asList() else: res = _parsed_query2dict(part) if result is None: result = res <|fim_middle|> lif isinstance(result, dict): list(result.values())[0].append(res) else: result = part return result def search_query_builder(query): parsed_query = expr.parseString(query)[0] return _parsed_query2dict(parsed_query) def list2cols(cols, objs): return cols, [tuple([o[k] for k in cols]) for o in objs] def format_string_list(objs, field): objs[field] = ", ".join(objs[field]) def format_dict_list(objs, field): objs[field] = "\n".join( "- " + ", ".join("%s: %s" % (k, v) for k, v in elem.items()) for elem in objs[field]) def format_move_dict_to_root(obj, field): for attr in obj[field]: obj["%s/%s" % (field, attr)] = obj[field][attr] del obj[field] def format_archive_policy(ap): format_dict_list(ap, "definition") format_string_list(ap, "aggregation_methods") def dict_from_parsed_args(parsed_args, attrs): d = {} for attr in attrs: value = getattr(parsed_args, attr) if value is not None: d[attr] = value return d def dict_to_querystring(objs): return "&".join(["%s=%s" % (k, v) for k, v in objs.items() if v is not None]) 
<|fim▁end|>
e
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pyparsing as pp uninary_operators = ("not", ) binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥", u"≤", u"like" "in") multiple_operators = (u"and", u"or", u"∧", u"∨") operator = pp.Regex(u"|".join(binary_operator)) null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None)) boolean = "False|True|false|true" boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true") hex_string = lambda n: pp.Word(pp.hexnums, exact=n) uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 + "-" + hex_string(12)) number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?" number = pp.Regex(number).setParseAction(lambda t: float(t[0])) identifier = pp.Word(pp.alphas, pp.alphanums + "_") quoted_string = pp.QuotedString('"') | pp.QuotedString("'") comparison_term = pp.Forward() in_list = pp.Group(pp.Suppress('[') + pp.Optional(pp.delimitedList(comparison_term)) + pp.Suppress(']'))("list") comparison_term << (null | boolean | uuid | identifier | number | quoted_string | in_list) condition = pp.Group(comparison_term + operator + comparison_term) expr = pp.operatorPrecedence(condition, [ ("not", 1, pp.opAssoc.RIGHT, ), ("and", 2, pp.opAssoc.LEFT, ), ("∧", 2, pp.opAssoc.LEFT, ), ("or", 2, pp.opAssoc.LEFT, ), ("∨", 2, pp.opAssoc.LEFT, ), ]) def _parsed_query2dict(parsed_query): result = None while parsed_query: part = parsed_query.pop() if part in binary_operator: result = {part: {parsed_query.pop(): result}} elif part in multiple_operators: if result.get(part): result[part].append( _parsed_query2dict(parsed_query.pop())) else: result = {part: [result]} elif part in uninary_operators: result = {part: result} elif isinstance(part, pp.ParseResults): kind = part.getName() if kind == "list": res = part.asList() else: res = _parsed_query2dict(part) if result is None: result = res elif isinstance(result, dict): list(result.va <|fim_middle|> result = part return result def search_query_builder(query): parsed_query = expr.parseString(query)[0] return _parsed_query2dict(parsed_query) def list2cols(cols, objs): return cols, [tuple([o[k] for k in cols]) for o in objs] def format_string_list(objs, field): objs[field] = ", ".join(objs[field]) def format_dict_list(objs, field): objs[field] = "\n".join( "- " + ", ".join("%s: %s" % (k, v) for k, v in elem.items()) for elem in objs[field]) def format_move_dict_to_root(obj, field): for attr in obj[field]: obj["%s/%s" % (field, attr)] = obj[field][attr] del obj[field] def format_archive_policy(ap): format_dict_list(ap, "definition") format_string_list(ap, "aggregation_methods") def dict_from_parsed_args(parsed_args, attrs): d = {} for attr in attrs: value = getattr(parsed_args, attr) if value is not None: d[attr] = value return d def dict_to_querystring(objs): return "&".join(["%s=%s" % (k, v) for k, v in objs.items() if v is not None]) <|fim▁end|>
lues())[0].append(res) else:
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pyparsing as pp uninary_operators = ("not", ) binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥", u"≤", u"like" "in") multiple_operators = (u"and", u"or", u"∧", u"∨") operator = pp.Regex(u"|".join(binary_operator)) null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None)) boolean = "False|True|false|true" boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true") hex_string = lambda n: pp.Word(pp.hexnums, exact=n) uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 + "-" + hex_string(12)) number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?" number = pp.Regex(number).setParseAction(lambda t: float(t[0])) identifier = pp.Word(pp.alphas, pp.alphanums + "_") quoted_string = pp.QuotedString('"') | pp.QuotedString("'") comparison_term = pp.Forward() in_list = pp.Group(pp.Suppress('[') + pp.Optional(pp.delimitedList(comparison_term)) + pp.Suppress(']'))("list") comparison_term << (null | boolean | uuid | identifier | number | quoted_string | in_list) condition = pp.Group(comparison_term + operator + comparison_term) expr = pp.operatorPrecedence(condition, [ ("not", 1, pp.opAssoc.RIGHT, ), ("and", 2, pp.opAssoc.LEFT, ), ("∧", 2, pp.opAssoc.LEFT, ), ("or", 2, pp.opAssoc.LEFT, ), ("∨", 2, pp.opAssoc.LEFT, ), ]) def _parsed_query2dict(parsed_query): result = None while parsed_query: part = parsed_query.pop() if part in binary_operator: result = {part: {parsed_query.pop(): result}} elif part in multiple_operators: if result.get(part): result[part].append( _parsed_query2dict(parsed_query.pop())) else: result = {part: [result]} elif part in uninary_operators: result = {part: result} elif isinstance(part, pp.ParseResults): kind = part.getName() if kind == "list": res = part.asList() else: res = _parsed_query2dict(part) if result is None: result = res elif isinstance(result, dict): list(result.values())[0].append(res) else: result = part <|fim_middle|> sult def search_query_builder(query): parsed_query = expr.parseString(query)[0] return _parsed_query2dict(parsed_query) def list2cols(cols, objs): return cols, [tuple([o[k] for k in cols]) for o in objs] def format_string_list(objs, field): objs[field] = ", ".join(objs[field]) def format_dict_list(objs, field): objs[field] = "\n".join( "- " + ", ".join("%s: %s" % (k, v) for k, v in elem.items()) for elem in objs[field]) def format_move_dict_to_root(obj, field): for attr in obj[field]: obj["%s/%s" % (field, attr)] = obj[field][attr] del obj[field] def format_archive_policy(ap): format_dict_list(ap, "definition") format_string_list(ap, "aggregation_methods") def dict_from_parsed_args(parsed_args, attrs): d = {} for attr in attrs: value = getattr(parsed_args, attr) if value is not None: d[attr] = value return d def dict_to_querystring(objs): return "&".join(["%s=%s" % (k, v) for k, v in objs.items() if v is not None]) <|fim▁end|>
return re
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pyparsing as pp uninary_operators = ("not", ) binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥", u"≤", u"like" "in") multiple_operators = (u"and", u"or", u"∧", u"∨") operator = pp.Regex(u"|".join(binary_operator)) null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None)) boolean = "False|True|false|true" boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true") hex_string = lambda n: pp.Word(pp.hexnums, exact=n) uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 + "-" + hex_string(12)) number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?" number = pp.Regex(number).setParseAction(lambda t: float(t[0])) identifier = pp.Word(pp.alphas, pp.alphanums + "_") quoted_string = pp.QuotedString('"') | pp.QuotedString("'") comparison_term = pp.Forward() in_list = pp.Group(pp.Suppress('[') + pp.Optional(pp.delimitedList(comparison_term)) + pp.Suppress(']'))("list") comparison_term << (null | boolean | uuid | identifier | number | quoted_string | in_list) condition = pp.Group(comparison_term + operator + comparison_term) expr = pp.operatorPrecedence(condition, [ ("not", 1, pp.opAssoc.RIGHT, ), ("and", 2, pp.opAssoc.LEFT, ), ("∧", 2, pp.opAssoc.LEFT, ), ("or", 2, pp.opAssoc.LEFT, ), ("∨", 2, pp.opAssoc.LEFT, ), ]) def _parsed_query2dict(parsed_query): result = None while parsed_query: part = parsed_query.pop() if part in binary_operator: result = {part: {parsed_query.pop(): result}} elif part in multiple_operators: if result.get(part): result[part].append( _parsed_query2dict(parsed_query.pop())) else: result = {part: [result]} elif part in uninary_operators: result = {part: result} elif isinstance(part, pp.ParseResults): kind = part.getName() if kind == "list": res = part.asList() else: res = _parsed_query2dict(part) if result is None: result = res elif isinstance(result, dict): list(result.values())[0].append(res) else: result = part return result def search_query_builder(query): parsed_query = expr.parseString(query)[0] return _parsed_query2dict(parsed_query) def list2cols(cols, objs): return cols, [tuple([o[k] for k in cols]) for o in objs] def format_string_list(objs, field): objs[field] = ", ".join(objs[field]) def format_dict_list(objs, field): objs[field] = "\n".join( "- " + ", ".join("%s: %s" % (k, v) for k, v in elem.items()) for elem in objs[field]) def format_move_dict_to_root(obj, field): for attr in obj[field]: obj["%s/%s" % (field, attr)] = obj[field][attr] del obj[field] def format_archive_policy(ap): format_dict_list(ap, "definition") format_string_list(ap, "aggregation_methods") def dict_from_parsed_args(parsed_args, attrs): d = {} for attr in attrs: value = getattr(parsed_args, attr) if value is not None: d[attr] = valu <|fim_middle|> def dict_to_querystring(objs): return "&".join(["%s=%s" % (k, v) for k, v in objs.items() if v is not None]) <|fim▁end|>
e return d
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pyparsing as pp uninary_operators = ("not", ) binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥", u"≤", u"like" "in") multiple_operators = (u"and", u"or", u"∧", u"∨") operator = pp.Regex(u"|".join(binary_operator)) null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None)) boolean = "False|True|false|true" boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true") hex_string = lambda n: pp.Word(pp.hexnums, exact=n) uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 + "-" + hex_string(12)) number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?" number = pp.Regex(number).setParseAction(lambda t: float(t[0])) identifier = pp.Word(pp.alphas, pp.alphanums + "_") quoted_string = pp.QuotedString('"') | pp.QuotedString("'") comparison_term = pp.Forward() in_list = pp.Group(pp.Suppress('[') + pp.Optional(pp.delimitedList(comparison_term)) + pp.Suppress(']'))("list") comparison_term << (null | boolean | uuid | identifier | number | quoted_string | in_list) condition = pp.Group(comparison_term + operator + comparison_term) expr = pp.operatorPrecedence(condition, [ ("not", 1, pp.opAssoc.RIGHT, ), ("and", 2, pp.opAssoc.LEFT, ), ("∧", 2, pp.opAssoc.LEFT, ), ("or", 2, pp.opAssoc.LEFT, ), ("∨", 2, pp.opAssoc.LEFT, ), ]) def _parsed_query2<|fim_middle|>: result = None while parsed_query: part = parsed_query.pop() if part in binary_operator: result = {part: {parsed_query.pop(): result}} elif part in multiple_operators: if result.get(part): result[part].append( _parsed_query2dict(parsed_query.pop())) else: result = {part: [result]} elif part in uninary_operators: result = {part: result} elif isinstance(part, pp.ParseResults): kind = part.getName() if kind == "list": res = part.asList() else: res = _parsed_query2dict(part) if result is None: result = res elif isinstance(result, dict): list(result.values())[0].append(res) else: result = part return result def search_query_builder(query): parsed_query = expr.parseString(query)[0] return _parsed_query2dict(parsed_query) def list2cols(cols, objs): return cols, [tuple([o[k] for k in cols]) for o in objs] def format_string_list(objs, field): objs[field] = ", ".join(objs[field]) def format_dict_list(objs, field): objs[field] = "\n".join( "- " + ", ".join("%s: %s" % (k, v) for k, v in elem.items()) for elem in objs[field]) def format_move_dict_to_root(obj, field): for attr in obj[field]: obj["%s/%s" % (field, attr)] = obj[field][attr] del obj[field] def format_archive_policy(ap): format_dict_list(ap, "definition") format_string_list(ap, "aggregation_methods") def dict_from_parsed_args(parsed_args, attrs): d = {} for attr in attrs: value = getattr(parsed_args, attr) if value is not None: d[attr] = value return d def dict_to_querystring(objs): return "&".join(["%s=%s" % (k, v) for k, v in objs.items() if v is not None]) <|fim▁end|>
dict(parsed_query)
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pyparsing as pp uninary_operators = ("not", ) binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥", u"≤", u"like" "in") multiple_operators = (u"and", u"or", u"∧", u"∨") operator = pp.Regex(u"|".join(binary_operator)) null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None)) boolean = "False|True|false|true" boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true") hex_string = lambda n: pp.Word(pp.hexnums, exact=n) uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 + "-" + hex_string(12)) number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?" number = pp.Regex(number).setParseAction(lambda t: float(t[0])) identifier = pp.Word(pp.alphas, pp.alphanums + "_") quoted_string = pp.QuotedString('"') | pp.QuotedString("'") comparison_term = pp.Forward() in_list = pp.Group(pp.Suppress('[') + pp.Optional(pp.delimitedList(comparison_term)) + pp.Suppress(']'))("list") comparison_term << (null | boolean | uuid | identifier | number | quoted_string | in_list) condition = pp.Group(comparison_term + operator + comparison_term) expr = pp.operatorPrecedence(condition, [ ("not", 1, pp.opAssoc.RIGHT, ), ("and", 2, pp.opAssoc.LEFT, ), ("∧", 2, pp.opAssoc.LEFT, ), ("or", 2, pp.opAssoc.LEFT, ), ("∨", 2, pp.opAssoc.LEFT, ), ]) def _parsed_query2dict(parsed_query): result = None while parsed_query: part = parsed_query.pop() if part in binary_operator: result = {part: {parsed_query.pop(): result}} elif part in multiple_operators: if result.get(part): result[part].append( _parsed_query2dict(parsed_query.pop())) else: result = {part: [result]} elif part in uninary_operators: result = {part: result} elif isinstance(part, pp.ParseResults): kind = part.getName() if kind == "list": res = part.asList() else: res = _parsed_query2dict(part) if result is None: result = res elif isinstance(result, dict): list(result.values())[0].append(res) else: result = part return result def search_query_b<|fim_middle|>arsed_query = expr.parseString(query)[0] return _parsed_query2dict(parsed_query) def list2cols(cols, objs): return cols, [tuple([o[k] for k in cols]) for o in objs] def format_string_list(objs, field): objs[field] = ", ".join(objs[field]) def format_dict_list(objs, field): objs[field] = "\n".join( "- " + ", ".join("%s: %s" % (k, v) for k, v in elem.items()) for elem in objs[field]) def format_move_dict_to_root(obj, field): for attr in obj[field]: obj["%s/%s" % (field, attr)] = obj[field][attr] del obj[field] def format_archive_policy(ap): format_dict_list(ap, "definition") format_string_list(ap, "aggregation_methods") def dict_from_parsed_args(parsed_args, attrs): d = {} for attr in attrs: value = getattr(parsed_args, attr) if value is not None: d[attr] = value return d def dict_to_querystring(objs): return "&".join(["%s=%s" % (k, v) for k, v in objs.items() if v is not None]) <|fim▁end|>
uilder(query): p
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pyparsing as pp uninary_operators = ("not", ) binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥", u"≤", u"like" "in") multiple_operators = (u"and", u"or", u"∧", u"∨") operator = pp.Regex(u"|".join(binary_operator)) null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None)) boolean = "False|True|false|true" boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true") hex_string = lambda n: pp.Word(pp.hexnums, exact=n) uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 + "-" + hex_string(12)) number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?" number = pp.Regex(number).setParseAction(lambda t: float(t[0])) identifier = pp.Word(pp.alphas, pp.alphanums + "_") quoted_string = pp.QuotedString('"') | pp.QuotedString("'") comparison_term = pp.Forward() in_list = pp.Group(pp.Suppress('[') + pp.Optional(pp.delimitedList(comparison_term)) + pp.Suppress(']'))("list") comparison_term << (null | boolean | uuid | identifier | number | quoted_string | in_list) condition = pp.Group(comparison_term + operator + comparison_term) expr = pp.operatorPrecedence(condition, [ ("not", 1, pp.opAssoc.RIGHT, ), ("and", 2, pp.opAssoc.LEFT, ), ("∧", 2, pp.opAssoc.LEFT, ), ("or", 2, pp.opAssoc.LEFT, ), ("∨", 2, pp.opAssoc.LEFT, ), ]) def _parsed_query2dict(parsed_query): result = None while parsed_query: part = parsed_query.pop() if part in binary_operator: result = {part: {parsed_query.pop(): result}} elif part in multiple_operators: if result.get(part): result[part].append( _parsed_query2dict(parsed_query.pop())) else: result = {part: [result]} elif part in uninary_operators: result = {part: result} elif isinstance(part, pp.ParseResults): kind = part.getName() if kind == "list": res = part.asList() else: res = _parsed_query2dict(part) if result is None: result = res elif isinstance(result, dict): list(result.values())[0].append(res) else: result = part return result def search_query_builder(query): parsed_query = expr.parseString(query)[0] return _parsed_query2dict(parsed_query) def list2cols(cols<|fim_middle|> return cols, [tuple([o[k] for k in cols]) for o in objs] def format_string_list(objs, field): objs[field] = ", ".join(objs[field]) def format_dict_list(objs, field): objs[field] = "\n".join( "- " + ", ".join("%s: %s" % (k, v) for k, v in elem.items()) for elem in objs[field]) def format_move_dict_to_root(obj, field): for attr in obj[field]: obj["%s/%s" % (field, attr)] = obj[field][attr] del obj[field] def format_archive_policy(ap): format_dict_list(ap, "definition") format_string_list(ap, "aggregation_methods") def dict_from_parsed_args(parsed_args, attrs): d = {} for attr in attrs: value = getattr(parsed_args, attr) if value is not None: d[attr] = value return d def dict_to_querystring(objs): return "&".join(["%s=%s" % (k, v) for k, v in objs.items() if v is not None]) <|fim▁end|>
, objs):
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pyparsing as pp uninary_operators = ("not", ) binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥", u"≤", u"like" "in") multiple_operators = (u"and", u"or", u"∧", u"∨") operator = pp.Regex(u"|".join(binary_operator)) null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None)) boolean = "False|True|false|true" boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true") hex_string = lambda n: pp.Word(pp.hexnums, exact=n) uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 + "-" + hex_string(12)) number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?" number = pp.Regex(number).setParseAction(lambda t: float(t[0])) identifier = pp.Word(pp.alphas, pp.alphanums + "_") quoted_string = pp.QuotedString('"') | pp.QuotedString("'") comparison_term = pp.Forward() in_list = pp.Group(pp.Suppress('[') + pp.Optional(pp.delimitedList(comparison_term)) + pp.Suppress(']'))("list") comparison_term << (null | boolean | uuid | identifier | number | quoted_string | in_list) condition = pp.Group(comparison_term + operator + comparison_term) expr = pp.operatorPrecedence(condition, [ ("not", 1, pp.opAssoc.RIGHT, ), ("and", 2, pp.opAssoc.LEFT, ), ("∧", 2, pp.opAssoc.LEFT, ), ("or", 2, pp.opAssoc.LEFT, ), ("∨", 2, pp.opAssoc.LEFT, ), ]) def _parsed_query2dict(parsed_query): result = None while parsed_query: part = parsed_query.pop() if part in binary_operator: result = {part: {parsed_query.pop(): result}} elif part in multiple_operators: if result.get(part): result[part].append( _parsed_query2dict(parsed_query.pop())) else: result = {part: [result]} elif part in uninary_operators: result = {part: result} elif isinstance(part, pp.ParseResults): kind = part.getName() if kind == "list": res = part.asList() else: res = _parsed_query2dict(part) if result is None: result = res elif isinstance(result, dict): list(result.values())[0].append(res) else: result = part return result def search_query_builder(query): parsed_query = expr.parseString(query)[0] return _parsed_query2dict(parsed_query) def list2cols(cols, objs): return cols, [tuple([o[k] for k in cols]) for o in objs] def format_string_<|fim_middle|> objs[field] = ", ".join(objs[field]) def format_dict_list(objs, field): objs[field] = "\n".join( "- " + ", ".join("%s: %s" % (k, v) for k, v in elem.items()) for elem in objs[field]) def format_move_dict_to_root(obj, field): for attr in obj[field]: obj["%s/%s" % (field, attr)] = obj[field][attr] del obj[field] def format_archive_policy(ap): format_dict_list(ap, "definition") format_string_list(ap, "aggregation_methods") def dict_from_parsed_args(parsed_args, attrs): d = {} for attr in attrs: value = getattr(parsed_args, attr) if value is not None: d[attr] = value return d def dict_to_querystring(objs): return "&".join(["%s=%s" % (k, v) for k, v in objs.items() if v is not None]) <|fim▁end|>
list(objs, field):
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pyparsing as pp uninary_operators = ("not", ) binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥", u"≤", u"like" "in") multiple_operators = (u"and", u"or", u"∧", u"∨") operator = pp.Regex(u"|".join(binary_operator)) null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None)) boolean = "False|True|false|true" boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true") hex_string = lambda n: pp.Word(pp.hexnums, exact=n) uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 + "-" + hex_string(12)) number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?" number = pp.Regex(number).setParseAction(lambda t: float(t[0])) identifier = pp.Word(pp.alphas, pp.alphanums + "_") quoted_string = pp.QuotedString('"') | pp.QuotedString("'") comparison_term = pp.Forward() in_list = pp.Group(pp.Suppress('[') + pp.Optional(pp.delimitedList(comparison_term)) + pp.Suppress(']'))("list") comparison_term << (null | boolean | uuid | identifier | number | quoted_string | in_list) condition = pp.Group(comparison_term + operator + comparison_term) expr = pp.operatorPrecedence(condition, [ ("not", 1, pp.opAssoc.RIGHT, ), ("and", 2, pp.opAssoc.LEFT, ), ("∧", 2, pp.opAssoc.LEFT, ), ("or", 2, pp.opAssoc.LEFT, ), ("∨", 2, pp.opAssoc.LEFT, ), ]) def _parsed_query2dict(parsed_query): result = None while parsed_query: part = parsed_query.pop() if part in binary_operator: result = {part: {parsed_query.pop(): result}} elif part in multiple_operators: if result.get(part): result[part].append( _parsed_query2dict(parsed_query.pop())) else: result = {part: [result]} elif part in uninary_operators: result = {part: result} elif isinstance(part, pp.ParseResults): kind = part.getName() if kind == "list": res = part.asList() else: res = _parsed_query2dict(part) if result is None: result = res elif isinstance(result, dict): list(result.values())[0].append(res) else: result = part return result def search_query_builder(query): parsed_query = expr.parseString(query)[0] return _parsed_query2dict(parsed_query) def list2cols(cols, objs): return cols, [tuple([o[k] for k in cols]) for o in objs] def format_string_list(objs, field): objs[field] = ", ".join(objs[field]) def format_dict_li<|fim_middle|> objs[field] = "\n".join( "- " + ", ".join("%s: %s" % (k, v) for k, v in elem.items()) for elem in objs[field]) def format_move_dict_to_root(obj, field): for attr in obj[field]: obj["%s/%s" % (field, attr)] = obj[field][attr] del obj[field] def format_archive_policy(ap): format_dict_list(ap, "definition") format_string_list(ap, "aggregation_methods") def dict_from_parsed_args(parsed_args, attrs): d = {} for attr in attrs: value = getattr(parsed_args, attr) if value is not None: d[attr] = value return d def dict_to_querystring(objs): return "&".join(["%s=%s" % (k, v) for k, v in objs.items() if v is not None]) <|fim▁end|>
st(objs, field):
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pyparsing as pp uninary_operators = ("not", ) binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥", u"≤", u"like" "in") multiple_operators = (u"and", u"or", u"∧", u"∨") operator = pp.Regex(u"|".join(binary_operator)) null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None)) boolean = "False|True|false|true" boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true") hex_string = lambda n: pp.Word(pp.hexnums, exact=n) uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 + "-" + hex_string(12)) number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?" number = pp.Regex(number).setParseAction(lambda t: float(t[0])) identifier = pp.Word(pp.alphas, pp.alphanums + "_") quoted_string = pp.QuotedString('"') | pp.QuotedString("'") comparison_term = pp.Forward() in_list = pp.Group(pp.Suppress('[') + pp.Optional(pp.delimitedList(comparison_term)) + pp.Suppress(']'))("list") comparison_term << (null | boolean | uuid | identifier | number | quoted_string | in_list) condition = pp.Group(comparison_term + operator + comparison_term) expr = pp.operatorPrecedence(condition, [ ("not", 1, pp.opAssoc.RIGHT, ), ("and", 2, pp.opAssoc.LEFT, ), ("∧", 2, pp.opAssoc.LEFT, ), ("or", 2, pp.opAssoc.LEFT, ), ("∨", 2, pp.opAssoc.LEFT, ), ]) def _parsed_query2dict(parsed_query): result = None while parsed_query: part = parsed_query.pop() if part in binary_operator: result = {part: {parsed_query.pop(): result}} elif part in multiple_operators: if result.get(part): result[part].append( _parsed_query2dict(parsed_query.pop())) else: result = {part: [result]} elif part in uninary_operators: result = {part: result} elif isinstance(part, pp.ParseResults): kind = part.getName() if kind == "list": res = part.asList() else: res = _parsed_query2dict(part) if result is None: result = res elif isinstance(result, dict): list(result.values())[0].append(res) else: result = part return result def search_query_builder(query): parsed_query = expr.parseString(query)[0] return _parsed_query2dict(parsed_query) def list2cols(cols, objs): return cols, [tuple([o[k] for k in cols]) for o in objs] def format_string_list(objs, field): objs[field] = ", ".join(objs[field]) def format_dict_list(objs, field): objs[field] = "\n".join( "- " + ", ".join("%s: %s" % (k, v) for k, v in elem.items()) for elem in objs[field]) def format_move_di<|fim_middle|> for attr in obj[field]: obj["%s/%s" % (field, attr)] = obj[field][attr] del obj[field] def format_archive_policy(ap): format_dict_list(ap, "definition") format_string_list(ap, "aggregation_methods") def dict_from_parsed_args(parsed_args, attrs): d = {} for attr in attrs: value = getattr(parsed_args, attr) if value is not None: d[attr] = value return d def dict_to_querystring(objs): return "&".join(["%s=%s" % (k, v) for k, v in objs.items() if v is not None]) <|fim▁end|>
ct_to_root(obj, field):
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pyparsing as pp uninary_operators = ("not", ) binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥", u"≤", u"like" "in") multiple_operators = (u"and", u"or", u"∧", u"∨") operator = pp.Regex(u"|".join(binary_operator)) null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None)) boolean = "False|True|false|true" boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true") hex_string = lambda n: pp.Word(pp.hexnums, exact=n) uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 + "-" + hex_string(12)) number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?" number = pp.Regex(number).setParseAction(lambda t: float(t[0])) identifier = pp.Word(pp.alphas, pp.alphanums + "_") quoted_string = pp.QuotedString('"') | pp.QuotedString("'") comparison_term = pp.Forward() in_list = pp.Group(pp.Suppress('[') + pp.Optional(pp.delimitedList(comparison_term)) + pp.Suppress(']'))("list") comparison_term << (null | boolean | uuid | identifier | number | quoted_string | in_list) condition = pp.Group(comparison_term + operator + comparison_term) expr = pp.operatorPrecedence(condition, [ ("not", 1, pp.opAssoc.RIGHT, ), ("and", 2, pp.opAssoc.LEFT, ), ("∧", 2, pp.opAssoc.LEFT, ), ("or", 2, pp.opAssoc.LEFT, ), ("∨", 2, pp.opAssoc.LEFT, ), ]) def _parsed_query2dict(parsed_query): result = None while parsed_query: part = parsed_query.pop() if part in binary_operator: result = {part: {parsed_query.pop(): result}} elif part in multiple_operators: if result.get(part): result[part].append( _parsed_query2dict(parsed_query.pop())) else: result = {part: [result]} elif part in uninary_operators: result = {part: result} elif isinstance(part, pp.ParseResults): kind = part.getName() if kind == "list": res = part.asList() else: res = _parsed_query2dict(part) if result is None: result = res elif isinstance(result, dict): list(result.values())[0].append(res) else: result = part return result def search_query_builder(query): parsed_query = expr.parseString(query)[0] return _parsed_query2dict(parsed_query) def list2cols(cols, objs): return cols, [tuple([o[k] for k in cols]) for o in objs] def format_string_list(objs, field): objs[field] = ", ".join(objs[field]) def format_dict_list(objs, field): objs[field] = "\n".join( "- " + ", ".join("%s: %s" % (k, v) for k, v in elem.items()) for elem in objs[field]) def format_move_dict_to_root(obj, field): for attr in obj[field]: obj["%s/%s" % (field, attr)] = obj[field][attr] del obj[field] def format_archive<|fim_middle|>at_dict_list(ap, "definition") format_string_list(ap, "aggregation_methods") def dict_from_parsed_args(parsed_args, attrs): d = {} for attr in attrs: value = getattr(parsed_args, attr) if value is not None: d[attr] = value return d def dict_to_querystring(objs): return "&".join(["%s=%s" % (k, v) for k, v in objs.items() if v is not None]) <|fim▁end|>
_policy(ap): form
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pyparsing as pp uninary_operators = ("not", ) binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥", u"≤", u"like" "in") multiple_operators = (u"and", u"or", u"∧", u"∨") operator = pp.Regex(u"|".join(binary_operator)) null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None)) boolean = "False|True|false|true" boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true") hex_string = lambda n: pp.Word(pp.hexnums, exact=n) uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 + "-" + hex_string(12)) number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?" number = pp.Regex(number).setParseAction(lambda t: float(t[0])) identifier = pp.Word(pp.alphas, pp.alphanums + "_") quoted_string = pp.QuotedString('"') | pp.QuotedString("'") comparison_term = pp.Forward() in_list = pp.Group(pp.Suppress('[') + pp.Optional(pp.delimitedList(comparison_term)) + pp.Suppress(']'))("list") comparison_term << (null | boolean | uuid | identifier | number | quoted_string | in_list) condition = pp.Group(comparison_term + operator + comparison_term) expr = pp.operatorPrecedence(condition, [ ("not", 1, pp.opAssoc.RIGHT, ), ("and", 2, pp.opAssoc.LEFT, ), ("∧", 2, pp.opAssoc.LEFT, ), ("or", 2, pp.opAssoc.LEFT, ), ("∨", 2, pp.opAssoc.LEFT, ), ]) def _parsed_query2dict(parsed_query): result = None while parsed_query: part = parsed_query.pop() if part in binary_operator: result = {part: {parsed_query.pop(): result}} elif part in multiple_operators: if result.get(part): result[part].append( _parsed_query2dict(parsed_query.pop())) else: result = {part: [result]} elif part in uninary_operators: result = {part: result} elif isinstance(part, pp.ParseResults): kind = part.getName() if kind == "list": res = part.asList() else: res = _parsed_query2dict(part) if result is None: result = res elif isinstance(result, dict): list(result.values())[0].append(res) else: result = part return result def search_query_builder(query): parsed_query = expr.parseString(query)[0] return _parsed_query2dict(parsed_query) def list2cols(cols, objs): return cols, [tuple([o[k] for k in cols]) for o in objs] def format_string_list(objs, field): objs[field] = ", ".join(objs[field]) def format_dict_list(objs, field): objs[field] = "\n".join( "- " + ", ".join("%s: %s" % (k, v) for k, v in elem.items()) for elem in objs[field]) def format_move_dict_to_root(obj, field): for attr in obj[field]: obj["%s/%s" % (field, attr)] = obj[field][attr] del obj[field] def format_archive_policy(ap): format_dict_list(ap, "definition") format_string_list(ap, "aggregation_methods") def dict_from_pars<|fim_middle|>attrs): d = {} for attr in attrs: value = getattr(parsed_args, attr) if value is not None: d[attr] = value return d def dict_to_querystring(objs): return "&".join(["%s=%s" % (k, v) for k, v in objs.items() if v is not None]) <|fim▁end|>
ed_args(parsed_args,
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pyparsing as pp uninary_operators = ("not", ) binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥", u"≤", u"like" "in") multiple_operators = (u"and", u"or", u"∧", u"∨") operator = pp.Regex(u"|".join(binary_operator)) null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None)) boolean = "False|True|false|true" boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true") hex_string = lambda n: pp.Word(pp.hexnums, exact=n) uuid = pp.Combine(hex_string(8) + ("-" + hex_string(4)) * 3 + "-" + hex_string(12)) number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?" number = pp.Regex(number).setParseAction(lambda t: float(t[0])) identifier = pp.Word(pp.alphas, pp.alphanums + "_") quoted_string = pp.QuotedString('"') | pp.QuotedString("'") comparison_term = pp.Forward() in_list = pp.Group(pp.Suppress('[') + pp.Optional(pp.delimitedList(comparison_term)) + pp.Suppress(']'))("list") comparison_term << (null | boolean | uuid | identifier | number | quoted_string | in_list) condition = pp.Group(comparison_term + operator + comparison_term) expr = pp.operatorPrecedence(condition, [ ("not", 1, pp.opAssoc.RIGHT, ), ("and", 2, pp.opAssoc.LEFT, ), ("∧", 2, pp.opAssoc.LEFT, ), ("or", 2, pp.opAssoc.LEFT, ), ("∨", 2, pp.opAssoc.LEFT, ), ]) def _parsed_query2dict(parsed_query): result = None while parsed_query: part = parsed_query.pop() if part in binary_operator: result = {part: {parsed_query.pop(): result}} elif part in multiple_operators: if result.get(part): result[part].append( _parsed_query2dict(parsed_query.pop())) else: result = {part: [result]} elif part in uninary_operators: result = {part: result} elif isinstance(part, pp.ParseResults): kind = part.getName() if kind == "list": res = part.asList() else: res = _parsed_query2dict(part) if result is None: result = res elif isinstance(result, dict): list(result.values())[0].append(res) else: result = part return result def search_query_builder(query): parsed_query = expr.parseString(query)[0] return _parsed_query2dict(parsed_query) def list2cols(cols, objs): return cols, [tuple([o[k] for k in cols]) for o in objs] def format_string_list(objs, field): objs[field] = ", ".join(objs[field]) def format_dict_list(objs, field): objs[field] = "\n".join( "- " + ", ".join("%s: %s" % (k, v) for k, v in elem.items()) for elem in objs[field]) def format_move_dict_to_root(obj, field): for attr in obj[field]: obj["%s/%s" % (field, attr)] = obj[field][attr] del obj[field] def format_archive_policy(ap): format_dict_list(ap, "definition") format_string_list(ap, "aggregation_methods") def dict_from_parsed_args(parsed_args, attrs): d = {} for attr in attrs: value = getattr(parsed_args, attr) if value is not None: d[attr] = value return d def dict_to_querys<|fim_middle|>turn "&".join(["%s=%s" % (k, v) for k, v in objs.items() if v is not None]) <|fim▁end|>
tring(objs): re
<|file_name|>character_count.py<|end_file_name|><|fim▁begin|>from collections import defaultdict import codecs def count(corpus, output_file): debug = False dic = defaultdict(int) other = set() fout = codecs.open(output_file, 'w', 'utf8') for line in open(corpus, 'r'): words = line.split() for word in words: if len(word) % 3 == 0: for i in xrange(len(word) / 3): dic[word[i:i+3]] += 1 else: other.add(word) fout.write('%i %i\n' % (len(dic), len(other))) record_list = [(y, x) for x, y in dic.items()] record_list.sort() record_list.reverse() i = 0 for x, y in record_list: #print y.decode('utf8'), x try: yy = y.decode('GBK') except: print y<|fim▁hole|> if i > 10 and debug: break other_list = list(other) other_list.sort() for item in other_list: #print item.decode('utf8') item2 = item.decode('utf8') fout.write(item2) fout.write('\n') i += 1 if i > 20 and debug: break fout.close() if __name__ =='__main__': count('data/train.zh_parsed', 'output/count.zh') count('data/train.ja_parsed', 'output/count.ja')<|fim▁end|>
yy = 'N/A' fout.write('%s %i\n' % (yy, x)) i += 1
<|file_name|>character_count.py<|end_file_name|><|fim▁begin|>from collections import defaultdict import codecs def count(corpus, output_file): <|fim_middle|> if __name__ =='__main__': count('data/train.zh_parsed', 'output/count.zh') count('data/train.ja_parsed', 'output/count.ja') <|fim▁end|>
debug = False dic = defaultdict(int) other = set() fout = codecs.open(output_file, 'w', 'utf8') for line in open(corpus, 'r'): words = line.split() for word in words: if len(word) % 3 == 0: for i in xrange(len(word) / 3): dic[word[i:i+3]] += 1 else: other.add(word) fout.write('%i %i\n' % (len(dic), len(other))) record_list = [(y, x) for x, y in dic.items()] record_list.sort() record_list.reverse() i = 0 for x, y in record_list: #print y.decode('utf8'), x try: yy = y.decode('GBK') except: print y yy = 'N/A' fout.write('%s %i\n' % (yy, x)) i += 1 if i > 10 and debug: break other_list = list(other) other_list.sort() for item in other_list: #print item.decode('utf8') item2 = item.decode('utf8') fout.write(item2) fout.write('\n') i += 1 if i > 20 and debug: break fout.close()
<|file_name|>character_count.py<|end_file_name|><|fim▁begin|>from collections import defaultdict import codecs def count(corpus, output_file): debug = False dic = defaultdict(int) other = set() fout = codecs.open(output_file, 'w', 'utf8') for line in open(corpus, 'r'): words = line.split() for word in words: if len(word) % 3 == 0: <|fim_middle|> else: other.add(word) fout.write('%i %i\n' % (len(dic), len(other))) record_list = [(y, x) for x, y in dic.items()] record_list.sort() record_list.reverse() i = 0 for x, y in record_list: #print y.decode('utf8'), x try: yy = y.decode('GBK') except: print y yy = 'N/A' fout.write('%s %i\n' % (yy, x)) i += 1 if i > 10 and debug: break other_list = list(other) other_list.sort() for item in other_list: #print item.decode('utf8') item2 = item.decode('utf8') fout.write(item2) fout.write('\n') i += 1 if i > 20 and debug: break fout.close() if __name__ =='__main__': count('data/train.zh_parsed', 'output/count.zh') count('data/train.ja_parsed', 'output/count.ja') <|fim▁end|>
for i in xrange(len(word) / 3): dic[word[i:i+3]] += 1
<|file_name|>character_count.py<|end_file_name|><|fim▁begin|>from collections import defaultdict import codecs def count(corpus, output_file): debug = False dic = defaultdict(int) other = set() fout = codecs.open(output_file, 'w', 'utf8') for line in open(corpus, 'r'): words = line.split() for word in words: if len(word) % 3 == 0: for i in xrange(len(word) / 3): dic[word[i:i+3]] += 1 else: <|fim_middle|> fout.write('%i %i\n' % (len(dic), len(other))) record_list = [(y, x) for x, y in dic.items()] record_list.sort() record_list.reverse() i = 0 for x, y in record_list: #print y.decode('utf8'), x try: yy = y.decode('GBK') except: print y yy = 'N/A' fout.write('%s %i\n' % (yy, x)) i += 1 if i > 10 and debug: break other_list = list(other) other_list.sort() for item in other_list: #print item.decode('utf8') item2 = item.decode('utf8') fout.write(item2) fout.write('\n') i += 1 if i > 20 and debug: break fout.close() if __name__ =='__main__': count('data/train.zh_parsed', 'output/count.zh') count('data/train.ja_parsed', 'output/count.ja') <|fim▁end|>
other.add(word)
<|file_name|>character_count.py<|end_file_name|><|fim▁begin|>from collections import defaultdict import codecs def count(corpus, output_file): debug = False dic = defaultdict(int) other = set() fout = codecs.open(output_file, 'w', 'utf8') for line in open(corpus, 'r'): words = line.split() for word in words: if len(word) % 3 == 0: for i in xrange(len(word) / 3): dic[word[i:i+3]] += 1 else: other.add(word) fout.write('%i %i\n' % (len(dic), len(other))) record_list = [(y, x) for x, y in dic.items()] record_list.sort() record_list.reverse() i = 0 for x, y in record_list: #print y.decode('utf8'), x try: yy = y.decode('GBK') except: print y yy = 'N/A' fout.write('%s %i\n' % (yy, x)) i += 1 if i > 10 and debug: <|fim_middle|> other_list = list(other) other_list.sort() for item in other_list: #print item.decode('utf8') item2 = item.decode('utf8') fout.write(item2) fout.write('\n') i += 1 if i > 20 and debug: break fout.close() if __name__ =='__main__': count('data/train.zh_parsed', 'output/count.zh') count('data/train.ja_parsed', 'output/count.ja') <|fim▁end|>
break
<|file_name|>character_count.py<|end_file_name|><|fim▁begin|>from collections import defaultdict import codecs def count(corpus, output_file): debug = False dic = defaultdict(int) other = set() fout = codecs.open(output_file, 'w', 'utf8') for line in open(corpus, 'r'): words = line.split() for word in words: if len(word) % 3 == 0: for i in xrange(len(word) / 3): dic[word[i:i+3]] += 1 else: other.add(word) fout.write('%i %i\n' % (len(dic), len(other))) record_list = [(y, x) for x, y in dic.items()] record_list.sort() record_list.reverse() i = 0 for x, y in record_list: #print y.decode('utf8'), x try: yy = y.decode('GBK') except: print y yy = 'N/A' fout.write('%s %i\n' % (yy, x)) i += 1 if i > 10 and debug: break other_list = list(other) other_list.sort() for item in other_list: #print item.decode('utf8') item2 = item.decode('utf8') fout.write(item2) fout.write('\n') i += 1 if i > 20 and debug: <|fim_middle|> fout.close() if __name__ =='__main__': count('data/train.zh_parsed', 'output/count.zh') count('data/train.ja_parsed', 'output/count.ja') <|fim▁end|>
break
<|file_name|>character_count.py<|end_file_name|><|fim▁begin|>from collections import defaultdict import codecs def count(corpus, output_file): debug = False dic = defaultdict(int) other = set() fout = codecs.open(output_file, 'w', 'utf8') for line in open(corpus, 'r'): words = line.split() for word in words: if len(word) % 3 == 0: for i in xrange(len(word) / 3): dic[word[i:i+3]] += 1 else: other.add(word) fout.write('%i %i\n' % (len(dic), len(other))) record_list = [(y, x) for x, y in dic.items()] record_list.sort() record_list.reverse() i = 0 for x, y in record_list: #print y.decode('utf8'), x try: yy = y.decode('GBK') except: print y yy = 'N/A' fout.write('%s %i\n' % (yy, x)) i += 1 if i > 10 and debug: break other_list = list(other) other_list.sort() for item in other_list: #print item.decode('utf8') item2 = item.decode('utf8') fout.write(item2) fout.write('\n') i += 1 if i > 20 and debug: break fout.close() if __name__ =='__main__': <|fim_middle|> <|fim▁end|>
    count('data/train.zh_parsed', 'output/count.zh')
    count('data/train.ja_parsed', 'output/count.ja')
<|file_name|>character_count.py<|end_file_name|><|fim▁begin|>from collections import defaultdict import codecs def <|fim_middle|>(corpus, output_file): debug = False dic = defaultdict(int) other = set() fout = codecs.open(output_file, 'w', 'utf8') for line in open(corpus, 'r'): words = line.split() for word in words: if len(word) % 3 == 0: for i in xrange(len(word) / 3): dic[word[i:i+3]] += 1 else: other.add(word) fout.write('%i %i\n' % (len(dic), len(other))) record_list = [(y, x) for x, y in dic.items()] record_list.sort() record_list.reverse() i = 0 for x, y in record_list: #print y.decode('utf8'), x try: yy = y.decode('GBK') except: print y yy = 'N/A' fout.write('%s %i\n' % (yy, x)) i += 1 if i > 10 and debug: break other_list = list(other) other_list.sort() for item in other_list: #print item.decode('utf8') item2 = item.decode('utf8') fout.write(item2) fout.write('\n') i += 1 if i > 20 and debug: break fout.close() if __name__ =='__main__': count('data/train.zh_parsed', 'output/count.zh') count('data/train.ja_parsed', 'output/count.ja') <|fim▁end|>
count
<|file_name|>browser_default.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|>from rdflib.namespace import FOAF from flask import Flask import flask_rdf import random app = Flask(__name__) # set up a custom formatter to return turtle in text/plain to browsers custom_formatter = flask_rdf.FormatSelector() custom_formatter.wildcard_mimetype = 'text/plain' custom_formatter.add_format('text/plain', 'turtle') custom_decorator = flask_rdf.flask.Decorator(custom_formatter) @app.route('/') @app.route('/<path:path>') @custom_decorator def random_age(path=''): graph = Graph('IOMemory', BNode()) graph.add((URIRef(path), FOAF.age, Literal(random.randint(20, 50)))) return graph if __name__ == '__main__': app.run(host='0.0.0.0', debug=True)<|fim▁end|>
from rdflib import Graph, BNode, Literal, URIRef
<|file_name|>browser_default.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from rdflib import Graph, BNode, Literal, URIRef from rdflib.namespace import FOAF from flask import Flask import flask_rdf import random app = Flask(__name__) # set up a custom formatter to return turtle in text/plain to browsers custom_formatter = flask_rdf.FormatSelector() custom_formatter.wildcard_mimetype = 'text/plain' custom_formatter.add_format('text/plain', 'turtle') custom_decorator = flask_rdf.flask.Decorator(custom_formatter) @app.route('/') @app.route('/<path:path>') @custom_decorator def random_age(path=''): <|fim_middle|> if __name__ == '__main__': app.run(host='0.0.0.0', debug=True) <|fim▁end|>
    graph = Graph('IOMemory', BNode())
    graph.add((URIRef(path), FOAF.age, Literal(random.randint(20, 50))))
    return graph
<|file_name|>browser_default.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from rdflib import Graph, BNode, Literal, URIRef from rdflib.namespace import FOAF from flask import Flask import flask_rdf import random app = Flask(__name__) # set up a custom formatter to return turtle in text/plain to browsers custom_formatter = flask_rdf.FormatSelector() custom_formatter.wildcard_mimetype = 'text/plain' custom_formatter.add_format('text/plain', 'turtle') custom_decorator = flask_rdf.flask.Decorator(custom_formatter) @app.route('/') @app.route('/<path:path>') @custom_decorator def random_age(path=''): graph = Graph('IOMemory', BNode()) graph.add((URIRef(path), FOAF.age, Literal(random.randint(20, 50)))) return graph if __name__ == '__main__': <|fim_middle|> <|fim▁end|>
app.run(host='0.0.0.0', debug=True)
<|file_name|>browser_default.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from rdflib import Graph, BNode, Literal, URIRef from rdflib.namespace import FOAF from flask import Flask import flask_rdf import random app = Flask(__name__) # set up a custom formatter to return turtle in text/plain to browsers custom_formatter = flask_rdf.FormatSelector() custom_formatter.wildcard_mimetype = 'text/plain' custom_formatter.add_format('text/plain', 'turtle') custom_decorator = flask_rdf.flask.Decorator(custom_formatter) @app.route('/') @app.route('/<path:path>') @custom_decorator def <|fim_middle|>(path=''): graph = Graph('IOMemory', BNode()) graph.add((URIRef(path), FOAF.age, Literal(random.randint(20, 50)))) return graph if __name__ == '__main__': app.run(host='0.0.0.0', debug=True) <|fim▁end|>
random_age
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info):<|fim▁hole|>def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], 
inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow<|fim▁end|>
LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"])
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): <|fim_middle|> class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], 
inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
    def execute(self, user_id):
        user = self.cloud.keystone.users.get(user_id)
        self.cloud.identity.fetch(user.id)
        return user.to_dict()
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): <|fim_middle|> class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, 
rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
        user = self.cloud.keystone.users.get(user_id)
        self.cloud.identity.fetch(user.id)
        return user.to_dict()
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): <|fim_middle|> class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
    def execute(self, user_info, tenant_info):
        try:
            user = self.cloud.keystone.users.find(name=user_info["name"])
        # TODO(akscram): Current password should be replaced by temporary.
        except exceptions.keystone_excs.NotFound:
            user = self.cloud.keystone.users.create(
                name=user_info["name"],
                # TODO(akscram): Here we should generate a temporary
                #                password for the user and use them
                #                along the migration process.
                #                The RepairUserPasswords should repair
                #                original after all operations.
                password="default",
                email=user_info["email"],
                tenant_id=tenant_info["id"] if tenant_info else None,
                enabled=user_info["enabled"],
            )
            self.created_event(user)
        return user.to_dict()

    def created_event(self, user):
        LOG.info("Created user: %s", user)
        events.emit("create", {
            "id": user.id,
            "type": "user",
            "cloud": self.cloud.name,
            "data": user.to_dict(),
        }, namespace="/events")
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): <|fim_middle|> def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
        try:
            user = self.cloud.keystone.users.find(name=user_info["name"])
        # TODO(akscram): Current password should be replaced by temporary.
        except exceptions.keystone_excs.NotFound:
            user = self.cloud.keystone.users.create(
                name=user_info["name"],
                # TODO(akscram): Here we should generate a temporary
                #                password for the user and use them
                #                along the migration process.
                #                The RepairUserPasswords should repair
                #                original after all operations.
                password="default",
                email=user_info["email"],
                tenant_id=tenant_info["id"] if tenant_info else None,
                enabled=user_info["enabled"],
            )
            self.created_event(user)
        return user.to_dict()
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): <|fim_middle|> class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) 
context.store[user_retrieve] = user_id return flow <|fim▁end|>
LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events")
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): <|fim_middle|> class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, name=user_ensure, 
provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
    def execute(self, user_info):
        super(EnsureOrphanUser, self).execute(user_info, None)
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): <|fim_middle|> class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, 
name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
super(EnsureOrphanUser, self).execute(user_info, None)
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): <|fim_middle|> def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
    def execute(self, user_info, role_info, tenant_info):
        try:
            self.cloud.keystone.tenants.add_user(tenant_info["id"],
                                                 user_info["id"],
                                                 role_info["id"])
        except exceptions.keystone_excs.Conflict:
            pass
        else:
            self.role_assigned_event(role_info, user_info, tenant_info)
        return user_info

    def role_assigned_event(self, role_info, user_info, tenant_info):
        LOG.info("Created role %s assignment for user %s in tenant %s",
                 role_info["id"], user_info["id"], tenant_info["id"])
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): <|fim_middle|> def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
        try:
            self.cloud.keystone.tenants.add_user(tenant_info["id"],
                                                 user_info["id"],
                                                 role_info["id"])
        except exceptions.keystone_excs.Conflict:
            pass
        else:
            self.role_assigned_event(role_info, user_info, tenant_info)
        return user_info
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): <|fim_middle|> def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], 
inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"])
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): <|fim_middle|> def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): <|fim_middle|> <|fim▁end|>
user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: <|fim_middle|> return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: flow.add(EnsureUser(context.dst_cloud, 
name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
self.role_assigned_event(role_info, user_info, tenant_info)
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: <|fim_middle|> else: flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = 
user_id return flow <|fim▁end|>
tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure]))
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: 
<|fim_middle|> context.store[user_retrieve] = user_id return flow <|fim▁end|>
flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None}))
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def <|fim_middle|>(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: 
flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
execute
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def <|fim_middle|>(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: 
flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
execute
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def <|fim_middle|>(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: 
flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
created_event
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def <|fim_middle|>(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: 
flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
execute
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def <|fim_middle|>(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: 
flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
execute
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def <|fim_middle|>(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: 
flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
role_assigned_event
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def <|fim_middle|>(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def migrate_user(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: 
flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
migrate_membership
<|file_name|>user.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and# # limitations under the License. import logging from taskflow.patterns import linear_flow from pumphouse import exceptions from pumphouse import events from pumphouse import task LOG = logging.getLogger(__name__) class RetrieveUser(task.BaseCloudTask): def execute(self, user_id): user = self.cloud.keystone.users.get(user_id) self.cloud.identity.fetch(user.id) return user.to_dict() class EnsureUser(task.BaseCloudTask): def execute(self, user_info, tenant_info): try: user = self.cloud.keystone.users.find(name=user_info["name"]) # TODO(akscram): Current password should be replaced by temporary. except exceptions.keystone_excs.NotFound: user = self.cloud.keystone.users.create( name=user_info["name"], # TODO(akscram): Here we should generate a temporary # password for the user and use them # along the migration process. # The RepairUserPasswords should repair # original after all operations. password="default", email=user_info["email"], tenant_id=tenant_info["id"] if tenant_info else None, enabled=user_info["enabled"], ) self.created_event(user) return user.to_dict() def created_event(self, user): LOG.info("Created user: %s", user) events.emit("create", { "id": user.id, "type": "user", "cloud": self.cloud.name, "data": user.to_dict(), }, namespace="/events") class EnsureOrphanUser(EnsureUser): def execute(self, user_info): super(EnsureOrphanUser, self).execute(user_info, None) class EnsureUserRole(task.BaseCloudTask): def execute(self, user_info, role_info, tenant_info): try: self.cloud.keystone.tenants.add_user(tenant_info["id"], user_info["id"], role_info["id"]) except exceptions.keystone_excs.Conflict: pass else: self.role_assigned_event(role_info, user_info, tenant_info) return user_info def role_assigned_event(self, role_info, user_info, tenant_info): LOG.info("Created role %s assignment for user %s in tenant %s", role_info["id"], user_info["id"], tenant_info["id"]) def migrate_membership(context, user_id, role_id, tenant_id): user_ensure = "user-{}-ensure".format(user_id) role_ensure = "role-{}-ensure".format(role_id) tenant_ensure = "tenant-{}-ensure".format(tenant_id) user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id, tenant_id) task = EnsureUserRole(context.dst_cloud, name=user_role_ensure, provides=user_role_ensure, rebind=[user_ensure, role_ensure, tenant_ensure]) context.store[user_role_ensure] = user_role_ensure return task def <|fim_middle|>(context, user_id, tenant_id=None): user_binding = "user-{}".format(user_id) user_retrieve = "{}-retrieve".format(user_binding) user_ensure = "{}-ensure".format(user_binding) flow = linear_flow.Flow("migrate-user-{}".format(user_id)) flow.add(RetrieveUser(context.src_cloud, name=user_binding, provides=user_binding, rebind=[user_retrieve])) if tenant_id is not None: tenant_ensure = "tenant-{}-ensure".format(tenant_id) flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure])) else: 
flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding], inject={"tenant_info": None})) context.store[user_retrieve] = user_id return flow <|fim▁end|>
migrate_user
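A hedged sketch (not part of the dataset): one way a reader of this dump might stitch a prompt/completion pair like the rows above back into the full source file. The sentinel tokens are copied from the rows themselves; the helper name and return shape are illustrative assumptions.

# Minimal reconstruction helper for rows in this dump (assumed row format).
import re

FIM_BEGIN = "<|fim▁begin|>"
FIM_END = "<|fim▁end|>"
HOLE_MARKERS = ("<|fim_middle|>", "<|fim▁hole|>")  # both spellings appear in this dump
FILENAME_RE = re.compile(r"<\|file_name\|>(.*?)<\|end_file_name\|>", re.S)

def reconstruct(prompt, completion):
    """Return (file_name, full_source) for a single prompt/completion row."""
    match = FILENAME_RE.search(prompt)
    file_name = match.group(1) if match else "<unknown>"
    # Keep only the code between the begin/end sentinels.
    body = prompt.split(FIM_BEGIN, 1)[-1].rsplit(FIM_END, 1)[0]
    for marker in HOLE_MARKERS:
        if marker in body:
            prefix, suffix = body.split(marker, 1)
            return file_name, prefix + completion + suffix
    # No hole marker: the completion simply continues the prefix.
    return file_name, body + completion

For example, calling reconstruct on the first user.py row above with its completion "self.role_assigned_event(role_info, user_info, tenant_info)" yields the complete module text.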
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) <|fim▁hole|> if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"])<|fim▁end|>
spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"]
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): <|fim_middle|> @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
abort(503, "Not yet implemented")
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): <|fim_middle|> @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
try:
        return whitelist(c.db().users.find_one({"email": who}), [
            "email",
            "firstName",
            "pseudonym"
            ])
    except TypeError:
        logger.warn("Asked about {email}, but that is not a user".format(
            email=who))
        abort(404, "User not found")
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): <|fim_middle|> @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"])
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" <|fim_middle|> @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
user = whitelist(request.json, [
        "pseudonym",
        "firstName",
        "surname",
        "private_email",
        "token",
        ])
    if "private_email" not in user:
        abort(400, "You must provide a private_email field")
    user["email_key"] = str(uuid.uuid4())
    user["registered"] = c.unixtime()
    user["email"] = who
    c.db().users.ensure_index("email", unique=True)
    c.db().users.insert(user, safe=True)
    response.status = 202
    logger.info("{email} subscribed".format(email=who))
    jobs.enqueue(messages.SendInvite(user))
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): <|fim_middle|> <|fim▁end|>
if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"])
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: <|fim_middle|> else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
response.status = 400
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: <|fim_middle|> @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"])
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: <|fim_middle|> user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
abort(400, "You must provide a private_email field")
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: <|fim_middle|> else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
salt = bcrypt.gensalt(1)
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: <|fim_middle|> password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
salt = bcrypt.gensalt()
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: <|fim_middle|> user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key")
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): <|fim_middle|> else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified")
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: <|fim_middle|> user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key")
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def <|fim_middle|>(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
users
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def <|fim_middle|>(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
user_
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def <|fim_middle|>(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
_self
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def <|fim_middle|>(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def verify_email(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
request_invite
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things # Copyright (C) 2012 Cal Paterson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import uuid from bottle import Bottle, request, response, abort import bcrypt from recall.data import whitelist, blacklist from recall import convenience as c from recall import plugins, jobs, messages app = Bottle() app.install(plugins.exceptions) app.install(plugins.ppjson) app.install(plugins.auth) app.install(plugins.cors) app.error_handler = plugins.handler_dict logger = c.logger("people") @app.get("/") def users(): abort(503, "Not yet implemented") @app.get("/<who>/") def user_(who): try: return whitelist(c.db().users.find_one({"email": who}), [ "email", "firstName", "pseudonym" ]) except TypeError: logger.warn("Asked about {email}, but that is not a user".format( email=who)) abort(404, "User not found") @app.get("/<who>/self") def _self(who, user): if who != user["email"]: response.status = 400 else: return whitelist(user, ["pseudonym", "firstName", "surname", "email", "private_email"]) @app.post("/<who>/") def request_invite(who): # FIXME: Don't allow the pseudonym "public" user = whitelist(request.json, [ "pseudonym", "firstName", "surname", "private_email", "token", ]) if "private_email" not in user: abort(400, "You must provide a private_email field") user["email_key"] = str(uuid.uuid4()) user["registered"] = c.unixtime() user["email"] = who c.db().users.ensure_index("email", unique=True) c.db().users.insert(user, safe=True) response.status = 202 logger.info("{email} subscribed".format(email=who)) jobs.enqueue(messages.SendInvite(user)) @app.post("/<who>/<email_key>") def <|fim_middle|>(who, email_key): if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings: salt = bcrypt.gensalt(1) else: salt = bcrypt.gensalt() password_hash = bcrypt.hashpw(request.json["password"], salt) spec = {"email_key": email_key, "verified": {"$exists": False}} update = {"$set": {"password_hash": password_hash, "verified": c.unixtime()}} success = c.db().users.update(spec, update, safe=True)["updatedExisting"] if not success: if c.db().users.find_one({"email_key": email_key}): logger.warn("{email} tried to verify a second time".format(email=who)) abort(403, "Already verified") else: logger.warn("Someone tried to verify with a key, but it doesn't exist") abort(404, "Don't know that key") user = c.db().users.find_one({"email_key": email_key}) response.status = 201 return blacklist(user, ["_id", "email_key", "password_hash"]) <|fim▁end|>
verify_email
<|file_name|>admin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from django.contrib import admin from models import FileMapping <|fim▁hole|>admin.site.register(FileMapping)<|fim▁end|>
# Register your models here.
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' <|fim▁hole|> class TestCPESorter(unittest.TestCase): def test_sort_cpes_by_software_version(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], sorted_cpes[2]) # 4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 
self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2 def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes) def test_sort_cpes_by_version_with_year(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001 def test_sort_cpes_by_operating_system_windows(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0]) def test_sort_cpes_by_operating_system_linux(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0]) def test_sort_cpes_by_operating_system_android(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1]) if __name__ == '__main__': unittest.main()<|fim▁end|>
os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11'
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11' class TestCPESorter(unittest.TestCase): <|fim_middle|> if __name__ == '__main__': unittest.main() <|fim▁end|>
def test_sort_cpes_by_software_version(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], sorted_cpes[2]) # 4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2 def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes) def test_sort_cpes_by_version_with_year(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001 def test_sort_cpes_by_operating_system_windows(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0]) def test_sort_cpes_by_operating_system_linux(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0]) def test_sort_cpes_by_operating_system_android(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1])
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11' class TestCPESorter(unittest.TestCase): def test_sort_cpes_by_software_version(self): <|fim_middle|> def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes) def 
test_sort_cpes_by_version_with_year(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001 def test_sort_cpes_by_operating_system_windows(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0]) def test_sort_cpes_by_operating_system_linux(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0]) def test_sort_cpes_by_operating_system_android(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1]) if __name__ == '__main__': unittest.main() <|fim▁end|>
sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], sorted_cpes[2]) # 4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11' class TestCPESorter(unittest.TestCase): def test_sort_cpes_by_software_version(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], 
sorted_cpes[2]) # 4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2 def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self): <|fim_middle|> def test_sort_cpes_by_version_with_year(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001 def test_sort_cpes_by_operating_system_windows(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0]) def test_sort_cpes_by_operating_system_linux(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0]) def test_sort_cpes_by_operating_system_android(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1]) if __name__ == '__main__': unittest.main() <|fim▁end|>
sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes)
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11' class TestCPESorter(unittest.TestCase): def test_sort_cpes_by_software_version(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], 
sorted_cpes[2]) # 4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2 def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes) def test_sort_cpes_by_version_with_year(self): <|fim_middle|> def test_sort_cpes_by_operating_system_windows(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0]) def test_sort_cpes_by_operating_system_linux(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0]) def test_sort_cpes_by_operating_system_android(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1]) if __name__ == '__main__': unittest.main() <|fim▁end|>
sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001
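The rows above only assert the expected orderings; the actual sort_cpes_by_version implementation from matching.cpe_sorter is not included in this dump. A minimal sketch consistent with those assertions — assuming candidates are ranked by how many leading dot-separated version components they share with the queried version, with a separate branch for four-digit year versions, and that ties keep their original order — might look like the following (helper names are illustrative, not the project's real code):

def _shared_leading_parts(a, b):
    # Count how many leading dot-separated components two version strings share.
    count = 0
    for x, y in zip(a.split('.'), b.split('.')):
        if x != y:
            break
        count += 1
    return count

def _is_year(version):
    # True for four-digit, dot-free versions such as '2010'.
    return version.isdigit() and len(version) == 4

def sort_cpes_by_version(cpes, version):
    # Rank each CPE by how close its version is to the queried one.
    def rank(cpe):
        candidate = cpe['wfn']['version']
        if _is_year(version):
            # Year-style query: the exact year first, then other year-style versions.
            if candidate == version:
                return 2
            return 1 if _is_year(candidate) else 0
        return _shared_leading_parts(candidate, version)
    # sorted() is stable even with reverse=True, so entries with equal rank keep
    # their input order, which is why the three '2010' entries come back in
    # definition order in the year test.
    return sorted(cpes, key=rank, reverse=True)

Under this reading, a query such as '4_7-2' shares no leading component with any candidate, so every rank is zero and the stable sort returns the list unchanged, matching the not_splitted_by_points expectation.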
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11' class TestCPESorter(unittest.TestCase): def test_sort_cpes_by_software_version(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], 
sorted_cpes[2]) # 4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2 def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes) def test_sort_cpes_by_version_with_year(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001 def test_sort_cpes_by_operating_system_windows(self): <|fim_middle|> def test_sort_cpes_by_operating_system_linux(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0]) def test_sort_cpes_by_operating_system_android(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1]) if __name__ == '__main__': unittest.main() <|fim▁end|>
sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0])
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11' class TestCPESorter(unittest.TestCase): def test_sort_cpes_by_software_version(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], 
sorted_cpes[2]) # 4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2 def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes) def test_sort_cpes_by_version_with_year(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001 def test_sort_cpes_by_operating_system_windows(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0]) def test_sort_cpes_by_operating_system_linux(self): <|fim_middle|> def test_sort_cpes_by_operating_system_android(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1]) if __name__ == '__main__': unittest.main() <|fim▁end|>
sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0])
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11' class TestCPESorter(unittest.TestCase): def test_sort_cpes_by_software_version(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], 
sorted_cpes[2]) # 4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2 def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes) def test_sort_cpes_by_version_with_year(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001 def test_sort_cpes_by_operating_system_windows(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0]) def test_sort_cpes_by_operating_system_linux(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0]) def test_sort_cpes_by_operating_system_android(self): <|fim_middle|> if __name__ == '__main__': unittest.main() <|fim▁end|>
sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1])
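For the operating-system tests, the assertions are consistent with a simple two-tier heuristic: CPEs whose target_sw is a prefix of the queried OS string (including exact matches such as 'android') come first, CPEs where the queried OS is itself a prefix of target_sw (such as 'android_marshmallow' for 'android') come next, and everything else keeps its relative order. Again a hedged sketch, not the project's actual code:

def sort_cpes_by_operating_system(cpes, operating_system):
    # Two-tier ranking: target_sw is a prefix of the OS string, then the reverse.
    def rank(cpe):
        target_sw = cpe['wfn']['target_sw']
        if operating_system.startswith(target_sw):
            return 2   # e.g. target_sw 'windows' for OS 'windows_7', or an exact match
        if target_sw.startswith(operating_system):
            return 1   # e.g. target_sw 'android_marshmallow' for OS 'android'
        return 0
    # Stable sort leaves the remaining entries (including target_sw 'ANY') in place.
    return sorted(cpes, key=rank, reverse=True)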
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11' class TestCPESorter(unittest.TestCase): def test_sort_cpes_by_software_version(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], 
sorted_cpes[2]) # 4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2 def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes) def test_sort_cpes_by_version_with_year(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001 def test_sort_cpes_by_operating_system_windows(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0]) def test_sort_cpes_by_operating_system_linux(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0]) def test_sort_cpes_by_operating_system_android(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1]) if __name__ == '__main__': <|fim_middle|> <|fim▁end|>
unittest.main()
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11' class TestCPESorter(unittest.TestCase): def <|fim_middle|>(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], sorted_cpes[2]) # 
4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2 def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes) def test_sort_cpes_by_version_with_year(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001 def test_sort_cpes_by_operating_system_windows(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0]) def test_sort_cpes_by_operating_system_linux(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0]) def test_sort_cpes_by_operating_system_android(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1]) if __name__ == '__main__': unittest.main() <|fim▁end|>
test_sort_cpes_by_software_version
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11' class TestCPESorter(unittest.TestCase): def test_sort_cpes_by_software_version(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], 
sorted_cpes[2]) # 4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2 def <|fim_middle|>(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes) def test_sort_cpes_by_version_with_year(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001 def test_sort_cpes_by_operating_system_windows(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0]) def test_sort_cpes_by_operating_system_linux(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0]) def test_sort_cpes_by_operating_system_android(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1]) if __name__ == '__main__': unittest.main() <|fim▁end|>
test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11' class TestCPESorter(unittest.TestCase): def test_sort_cpes_by_software_version(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], 
sorted_cpes[2]) # 4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2 def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes) def <|fim_middle|>(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001 def test_sort_cpes_by_operating_system_windows(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0]) def test_sort_cpes_by_operating_system_linux(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0]) def test_sort_cpes_by_operating_system_android(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1]) if __name__ == '__main__': unittest.main() <|fim▁end|>
test_sort_cpes_by_version_with_year
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11' class TestCPESorter(unittest.TestCase): def test_sort_cpes_by_software_version(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], 
sorted_cpes[2]) # 4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2 def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes) def test_sort_cpes_by_version_with_year(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001 def <|fim_middle|>(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0]) def test_sort_cpes_by_operating_system_linux(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0]) def test_sort_cpes_by_operating_system_android(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1]) if __name__ == '__main__': unittest.main() <|fim▁end|>
test_sort_cpes_by_operating_system_windows
<|file_name|>test_cpe_sorter.py<|end_file_name|><|fim▁begin|>import unittest from matching.cpe_sorter import * unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '4.6.3', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '4.7.2', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2.3.1', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'} ] unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'}, {'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'}, {'wfn': {'version': '4.1.2', 'target_sw': 'ANY'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'}, {'wfn': {'version': '2010', 'target_sw': 'windows'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'}, {'wfn': {'version': '4.7.1', 'target_sw': 'android'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'}, {'wfn': {'version': '2001', 'target_sw': 'ANY'}, 'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'}, {'wfn': {'version': '4.3.2', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'}, {'wfn': {'version': '2010', 'target_sw': 'linux'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'}, {'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}, {'wfn': {'version': '2010', 'target_sw': 'mac_os_x'}, 'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}] version = '4.7.2' version_without_points = '4_7-2' version_year = '2010' os_windows = 'windows_7' os_linux = 'linux_ubuntu' os_android = 'android' os_mac = 'mac_os_x_10.11' class TestCPESorter(unittest.TestCase): def test_sort_cpes_by_software_version(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2 self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1 self.assertEqual(unsorted_cpes[8], 
sorted_cpes[2]) # 4.7.3 self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0 self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2 self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3 self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2 def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points) self.assertListEqual(unsorted_cpes, sorted_cpes) def test_sort_cpes_by_version_with_year(self): sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year) self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes)) self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010 self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010 self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010 self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000 self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007 self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001 def test_sort_cpes_by_operating_system_windows(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[3], sorted_cpes[0]) def <|fim_middle|>(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[6], sorted_cpes[0]) def test_sort_cpes_by_operating_system_android(self): sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android) self.assertEqual(len(unsorted_cpes), len(sorted_cpes)) self.assertEqual(unsorted_cpes[4], sorted_cpes[0]) self.assertEqual(unsorted_cpes[0], sorted_cpes[1]) if __name__ == '__main__': unittest.main() <|fim▁end|>
test_sort_cpes_by_operating_system_linux
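Each row in this dump pairs a fill-in-the-middle prompt (the test module with one span masked out between the <|fim▁begin|>, <|fim_middle|> and <|fim▁end|> sentinels) with the completion that belongs in that hole — in the later rows the masked span is just a test method name. A small helper to splice a completion back into its prompt; the sentinel strings are copied from the rows above, while the function name and structure are purely illustrative:

FIM_BEGIN = '<|fim▁begin|>'
FIM_MIDDLE = '<|fim_middle|>'
FIM_END = '<|fim▁end|>'

def splice_fim_row(prompt, completion):
    # Rebuild the full source text by inserting the completion at the masked span.
    body = prompt.split(FIM_BEGIN, 1)[1]        # drop the <|file_name|> header
    prefix, rest = body.split(FIM_MIDDLE, 1)    # text before the hole
    suffix = rest.split(FIM_END, 1)[0]          # text after the hole
    return prefix + completion + suffix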