prompt: large_string, lengths 72 to 9.34k characters
completion: large_string, lengths 0 to 7.61k characters
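Each row below pairs a prompt (a source file wrapped in FIM sentinel tokens, with one span cut out) with the completion that fills the hole. The following is a minimal sketch, not part of the dataset itself, of how such a prompt could be split back into filename, prefix, and suffix and recombined with its completion. The token strings are copied from the rows below; treating both <|fim_middle|> and <|fim▁hole|> as hole markers is an assumption based on the mix of markers seen in these records.

# Hypothetical helper for parsing one prompt/completion pair of this FIM format.
FILE_START = "<|file_name|>"
FILE_END = "<|end_file_name|>"
FIM_BEGIN = "<|fim▁begin|>"
FIM_END = "<|fim▁end|>"
HOLE_MARKERS = ("<|fim_middle|>", "<|fim▁hole|>")  # assumption: both occur in this dataset


def parse_fim_prompt(prompt: str):
    """Return (filename, prefix, suffix) for one prompt string.

    Assumes the prompt starts with the file-name header and contains
    exactly one hole marker, as in the rows shown below.
    """
    filename = prompt.split(FILE_START, 1)[1].split(FILE_END, 1)[0]
    body = prompt.split(FIM_BEGIN, 1)[1]
    if body.endswith(FIM_END):
        body = body[: -len(FIM_END)]
    for marker in HOLE_MARKERS:
        if marker in body:
            prefix, suffix = body.split(marker, 1)
            return filename, prefix, suffix
    return filename, body, ""  # no hole marker found: whole body is prefix


def reconstruct(prompt: str, completion: str) -> str:
    """Splice the completion back into the hole to recover the original file."""
    _, prefix, suffix = parse_fim_prompt(prompt)
    return prefix + completion + suffix

For example, applying reconstruct() to the first prompt/completion pair below would yield the full StreamService.py with "return self.__service" restored inside getService.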
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): <|fim_middle|> service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
return self.__service
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): <|fim_middle|> def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd)
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): <|fim_middle|> def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, ))
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): <|fim_middle|> def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start()
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): <|fim_middle|> <|fim▁end|>
print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: <|fim_middle|> print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
return
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: <|fim_middle|> print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
print "[StreamService] has no service ref set" return
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: <|fim_middle|> self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: <|fim_middle|> self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): <|fim_middle|> self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: <|fim_middle|> def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start()
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: <|fim_middle|> self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
StreamServiceList.append(self.__service.__deref__())
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: <|fim_middle|> <|fim▁end|>
if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: <|fim_middle|> self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
StreamServiceList.remove(self.__service.__deref__())
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def <|fim_middle|>(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
__init__
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def <|fim_middle|>(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
serviceEvent
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def <|fim_middle|>(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
getService
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def <|fim_middle|>(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
handleCommand
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def <|fim_middle|>(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
recordEvent
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def <|fim_middle|>(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def execEnd(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
execBegin
<|file_name|>StreamService.py<|end_file_name|><|fim▁begin|>from Source import Source from Components.Element import cached from Components.SystemInfo import SystemInfo from enigma import eServiceReference StreamServiceList = [] class StreamService(Source): def __init__(self, navcore): Source.__init__(self) self.ref = None self.__service = None self.navcore = navcore def serviceEvent(self, event): pass @cached def getService(self): return self.__service service = property(getService) def handleCommand(self, cmd): print "[StreamService] handle command", cmd self.ref = eServiceReference(cmd) def recordEvent(self, service, event): if service is self.__service: return print "[StreamService] RECORD event for us:", service self.changed((self.CHANGED_ALL, )) def execBegin(self): if self.ref is None: print "[StreamService] has no service ref set" return print "[StreamService]e execBegin", self.ref.toString() if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]: from Screens.InfoBar import InfoBar if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown: hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP() print "[StreamService] try to disable pip before start stream" if hasattr(InfoBar.instance.session, 'pip'): del InfoBar.instance.session.pip InfoBar.instance.session.pipshown = False self.__service = self.navcore.recordService(self.ref) self.navcore.record_event.append(self.recordEvent) if self.__service is not None: if self.__service.__deref__() not in StreamServiceList: StreamServiceList.append(self.__service.__deref__()) self.__service.prepareStreaming() self.__service.start() def <|fim_middle|>(self): print "[StreamService] execEnd", self.ref.toString() self.navcore.record_event.remove(self.recordEvent) if self.__service is not None: if self.__service.__deref__() in StreamServiceList: StreamServiceList.remove(self.__service.__deref__()) self.navcore.stopRecordService(self.__service) self.__service = None self.ref = None <|fim▁end|>
execEnd
<|file_name|>consumers.py<|end_file_name|><|fim▁begin|>import logging import requests from django.conf import settings from django.contrib.sites.models import Site from django.core.mail import EmailMultiAlternatives from django.template.loader import get_template from django.utils import timezone from invitations.models import Invitation logger = logging.getLogger('email') sentry = logging.getLogger('sentry') def send_invite(message): try: invite = Invitation.objects.get( id=message.get('id'), status__in=[Invitation.PENDING, Invitation.ERROR], ) except Invitation.DoesNotExist: sentry.error("Invitation to send not found", exc_info=True, extra={'message': message}) return invite.status = Invitation.PROCESSING invite.save() context = { 'invite': invite,<|fim▁hole|> 'domain': Site.objects.get_current().domain, } subject = "[ContactOtter] Invitation to join ContactOtter from %s" % (invite.sender) if invite.book: subject = "[ContactOtter] Invitation to share %s's contact book" % (invite.sender) txt = get_template('email/invitation.txt').render(context) html = get_template('email/invitation.html').render(context) try: message = EmailMultiAlternatives( subject=subject, body=txt, from_email="ContactOtter <[email protected]>", to=[invite.email,], ) message.attach_alternative(html, "text/html") message.send() invite.status = Invitation.SENT invite.sent = timezone.now() invite.save() except: sentry.exception('Problem sending invite', exc_info=True, extra={'invite_id': invite.id}) invite.status = Invitation.ERROR invite.save()<|fim▁end|>
<|file_name|>consumers.py<|end_file_name|><|fim▁begin|>import logging import requests from django.conf import settings from django.contrib.sites.models import Site from django.core.mail import EmailMultiAlternatives from django.template.loader import get_template from django.utils import timezone from invitations.models import Invitation logger = logging.getLogger('email') sentry = logging.getLogger('sentry') def send_invite(message): <|fim_middle|> <|fim▁end|>
try: invite = Invitation.objects.get( id=message.get('id'), status__in=[Invitation.PENDING, Invitation.ERROR], ) except Invitation.DoesNotExist: sentry.error("Invitation to send not found", exc_info=True, extra={'message': message}) return invite.status = Invitation.PROCESSING invite.save() context = { 'invite': invite, 'domain': Site.objects.get_current().domain, } subject = "[ContactOtter] Invitation to join ContactOtter from %s" % (invite.sender) if invite.book: subject = "[ContactOtter] Invitation to share %s's contact book" % (invite.sender) txt = get_template('email/invitation.txt').render(context) html = get_template('email/invitation.html').render(context) try: message = EmailMultiAlternatives( subject=subject, body=txt, from_email="ContactOtter <[email protected]>", to=[invite.email,], ) message.attach_alternative(html, "text/html") message.send() invite.status = Invitation.SENT invite.sent = timezone.now() invite.save() except: sentry.exception('Problem sending invite', exc_info=True, extra={'invite_id': invite.id}) invite.status = Invitation.ERROR invite.save()
<|file_name|>consumers.py<|end_file_name|><|fim▁begin|>import logging import requests from django.conf import settings from django.contrib.sites.models import Site from django.core.mail import EmailMultiAlternatives from django.template.loader import get_template from django.utils import timezone from invitations.models import Invitation logger = logging.getLogger('email') sentry = logging.getLogger('sentry') def send_invite(message): try: invite = Invitation.objects.get( id=message.get('id'), status__in=[Invitation.PENDING, Invitation.ERROR], ) except Invitation.DoesNotExist: sentry.error("Invitation to send not found", exc_info=True, extra={'message': message}) return invite.status = Invitation.PROCESSING invite.save() context = { 'invite': invite, 'domain': Site.objects.get_current().domain, } subject = "[ContactOtter] Invitation to join ContactOtter from %s" % (invite.sender) if invite.book: <|fim_middle|> txt = get_template('email/invitation.txt').render(context) html = get_template('email/invitation.html').render(context) try: message = EmailMultiAlternatives( subject=subject, body=txt, from_email="ContactOtter <[email protected]>", to=[invite.email,], ) message.attach_alternative(html, "text/html") message.send() invite.status = Invitation.SENT invite.sent = timezone.now() invite.save() except: sentry.exception('Problem sending invite', exc_info=True, extra={'invite_id': invite.id}) invite.status = Invitation.ERROR invite.save() <|fim▁end|>
subject = "[ContactOtter] Invitation to share %s's contact book" % (invite.sender)
<|file_name|>consumers.py<|end_file_name|><|fim▁begin|>import logging import requests from django.conf import settings from django.contrib.sites.models import Site from django.core.mail import EmailMultiAlternatives from django.template.loader import get_template from django.utils import timezone from invitations.models import Invitation logger = logging.getLogger('email') sentry = logging.getLogger('sentry') def <|fim_middle|>(message): try: invite = Invitation.objects.get( id=message.get('id'), status__in=[Invitation.PENDING, Invitation.ERROR], ) except Invitation.DoesNotExist: sentry.error("Invitation to send not found", exc_info=True, extra={'message': message}) return invite.status = Invitation.PROCESSING invite.save() context = { 'invite': invite, 'domain': Site.objects.get_current().domain, } subject = "[ContactOtter] Invitation to join ContactOtter from %s" % (invite.sender) if invite.book: subject = "[ContactOtter] Invitation to share %s's contact book" % (invite.sender) txt = get_template('email/invitation.txt').render(context) html = get_template('email/invitation.html').render(context) try: message = EmailMultiAlternatives( subject=subject, body=txt, from_email="ContactOtter <[email protected]>", to=[invite.email,], ) message.attach_alternative(html, "text/html") message.send() invite.status = Invitation.SENT invite.sent = timezone.now() invite.save() except: sentry.exception('Problem sending invite', exc_info=True, extra={'invite_id': invite.id}) invite.status = Invitation.ERROR invite.save() <|fim▁end|>
send_invite
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model):<|fim▁hole|> """ OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), } class sale_order(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order' def print_with_attachment(self, cr, user, ids, context={}): for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: print "Im Here i will go to print %s " % ol.name return True def __get_company_object(self, cr, uid): user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: raise except_osv(_('ERROR !'), _( 'There is no company configured for this user')) return user.company_id def _get_report_name(self, cr, uid, context): report = self.__get_company_object(cr, uid).sale_report_id if not report: rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id) return report.report_name def print_quotation(self, cr, uid, ids, context=None): pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True}<|fim▁end|>
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model): <|fim_middle|> class sale_order(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order' def print_with_attachment(self, cr, user, ids, context={}): for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: print "Im Here i will go to print %s " % ol.name return True def __get_company_object(self, cr, uid): user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: raise except_osv(_('ERROR !'), _( 'There is no company configured for this user')) return user.company_id def _get_report_name(self, cr, uid, context): report = self.__get_company_object(cr, uid).sale_report_id if not report: rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id) return report.report_name def print_quotation(self, cr, uid, ids, context=None): pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True} <|fim▁end|>
""" OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), }
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), } class sale_order(osv.Model): <|fim_middle|> <|fim▁end|>
""" OpenERP Model : sale_order_line """ _inherit = 'sale.order' def print_with_attachment(self, cr, user, ids, context={}): for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: print "Im Here i will go to print %s " % ol.name return True def __get_company_object(self, cr, uid): user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: raise except_osv(_('ERROR !'), _( 'There is no company configured for this user')) return user.company_id def _get_report_name(self, cr, uid, context): report = self.__get_company_object(cr, uid).sale_report_id if not report: rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id) return report.report_name def print_quotation(self, cr, uid, ids, context=None): pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True}
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), } class sale_order(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order' def print_with_attachment(self, cr, user, ids, context={}): <|fim_middle|> def __get_company_object(self, cr, uid): user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: raise except_osv(_('ERROR !'), _( 'There is no company configured for this user')) return user.company_id def _get_report_name(self, cr, uid, context): report = self.__get_company_object(cr, uid).sale_report_id if not report: rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id) return report.report_name def print_quotation(self, cr, uid, ids, context=None): pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True} <|fim▁end|>
for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: print "Im Here i will go to print %s " % ol.name return True
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), } class sale_order(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order' def print_with_attachment(self, cr, user, ids, context={}): for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: print "Im Here i will go to print %s " % ol.name return True def __get_company_object(self, cr, uid): <|fim_middle|> def _get_report_name(self, cr, uid, context): report = self.__get_company_object(cr, uid).sale_report_id if not report: rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id) return report.report_name def print_quotation(self, cr, uid, ids, context=None): pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True} <|fim▁end|>
user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: raise except_osv(_('ERROR !'), _( 'There is no company configured for this user')) return user.company_id
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), } class sale_order(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order' def print_with_attachment(self, cr, user, ids, context={}): for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: print "Im Here i will go to print %s " % ol.name return True def __get_company_object(self, cr, uid): user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: raise except_osv(_('ERROR !'), _( 'There is no company configured for this user')) return user.company_id def _get_report_name(self, cr, uid, context): <|fim_middle|> def print_quotation(self, cr, uid, ids, context=None): pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True} <|fim▁end|>
report = self.__get_company_object(cr, uid).sale_report_id if not report: rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id) return report.report_name
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), } class sale_order(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order' def print_with_attachment(self, cr, user, ids, context={}): for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: print "Im Here i will go to print %s " % ol.name return True def __get_company_object(self, cr, uid): user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: raise except_osv(_('ERROR !'), _( 'There is no company configured for this user')) return user.company_id def _get_report_name(self, cr, uid, context): report = self.__get_company_object(cr, uid).sale_report_id if not report: rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id) return report.report_name def print_quotation(self, cr, uid, ids, context=None): <|fim_middle|> <|fim▁end|>
pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True}
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), } class sale_order(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order' def print_with_attachment(self, cr, user, ids, context={}): for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: <|fim_middle|> return True def __get_company_object(self, cr, uid): user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: raise except_osv(_('ERROR !'), _( 'There is no company configured for this user')) return user.company_id def _get_report_name(self, cr, uid, context): report = self.__get_company_object(cr, uid).sale_report_id if not report: rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id) return report.report_name def print_quotation(self, cr, uid, ids, context=None): pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True} <|fim▁end|>
print "Im Here i will go to print %s " % ol.name
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), } class sale_order(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order' def print_with_attachment(self, cr, user, ids, context={}): for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: print "Im Here i will go to print %s " % ol.name return True def __get_company_object(self, cr, uid): user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: <|fim_middle|> return user.company_id def _get_report_name(self, cr, uid, context): report = self.__get_company_object(cr, uid).sale_report_id if not report: rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id) return report.report_name def print_quotation(self, cr, uid, ids, context=None): pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True} <|fim▁end|>
raise except_osv(_('ERROR !'), _( 'There is no company configured for this user'))
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), } class sale_order(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order' def print_with_attachment(self, cr, user, ids, context={}): for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: print "Im Here i will go to print %s " % ol.name return True def __get_company_object(self, cr, uid): user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: raise except_osv(_('ERROR !'), _( 'There is no company configured for this user')) return user.company_id def _get_report_name(self, cr, uid, context): report = self.__get_company_object(cr, uid).sale_report_id if not report: <|fim_middle|> return report.report_name def print_quotation(self, cr, uid, ids, context=None): pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True} <|fim▁end|>
rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id)
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), } class sale_order(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order' def <|fim_middle|>(self, cr, user, ids, context={}): for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: print "Im Here i will go to print %s " % ol.name return True def __get_company_object(self, cr, uid): user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: raise except_osv(_('ERROR !'), _( 'There is no company configured for this user')) return user.company_id def _get_report_name(self, cr, uid, context): report = self.__get_company_object(cr, uid).sale_report_id if not report: rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id) return report.report_name def print_quotation(self, cr, uid, ids, context=None): pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True} <|fim▁end|>
print_with_attachment
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), } class sale_order(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order' def print_with_attachment(self, cr, user, ids, context={}): for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: print "Im Here i will go to print %s " % ol.name return True def <|fim_middle|>(self, cr, uid): user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: raise except_osv(_('ERROR !'), _( 'There is no company configured for this user')) return user.company_id def _get_report_name(self, cr, uid, context): report = self.__get_company_object(cr, uid).sale_report_id if not report: rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id) return report.report_name def print_quotation(self, cr, uid, ids, context=None): pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True} <|fim▁end|>
__get_company_object
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), } class sale_order(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order' def print_with_attachment(self, cr, user, ids, context={}): for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: print "Im Here i will go to print %s " % ol.name return True def __get_company_object(self, cr, uid): user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: raise except_osv(_('ERROR !'), _( 'There is no company configured for this user')) return user.company_id def <|fim_middle|>(self, cr, uid, context): report = self.__get_company_object(cr, uid).sale_report_id if not report: rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id) return report.report_name def print_quotation(self, cr, uid, ids, context=None): pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True} <|fim▁end|>
_get_report_name
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from openerp.osv import fields, osv from openerp.tools.translate import _ class sale_order_line(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order.line' _columns = { 'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this option, the first attachment related to the product_id marked as brochure will be printed as extra info with sale order"""), } class sale_order(osv.Model): """ OpenERP Model : sale_order_line """ _inherit = 'sale.order' def print_with_attachment(self, cr, user, ids, context={}): for o in self.browse(cr, user, ids, context): for ol in o.order_line: if ol.att_bro: print "Im Here i will go to print %s " % ol.name return True def __get_company_object(self, cr, uid): user = self.pool.get('res.users').browse(cr, uid, uid) print user if not user.company_id: raise except_osv(_('ERROR !'), _( 'There is no company configured for this user')) return user.company_id def _get_report_name(self, cr, uid, context): report = self.__get_company_object(cr, uid).sale_report_id if not report: rep_id = self.pool.get("ir.actions.report.xml").search( cr, uid, [('model', '=', 'sale.order'), ], order="id")[0] report = self.pool.get( "ir.actions.report.xml").browse(cr, uid, rep_id) return report.report_name def <|fim_middle|>(self, cr, uid, ids, context=None): pq = super(sale_order, self).print_quotation(cr,uid,ids, context) return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid, context), 'datas': pq['datas'], 'nodestroy': True} <|fim▁end|>
print_quotation
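The order.py rows above read company.sale_report_id as the preferred quotation report and only fall back to the first ir.actions.report.xml registered for 'sale.order' when it is empty. The column itself is not defined in these rows; a minimal sketch of how it is presumably declared on res.company elsewhere in the module follows (field label and help text are assumptions).

class res_company(osv.Model):
    _inherit = 'res.company'
    _columns = {
        # Assumed definition: the many2one consulted by _get_report_name above.
        'sale_report_id': fields.many2one('ir.actions.report.xml',
                                          'Sale Order Report',
                                          help="Report used when printing quotations"),
    }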
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None):<|fim▁hole|> return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size<|fim▁end|>
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): <|fim_middle|> def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
"""An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. :rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data)
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): <|fim_middle|> def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. :rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. 
:return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data)
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): <|fim_middle|> def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. :rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. 
:rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
"""Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result)
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): <|fim_middle|> def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. 
:rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
"""Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. :rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr)
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): <|fim_middle|> def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
"""Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()])
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): <|fim_middle|> def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
"""Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): <|fim_middle|> def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
return MD2Hash(data)
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): <|fim_middle|> # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
"""Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data)
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: <|fim_middle|> self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
raise ValueError("Error %d while instantiating MD2" % result)
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: <|fim_middle|> def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
self.update(data)
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: <|fim_middle|> def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
raise ValueError("Error %d while instantiating MD2" % result)
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: <|fim_middle|> return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
raise ValueError("Error %d while instantiating MD2" % result)
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: <|fim_middle|> return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
raise ValueError("Error %d while copying MD2" % result)
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def <|fim_middle|>(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
__init__
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def <|fim_middle|>(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
update
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def <|fim_middle|>(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
digest
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def <|fim_middle|>(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
hexdigest
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def <|fim_middle|>(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
copy
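The `copy()` docstring in these rows notes that a cloned hash object lets you compute digests of strings sharing a common initial substring without re-hashing the prefix. A small sketch of that pattern, again assuming pycryptodomex:

from Cryptodome.Hash import MD2

prefix = MD2.new(b"GET /index.html HTTP/1.1\r\n")  # hash the shared prefix once

h1 = prefix.copy()
h1.update(b"Host: example.org\r\n")

h2 = prefix.copy()
h2.update(b"Host: example.net\r\n")

# Each copy carries the prefix state, so only the differing tails are hashed.
print(h1.hexdigest())
print(h2.hexdigest())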
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def <|fim_middle|>(self, data=None): return MD2Hash(data) def new(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
new
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_md2_lib = load_pycryptodome_raw_lib( "Cryptodome.Hash._MD2", """ int md2_init(void **shaState); int md2_destroy(void *shaState); int md2_update(void *hs, const uint8_t *buf, size_t len); int md2_digest(const void *shaState, uint8_t digest[20]); int md2_copy(const void *src, void *dst); """) class MD2Hash(object): """An MD2 hash object. Do not instantiate directly. Use the :func:`new` function. :ivar oid: ASN.1 Object ID :vartype oid: string :ivar block_size: the size in bytes of the internal message block, input to the compression function :vartype block_size: integer :ivar digest_size: the size in bytes of the resulting hash :vartype digest_size: integer """ # The size of the resulting hash in bytes. digest_size = 16 # The internal block size of the hash algorithm in bytes. block_size = 64 # ASN.1 Object ID oid = "1.2.840.113549.2.2" def __init__(self, data=None): state = VoidPointer() result = _raw_md2_lib.md2_init(state.address_of()) if result: raise ValueError("Error %d while instantiating MD2" % result) self._state = SmartPointer(state.get(), _raw_md2_lib.md2_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Args: data (byte string/byte array/memoryview): The next chunk of the message being hashed. """ result = _raw_md2_lib.md2_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating MD2" % result) def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Binary form. 
:rtype: byte string """ bfr = create_string_buffer(self.digest_size) result = _raw_md2_lib.md2_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating MD2" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. :return: The hash digest, computed over the data processed so far. Hexadecimal encoded. :rtype: string """ return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): """Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :return: A hash object of the same type """ clone = MD2Hash() result = _raw_md2_lib.md2_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying MD2" % result) return clone def new(self, data=None): return MD2Hash(data) def <|fim_middle|>(data=None): """Create a new hash object. :parameter data: Optional. The very first chunk of the message to hash. It is equivalent to an early call to :meth:`MD2Hash.update`. :type data: byte string/byte array/memoryview :Return: A :class:`MD2Hash` hash object """ return MD2Hash().new(data) # The size of the resulting hash in bytes. digest_size = MD2Hash.digest_size # The internal block size of the hash algorithm in bytes. block_size = MD2Hash.block_size <|fim▁end|>
new
<|file_name|>postfix.py<|end_file_name|><|fim▁begin|>import sys from stack import Stack def parse_expression_into_parts(expression): """ Parse expression into list of parts :rtype : list :param expression: str # i.e. "2 * 3 + ( 2 - 3 )" """ raise NotImplementedError("complete me!") def evaluate_expression(a, b, op): raise NotImplementedError("complete me!") def evaluate_postfix(parts): raise NotImplementedError("complete me!") if __name__ == "__main__": expr = None<|fim▁hole|> else: print 'Usage: python postfix.py "<expr>" -- i.e. python postfix.py "9 1 3 + 2 * -"' print "Spaces are required between every term."<|fim▁end|>
    if len(sys.argv) > 1:
        expr = sys.argv[1]
        parts = parse_expression_into_parts(expr)
        print "Evaluating %s == %s" % (expr, evaluate_postfix(parts))
<|file_name|>postfix.py<|end_file_name|><|fim▁begin|>import sys from stack import Stack def parse_expression_into_parts(expression): <|fim_middle|> def evaluate_expression(a, b, op): raise NotImplementedError("complete me!") def evaluate_postfix(parts): raise NotImplementedError("complete me!") if __name__ == "__main__": expr = None if len(sys.argv) > 1: expr = sys.argv[1] parts = parse_expression_into_parts(expr) print "Evaluating %s == %s" % (expr, evaluate_postfix(parts)) else: print 'Usage: python postfix.py "<expr>" -- i.e. python postfix.py "9 1 3 + 2 * -"' print "Spaces are required between every term." <|fim▁end|>
""" Parse expression into list of parts :rtype : list :param expression: str # i.e. "2 * 3 + ( 2 - 3 )" """ raise NotImplementedError("complete me!")
<|file_name|>postfix.py<|end_file_name|><|fim▁begin|>import sys from stack import Stack def parse_expression_into_parts(expression): """ Parse expression into list of parts :rtype : list :param expression: str # i.e. "2 * 3 + ( 2 - 3 )" """ raise NotImplementedError("complete me!") def evaluate_expression(a, b, op): <|fim_middle|> def evaluate_postfix(parts): raise NotImplementedError("complete me!") if __name__ == "__main__": expr = None if len(sys.argv) > 1: expr = sys.argv[1] parts = parse_expression_into_parts(expr) print "Evaluating %s == %s" % (expr, evaluate_postfix(parts)) else: print 'Usage: python postfix.py "<expr>" -- i.e. python postfix.py "9 1 3 + 2 * -"' print "Spaces are required between every term." <|fim▁end|>
raise NotImplementedError("complete me!")
<|file_name|>postfix.py<|end_file_name|><|fim▁begin|>import sys from stack import Stack def parse_expression_into_parts(expression): """ Parse expression into list of parts :rtype : list :param expression: str # i.e. "2 * 3 + ( 2 - 3 )" """ raise NotImplementedError("complete me!") def evaluate_expression(a, b, op): raise NotImplementedError("complete me!") def evaluate_postfix(parts): <|fim_middle|> if __name__ == "__main__": expr = None if len(sys.argv) > 1: expr = sys.argv[1] parts = parse_expression_into_parts(expr) print "Evaluating %s == %s" % (expr, evaluate_postfix(parts)) else: print 'Usage: python postfix.py "<expr>" -- i.e. python postfix.py "9 1 3 + 2 * -"' print "Spaces are required between every term." <|fim▁end|>
raise NotImplementedError("complete me!")
<|file_name|>postfix.py<|end_file_name|><|fim▁begin|>import sys from stack import Stack def parse_expression_into_parts(expression): """ Parse expression into list of parts :rtype : list :param expression: str # i.e. "2 * 3 + ( 2 - 3 )" """ raise NotImplementedError("complete me!") def evaluate_expression(a, b, op): raise NotImplementedError("complete me!") def evaluate_postfix(parts): raise NotImplementedError("complete me!") if __name__ == "__main__": <|fim_middle|> <|fim▁end|>
    expr = None
    if len(sys.argv) > 1:
        expr = sys.argv[1]
        parts = parse_expression_into_parts(expr)
        print "Evaluating %s == %s" % (expr, evaluate_postfix(parts))
    else:
        print 'Usage: python postfix.py "<expr>" -- i.e. python postfix.py "9 1 3 + 2 * -"'
        print "Spaces are required between every term."
<|file_name|>postfix.py<|end_file_name|><|fim▁begin|>import sys from stack import Stack def parse_expression_into_parts(expression): """ Parse expression into list of parts :rtype : list :param expression: str # i.e. "2 * 3 + ( 2 - 3 )" """ raise NotImplementedError("complete me!") def evaluate_expression(a, b, op): raise NotImplementedError("complete me!") def evaluate_postfix(parts): raise NotImplementedError("complete me!") if __name__ == "__main__": expr = None if len(sys.argv) > 1: <|fim_middle|> else: print 'Usage: python postfix.py "<expr>" -- i.e. python postfix.py "9 1 3 + 2 * -"' print "Spaces are required between every term." <|fim▁end|>
        expr = sys.argv[1]
        parts = parse_expression_into_parts(expr)
        print "Evaluating %s == %s" % (expr, evaluate_postfix(parts))
<|file_name|>postfix.py<|end_file_name|><|fim▁begin|>import sys from stack import Stack def parse_expression_into_parts(expression): """ Parse expression into list of parts :rtype : list :param expression: str # i.e. "2 * 3 + ( 2 - 3 )" """ raise NotImplementedError("complete me!") def evaluate_expression(a, b, op): raise NotImplementedError("complete me!") def evaluate_postfix(parts): raise NotImplementedError("complete me!") if __name__ == "__main__": expr = None if len(sys.argv) > 1: expr = sys.argv[1] parts = parse_expression_into_parts(expr) print "Evaluating %s == %s" % (expr, evaluate_postfix(parts)) else: <|fim_middle|> <|fim▁end|>
        print 'Usage: python postfix.py "<expr>" -- i.e. python postfix.py "9 1 3 + 2 * -"'
        print "Spaces are required between every term."
<|file_name|>postfix.py<|end_file_name|><|fim▁begin|>import sys from stack import Stack def <|fim_middle|>(expression): """ Parse expression into list of parts :rtype : list :param expression: str # i.e. "2 * 3 + ( 2 - 3 )" """ raise NotImplementedError("complete me!") def evaluate_expression(a, b, op): raise NotImplementedError("complete me!") def evaluate_postfix(parts): raise NotImplementedError("complete me!") if __name__ == "__main__": expr = None if len(sys.argv) > 1: expr = sys.argv[1] parts = parse_expression_into_parts(expr) print "Evaluating %s == %s" % (expr, evaluate_postfix(parts)) else: print 'Usage: python postfix.py "<expr>" -- i.e. python postfix.py "9 1 3 + 2 * -"' print "Spaces are required between every term." <|fim▁end|>
parse_expression_into_parts
<|file_name|>postfix.py<|end_file_name|><|fim▁begin|>import sys from stack import Stack def parse_expression_into_parts(expression): """ Parse expression into list of parts :rtype : list :param expression: str # i.e. "2 * 3 + ( 2 - 3 )" """ raise NotImplementedError("complete me!") def <|fim_middle|>(a, b, op): raise NotImplementedError("complete me!") def evaluate_postfix(parts): raise NotImplementedError("complete me!") if __name__ == "__main__": expr = None if len(sys.argv) > 1: expr = sys.argv[1] parts = parse_expression_into_parts(expr) print "Evaluating %s == %s" % (expr, evaluate_postfix(parts)) else: print 'Usage: python postfix.py "<expr>" -- i.e. python postfix.py "9 1 3 + 2 * -"' print "Spaces are required between every term." <|fim▁end|>
evaluate_expression
<|file_name|>postfix.py<|end_file_name|><|fim▁begin|>import sys from stack import Stack def parse_expression_into_parts(expression): """ Parse expression into list of parts :rtype : list :param expression: str # i.e. "2 * 3 + ( 2 - 3 )" """ raise NotImplementedError("complete me!") def evaluate_expression(a, b, op): raise NotImplementedError("complete me!") def <|fim_middle|>(parts): raise NotImplementedError("complete me!") if __name__ == "__main__": expr = None if len(sys.argv) > 1: expr = sys.argv[1] parts = parse_expression_into_parts(expr) print "Evaluating %s == %s" % (expr, evaluate_postfix(parts)) else: print 'Usage: python postfix.py "<expr>" -- i.e. python postfix.py "9 1 3 + 2 * -"' print "Spaces are required between every term." <|fim▁end|>
evaluate_postfix
<|file_name|>ensure_exception_handled.py<|end_file_name|><|fim▁begin|>import traceback class EnsureExceptionHandledGuard: """Helper for ensuring that Future's exceptions were handled. This solves a nasty problem with Futures and Tasks that have an exception set: if nobody asks for the exception, the exception is never logged. This violates the Zen of Python: 'Errors should never pass silently. Unless explicitly silenced.' However, we don't want to log the exception as soon as set_exception() is called: if the calling code is written properly, it will get the exception and handle it properly. But we *do* want to log it if result() or exception() was never called -- otherwise developers waste a lot of time wondering why their buggy code fails silently. An earlier attempt added a __del__() method to the Future class itself, but this backfired because the presence of __del__() prevents garbage collection from breaking cycles. A way out of this catch-22 is to avoid having a __del__() method on the Future class itself, but instead to have a reference to a helper object with a __del__() method that logs the traceback, where we ensure that the helper object doesn't participate in cycles, and only the Future has a reference to it. The helper object is added when set_exception() is called. When the Future is collected, and the helper is present, the helper object is also collected, and its __del__() method will log the traceback. When the Future's result() or exception() method is called (and a helper object is present), it removes the the helper object, after calling its clear() method to prevent it from logging. One downside is that we do a fair amount of work to extract the traceback from the exception, even when it is never logged. It would seem cheaper to just store the exception object, but that<|fim▁hole|> references the traceback, which references stack frames, which may reference the Future, which references the _EnsureExceptionHandledGuard, and then the _EnsureExceptionHandledGuard would be included in a cycle, which is what we're trying to avoid! As an optimization, we don't immediately format the exception; we only do the work when activate() is called, which call is delayed until after all the Future's callbacks have run. Since usually a Future has at least one callback (typically set by 'yield from') and usually that callback extracts the callback, thereby removing the need to format the exception. PS. I don't claim credit for this solution. I first heard of it in a discussion about closing files when they are collected. """ __slots__ = ['exc', 'tb', 'hndl', 'cls'] def __init__(self, exc, handler): self.exc = exc self.hndl = handler self.cls = type(exc) self.tb = None def activate(self): exc = self.exc if exc is not None: self.exc = None self.tb = traceback.format_exception(exc.__class__, exc, exc.__traceback__) def clear(self): self.exc = None self.tb = None def __del__(self): if self.tb: self.hndl(self.cls, self.tb)<|fim▁end|>
<|file_name|>ensure_exception_handled.py<|end_file_name|><|fim▁begin|>import traceback class EnsureExceptionHandledGuard: <|fim_middle|> <|fim▁end|>
"""Helper for ensuring that Future's exceptions were handled. This solves a nasty problem with Futures and Tasks that have an exception set: if nobody asks for the exception, the exception is never logged. This violates the Zen of Python: 'Errors should never pass silently. Unless explicitly silenced.' However, we don't want to log the exception as soon as set_exception() is called: if the calling code is written properly, it will get the exception and handle it properly. But we *do* want to log it if result() or exception() was never called -- otherwise developers waste a lot of time wondering why their buggy code fails silently. An earlier attempt added a __del__() method to the Future class itself, but this backfired because the presence of __del__() prevents garbage collection from breaking cycles. A way out of this catch-22 is to avoid having a __del__() method on the Future class itself, but instead to have a reference to a helper object with a __del__() method that logs the traceback, where we ensure that the helper object doesn't participate in cycles, and only the Future has a reference to it. The helper object is added when set_exception() is called. When the Future is collected, and the helper is present, the helper object is also collected, and its __del__() method will log the traceback. When the Future's result() or exception() method is called (and a helper object is present), it removes the the helper object, after calling its clear() method to prevent it from logging. One downside is that we do a fair amount of work to extract the traceback from the exception, even when it is never logged. It would seem cheaper to just store the exception object, but that references the traceback, which references stack frames, which may reference the Future, which references the _EnsureExceptionHandledGuard, and then the _EnsureExceptionHandledGuard would be included in a cycle, which is what we're trying to avoid! As an optimization, we don't immediately format the exception; we only do the work when activate() is called, which call is delayed until after all the Future's callbacks have run. Since usually a Future has at least one callback (typically set by 'yield from') and usually that callback extracts the callback, thereby removing the need to format the exception. PS. I don't claim credit for this solution. I first heard of it in a discussion about closing files when they are collected. """ __slots__ = ['exc', 'tb', 'hndl', 'cls'] def __init__(self, exc, handler): self.exc = exc self.hndl = handler self.cls = type(exc) self.tb = None def activate(self): exc = self.exc if exc is not None: self.exc = None self.tb = traceback.format_exception(exc.__class__, exc, exc.__traceback__) def clear(self): self.exc = None self.tb = None def __del__(self): if self.tb: self.hndl(self.cls, self.tb)
<|file_name|>ensure_exception_handled.py<|end_file_name|><|fim▁begin|>import traceback class EnsureExceptionHandledGuard: """Helper for ensuring that Future's exceptions were handled. This solves a nasty problem with Futures and Tasks that have an exception set: if nobody asks for the exception, the exception is never logged. This violates the Zen of Python: 'Errors should never pass silently. Unless explicitly silenced.' However, we don't want to log the exception as soon as set_exception() is called: if the calling code is written properly, it will get the exception and handle it properly. But we *do* want to log it if result() or exception() was never called -- otherwise developers waste a lot of time wondering why their buggy code fails silently. An earlier attempt added a __del__() method to the Future class itself, but this backfired because the presence of __del__() prevents garbage collection from breaking cycles. A way out of this catch-22 is to avoid having a __del__() method on the Future class itself, but instead to have a reference to a helper object with a __del__() method that logs the traceback, where we ensure that the helper object doesn't participate in cycles, and only the Future has a reference to it. The helper object is added when set_exception() is called. When the Future is collected, and the helper is present, the helper object is also collected, and its __del__() method will log the traceback. When the Future's result() or exception() method is called (and a helper object is present), it removes the the helper object, after calling its clear() method to prevent it from logging. One downside is that we do a fair amount of work to extract the traceback from the exception, even when it is never logged. It would seem cheaper to just store the exception object, but that references the traceback, which references stack frames, which may reference the Future, which references the _EnsureExceptionHandledGuard, and then the _EnsureExceptionHandledGuard would be included in a cycle, which is what we're trying to avoid! As an optimization, we don't immediately format the exception; we only do the work when activate() is called, which call is delayed until after all the Future's callbacks have run. Since usually a Future has at least one callback (typically set by 'yield from') and usually that callback extracts the callback, thereby removing the need to format the exception. PS. I don't claim credit for this solution. I first heard of it in a discussion about closing files when they are collected. """ __slots__ = ['exc', 'tb', 'hndl', 'cls'] def __init__(self, exc, handler): <|fim_middle|> def activate(self): exc = self.exc if exc is not None: self.exc = None self.tb = traceback.format_exception(exc.__class__, exc, exc.__traceback__) def clear(self): self.exc = None self.tb = None def __del__(self): if self.tb: self.hndl(self.cls, self.tb) <|fim▁end|>
self.exc = exc self.hndl = handler self.cls = type(exc) self.tb = None
<|file_name|>ensure_exception_handled.py<|end_file_name|><|fim▁begin|>import traceback class EnsureExceptionHandledGuard: """Helper for ensuring that Future's exceptions were handled. This solves a nasty problem with Futures and Tasks that have an exception set: if nobody asks for the exception, the exception is never logged. This violates the Zen of Python: 'Errors should never pass silently. Unless explicitly silenced.' However, we don't want to log the exception as soon as set_exception() is called: if the calling code is written properly, it will get the exception and handle it properly. But we *do* want to log it if result() or exception() was never called -- otherwise developers waste a lot of time wondering why their buggy code fails silently. An earlier attempt added a __del__() method to the Future class itself, but this backfired because the presence of __del__() prevents garbage collection from breaking cycles. A way out of this catch-22 is to avoid having a __del__() method on the Future class itself, but instead to have a reference to a helper object with a __del__() method that logs the traceback, where we ensure that the helper object doesn't participate in cycles, and only the Future has a reference to it. The helper object is added when set_exception() is called. When the Future is collected, and the helper is present, the helper object is also collected, and its __del__() method will log the traceback. When the Future's result() or exception() method is called (and a helper object is present), it removes the the helper object, after calling its clear() method to prevent it from logging. One downside is that we do a fair amount of work to extract the traceback from the exception, even when it is never logged. It would seem cheaper to just store the exception object, but that references the traceback, which references stack frames, which may reference the Future, which references the _EnsureExceptionHandledGuard, and then the _EnsureExceptionHandledGuard would be included in a cycle, which is what we're trying to avoid! As an optimization, we don't immediately format the exception; we only do the work when activate() is called, which call is delayed until after all the Future's callbacks have run. Since usually a Future has at least one callback (typically set by 'yield from') and usually that callback extracts the callback, thereby removing the need to format the exception. PS. I don't claim credit for this solution. I first heard of it in a discussion about closing files when they are collected. """ __slots__ = ['exc', 'tb', 'hndl', 'cls'] def __init__(self, exc, handler): self.exc = exc self.hndl = handler self.cls = type(exc) self.tb = None def activate(self): <|fim_middle|> def clear(self): self.exc = None self.tb = None def __del__(self): if self.tb: self.hndl(self.cls, self.tb) <|fim▁end|>
exc = self.exc if exc is not None: self.exc = None self.tb = traceback.format_exception(exc.__class__, exc, exc.__traceback__)
<|file_name|>ensure_exception_handled.py<|end_file_name|><|fim▁begin|>import traceback class EnsureExceptionHandledGuard: """Helper for ensuring that Future's exceptions were handled. This solves a nasty problem with Futures and Tasks that have an exception set: if nobody asks for the exception, the exception is never logged. This violates the Zen of Python: 'Errors should never pass silently. Unless explicitly silenced.' However, we don't want to log the exception as soon as set_exception() is called: if the calling code is written properly, it will get the exception and handle it properly. But we *do* want to log it if result() or exception() was never called -- otherwise developers waste a lot of time wondering why their buggy code fails silently. An earlier attempt added a __del__() method to the Future class itself, but this backfired because the presence of __del__() prevents garbage collection from breaking cycles. A way out of this catch-22 is to avoid having a __del__() method on the Future class itself, but instead to have a reference to a helper object with a __del__() method that logs the traceback, where we ensure that the helper object doesn't participate in cycles, and only the Future has a reference to it. The helper object is added when set_exception() is called. When the Future is collected, and the helper is present, the helper object is also collected, and its __del__() method will log the traceback. When the Future's result() or exception() method is called (and a helper object is present), it removes the the helper object, after calling its clear() method to prevent it from logging. One downside is that we do a fair amount of work to extract the traceback from the exception, even when it is never logged. It would seem cheaper to just store the exception object, but that references the traceback, which references stack frames, which may reference the Future, which references the _EnsureExceptionHandledGuard, and then the _EnsureExceptionHandledGuard would be included in a cycle, which is what we're trying to avoid! As an optimization, we don't immediately format the exception; we only do the work when activate() is called, which call is delayed until after all the Future's callbacks have run. Since usually a Future has at least one callback (typically set by 'yield from') and usually that callback extracts the callback, thereby removing the need to format the exception. PS. I don't claim credit for this solution. I first heard of it in a discussion about closing files when they are collected. """ __slots__ = ['exc', 'tb', 'hndl', 'cls'] def __init__(self, exc, handler): self.exc = exc self.hndl = handler self.cls = type(exc) self.tb = None def activate(self): exc = self.exc if exc is not None: self.exc = None self.tb = traceback.format_exception(exc.__class__, exc, exc.__traceback__) def clear(self): <|fim_middle|> def __del__(self): if self.tb: self.hndl(self.cls, self.tb) <|fim▁end|>
self.exc = None self.tb = None
<|file_name|>ensure_exception_handled.py<|end_file_name|><|fim▁begin|>import traceback class EnsureExceptionHandledGuard: """Helper for ensuring that Future's exceptions were handled. This solves a nasty problem with Futures and Tasks that have an exception set: if nobody asks for the exception, the exception is never logged. This violates the Zen of Python: 'Errors should never pass silently. Unless explicitly silenced.' However, we don't want to log the exception as soon as set_exception() is called: if the calling code is written properly, it will get the exception and handle it properly. But we *do* want to log it if result() or exception() was never called -- otherwise developers waste a lot of time wondering why their buggy code fails silently. An earlier attempt added a __del__() method to the Future class itself, but this backfired because the presence of __del__() prevents garbage collection from breaking cycles. A way out of this catch-22 is to avoid having a __del__() method on the Future class itself, but instead to have a reference to a helper object with a __del__() method that logs the traceback, where we ensure that the helper object doesn't participate in cycles, and only the Future has a reference to it. The helper object is added when set_exception() is called. When the Future is collected, and the helper is present, the helper object is also collected, and its __del__() method will log the traceback. When the Future's result() or exception() method is called (and a helper object is present), it removes the the helper object, after calling its clear() method to prevent it from logging. One downside is that we do a fair amount of work to extract the traceback from the exception, even when it is never logged. It would seem cheaper to just store the exception object, but that references the traceback, which references stack frames, which may reference the Future, which references the _EnsureExceptionHandledGuard, and then the _EnsureExceptionHandledGuard would be included in a cycle, which is what we're trying to avoid! As an optimization, we don't immediately format the exception; we only do the work when activate() is called, which call is delayed until after all the Future's callbacks have run. Since usually a Future has at least one callback (typically set by 'yield from') and usually that callback extracts the callback, thereby removing the need to format the exception. PS. I don't claim credit for this solution. I first heard of it in a discussion about closing files when they are collected. """ __slots__ = ['exc', 'tb', 'hndl', 'cls'] def __init__(self, exc, handler): self.exc = exc self.hndl = handler self.cls = type(exc) self.tb = None def activate(self): exc = self.exc if exc is not None: self.exc = None self.tb = traceback.format_exception(exc.__class__, exc, exc.__traceback__) def clear(self): self.exc = None self.tb = None def __del__(self): <|fim_middle|> <|fim▁end|>
if self.tb: self.hndl(self.cls, self.tb)
<|file_name|>ensure_exception_handled.py<|end_file_name|><|fim▁begin|>import traceback class EnsureExceptionHandledGuard: """Helper for ensuring that Future's exceptions were handled. This solves a nasty problem with Futures and Tasks that have an exception set: if nobody asks for the exception, the exception is never logged. This violates the Zen of Python: 'Errors should never pass silently. Unless explicitly silenced.' However, we don't want to log the exception as soon as set_exception() is called: if the calling code is written properly, it will get the exception and handle it properly. But we *do* want to log it if result() or exception() was never called -- otherwise developers waste a lot of time wondering why their buggy code fails silently. An earlier attempt added a __del__() method to the Future class itself, but this backfired because the presence of __del__() prevents garbage collection from breaking cycles. A way out of this catch-22 is to avoid having a __del__() method on the Future class itself, but instead to have a reference to a helper object with a __del__() method that logs the traceback, where we ensure that the helper object doesn't participate in cycles, and only the Future has a reference to it. The helper object is added when set_exception() is called. When the Future is collected, and the helper is present, the helper object is also collected, and its __del__() method will log the traceback. When the Future's result() or exception() method is called (and a helper object is present), it removes the the helper object, after calling its clear() method to prevent it from logging. One downside is that we do a fair amount of work to extract the traceback from the exception, even when it is never logged. It would seem cheaper to just store the exception object, but that references the traceback, which references stack frames, which may reference the Future, which references the _EnsureExceptionHandledGuard, and then the _EnsureExceptionHandledGuard would be included in a cycle, which is what we're trying to avoid! As an optimization, we don't immediately format the exception; we only do the work when activate() is called, which call is delayed until after all the Future's callbacks have run. Since usually a Future has at least one callback (typically set by 'yield from') and usually that callback extracts the callback, thereby removing the need to format the exception. PS. I don't claim credit for this solution. I first heard of it in a discussion about closing files when they are collected. """ __slots__ = ['exc', 'tb', 'hndl', 'cls'] def __init__(self, exc, handler): self.exc = exc self.hndl = handler self.cls = type(exc) self.tb = None def activate(self): exc = self.exc if exc is not None: <|fim_middle|> def clear(self): self.exc = None self.tb = None def __del__(self): if self.tb: self.hndl(self.cls, self.tb) <|fim▁end|>
self.exc = None self.tb = traceback.format_exception(exc.__class__, exc, exc.__traceback__)
<|file_name|>ensure_exception_handled.py<|end_file_name|><|fim▁begin|>import traceback class EnsureExceptionHandledGuard: """Helper for ensuring that Future's exceptions were handled. This solves a nasty problem with Futures and Tasks that have an exception set: if nobody asks for the exception, the exception is never logged. This violates the Zen of Python: 'Errors should never pass silently. Unless explicitly silenced.' However, we don't want to log the exception as soon as set_exception() is called: if the calling code is written properly, it will get the exception and handle it properly. But we *do* want to log it if result() or exception() was never called -- otherwise developers waste a lot of time wondering why their buggy code fails silently. An earlier attempt added a __del__() method to the Future class itself, but this backfired because the presence of __del__() prevents garbage collection from breaking cycles. A way out of this catch-22 is to avoid having a __del__() method on the Future class itself, but instead to have a reference to a helper object with a __del__() method that logs the traceback, where we ensure that the helper object doesn't participate in cycles, and only the Future has a reference to it. The helper object is added when set_exception() is called. When the Future is collected, and the helper is present, the helper object is also collected, and its __del__() method will log the traceback. When the Future's result() or exception() method is called (and a helper object is present), it removes the the helper object, after calling its clear() method to prevent it from logging. One downside is that we do a fair amount of work to extract the traceback from the exception, even when it is never logged. It would seem cheaper to just store the exception object, but that references the traceback, which references stack frames, which may reference the Future, which references the _EnsureExceptionHandledGuard, and then the _EnsureExceptionHandledGuard would be included in a cycle, which is what we're trying to avoid! As an optimization, we don't immediately format the exception; we only do the work when activate() is called, which call is delayed until after all the Future's callbacks have run. Since usually a Future has at least one callback (typically set by 'yield from') and usually that callback extracts the callback, thereby removing the need to format the exception. PS. I don't claim credit for this solution. I first heard of it in a discussion about closing files when they are collected. """ __slots__ = ['exc', 'tb', 'hndl', 'cls'] def __init__(self, exc, handler): self.exc = exc self.hndl = handler self.cls = type(exc) self.tb = None def activate(self): exc = self.exc if exc is not None: self.exc = None self.tb = traceback.format_exception(exc.__class__, exc, exc.__traceback__) def clear(self): self.exc = None self.tb = None def __del__(self): if self.tb: <|fim_middle|> <|fim▁end|>
self.hndl(self.cls, self.tb)
<|file_name|>ensure_exception_handled.py<|end_file_name|><|fim▁begin|>import traceback class EnsureExceptionHandledGuard: """Helper for ensuring that Future's exceptions were handled. This solves a nasty problem with Futures and Tasks that have an exception set: if nobody asks for the exception, the exception is never logged. This violates the Zen of Python: 'Errors should never pass silently. Unless explicitly silenced.' However, we don't want to log the exception as soon as set_exception() is called: if the calling code is written properly, it will get the exception and handle it properly. But we *do* want to log it if result() or exception() was never called -- otherwise developers waste a lot of time wondering why their buggy code fails silently. An earlier attempt added a __del__() method to the Future class itself, but this backfired because the presence of __del__() prevents garbage collection from breaking cycles. A way out of this catch-22 is to avoid having a __del__() method on the Future class itself, but instead to have a reference to a helper object with a __del__() method that logs the traceback, where we ensure that the helper object doesn't participate in cycles, and only the Future has a reference to it. The helper object is added when set_exception() is called. When the Future is collected, and the helper is present, the helper object is also collected, and its __del__() method will log the traceback. When the Future's result() or exception() method is called (and a helper object is present), it removes the the helper object, after calling its clear() method to prevent it from logging. One downside is that we do a fair amount of work to extract the traceback from the exception, even when it is never logged. It would seem cheaper to just store the exception object, but that references the traceback, which references stack frames, which may reference the Future, which references the _EnsureExceptionHandledGuard, and then the _EnsureExceptionHandledGuard would be included in a cycle, which is what we're trying to avoid! As an optimization, we don't immediately format the exception; we only do the work when activate() is called, which call is delayed until after all the Future's callbacks have run. Since usually a Future has at least one callback (typically set by 'yield from') and usually that callback extracts the callback, thereby removing the need to format the exception. PS. I don't claim credit for this solution. I first heard of it in a discussion about closing files when they are collected. """ __slots__ = ['exc', 'tb', 'hndl', 'cls'] def <|fim_middle|>(self, exc, handler): self.exc = exc self.hndl = handler self.cls = type(exc) self.tb = None def activate(self): exc = self.exc if exc is not None: self.exc = None self.tb = traceback.format_exception(exc.__class__, exc, exc.__traceback__) def clear(self): self.exc = None self.tb = None def __del__(self): if self.tb: self.hndl(self.cls, self.tb) <|fim▁end|>
__init__
<|file_name|>ensure_exception_handled.py<|end_file_name|><|fim▁begin|>import traceback class EnsureExceptionHandledGuard: """Helper for ensuring that Future's exceptions were handled. This solves a nasty problem with Futures and Tasks that have an exception set: if nobody asks for the exception, the exception is never logged. This violates the Zen of Python: 'Errors should never pass silently. Unless explicitly silenced.' However, we don't want to log the exception as soon as set_exception() is called: if the calling code is written properly, it will get the exception and handle it properly. But we *do* want to log it if result() or exception() was never called -- otherwise developers waste a lot of time wondering why their buggy code fails silently. An earlier attempt added a __del__() method to the Future class itself, but this backfired because the presence of __del__() prevents garbage collection from breaking cycles. A way out of this catch-22 is to avoid having a __del__() method on the Future class itself, but instead to have a reference to a helper object with a __del__() method that logs the traceback, where we ensure that the helper object doesn't participate in cycles, and only the Future has a reference to it. The helper object is added when set_exception() is called. When the Future is collected, and the helper is present, the helper object is also collected, and its __del__() method will log the traceback. When the Future's result() or exception() method is called (and a helper object is present), it removes the the helper object, after calling its clear() method to prevent it from logging. One downside is that we do a fair amount of work to extract the traceback from the exception, even when it is never logged. It would seem cheaper to just store the exception object, but that references the traceback, which references stack frames, which may reference the Future, which references the _EnsureExceptionHandledGuard, and then the _EnsureExceptionHandledGuard would be included in a cycle, which is what we're trying to avoid! As an optimization, we don't immediately format the exception; we only do the work when activate() is called, which call is delayed until after all the Future's callbacks have run. Since usually a Future has at least one callback (typically set by 'yield from') and usually that callback extracts the callback, thereby removing the need to format the exception. PS. I don't claim credit for this solution. I first heard of it in a discussion about closing files when they are collected. """ __slots__ = ['exc', 'tb', 'hndl', 'cls'] def __init__(self, exc, handler): self.exc = exc self.hndl = handler self.cls = type(exc) self.tb = None def <|fim_middle|>(self): exc = self.exc if exc is not None: self.exc = None self.tb = traceback.format_exception(exc.__class__, exc, exc.__traceback__) def clear(self): self.exc = None self.tb = None def __del__(self): if self.tb: self.hndl(self.cls, self.tb) <|fim▁end|>
activate
<|file_name|>ensure_exception_handled.py<|end_file_name|><|fim▁begin|>import traceback class EnsureExceptionHandledGuard: """Helper for ensuring that Future's exceptions were handled. This solves a nasty problem with Futures and Tasks that have an exception set: if nobody asks for the exception, the exception is never logged. This violates the Zen of Python: 'Errors should never pass silently. Unless explicitly silenced.' However, we don't want to log the exception as soon as set_exception() is called: if the calling code is written properly, it will get the exception and handle it properly. But we *do* want to log it if result() or exception() was never called -- otherwise developers waste a lot of time wondering why their buggy code fails silently. An earlier attempt added a __del__() method to the Future class itself, but this backfired because the presence of __del__() prevents garbage collection from breaking cycles. A way out of this catch-22 is to avoid having a __del__() method on the Future class itself, but instead to have a reference to a helper object with a __del__() method that logs the traceback, where we ensure that the helper object doesn't participate in cycles, and only the Future has a reference to it. The helper object is added when set_exception() is called. When the Future is collected, and the helper is present, the helper object is also collected, and its __del__() method will log the traceback. When the Future's result() or exception() method is called (and a helper object is present), it removes the the helper object, after calling its clear() method to prevent it from logging. One downside is that we do a fair amount of work to extract the traceback from the exception, even when it is never logged. It would seem cheaper to just store the exception object, but that references the traceback, which references stack frames, which may reference the Future, which references the _EnsureExceptionHandledGuard, and then the _EnsureExceptionHandledGuard would be included in a cycle, which is what we're trying to avoid! As an optimization, we don't immediately format the exception; we only do the work when activate() is called, which call is delayed until after all the Future's callbacks have run. Since usually a Future has at least one callback (typically set by 'yield from') and usually that callback extracts the callback, thereby removing the need to format the exception. PS. I don't claim credit for this solution. I first heard of it in a discussion about closing files when they are collected. """ __slots__ = ['exc', 'tb', 'hndl', 'cls'] def __init__(self, exc, handler): self.exc = exc self.hndl = handler self.cls = type(exc) self.tb = None def activate(self): exc = self.exc if exc is not None: self.exc = None self.tb = traceback.format_exception(exc.__class__, exc, exc.__traceback__) def <|fim_middle|>(self): self.exc = None self.tb = None def __del__(self): if self.tb: self.hndl(self.cls, self.tb) <|fim▁end|>
clear
<|file_name|>ensure_exception_handled.py<|end_file_name|><|fim▁begin|>import traceback class EnsureExceptionHandledGuard: """Helper for ensuring that Future's exceptions were handled. This solves a nasty problem with Futures and Tasks that have an exception set: if nobody asks for the exception, the exception is never logged. This violates the Zen of Python: 'Errors should never pass silently. Unless explicitly silenced.' However, we don't want to log the exception as soon as set_exception() is called: if the calling code is written properly, it will get the exception and handle it properly. But we *do* want to log it if result() or exception() was never called -- otherwise developers waste a lot of time wondering why their buggy code fails silently. An earlier attempt added a __del__() method to the Future class itself, but this backfired because the presence of __del__() prevents garbage collection from breaking cycles. A way out of this catch-22 is to avoid having a __del__() method on the Future class itself, but instead to have a reference to a helper object with a __del__() method that logs the traceback, where we ensure that the helper object doesn't participate in cycles, and only the Future has a reference to it. The helper object is added when set_exception() is called. When the Future is collected, and the helper is present, the helper object is also collected, and its __del__() method will log the traceback. When the Future's result() or exception() method is called (and a helper object is present), it removes the the helper object, after calling its clear() method to prevent it from logging. One downside is that we do a fair amount of work to extract the traceback from the exception, even when it is never logged. It would seem cheaper to just store the exception object, but that references the traceback, which references stack frames, which may reference the Future, which references the _EnsureExceptionHandledGuard, and then the _EnsureExceptionHandledGuard would be included in a cycle, which is what we're trying to avoid! As an optimization, we don't immediately format the exception; we only do the work when activate() is called, which call is delayed until after all the Future's callbacks have run. Since usually a Future has at least one callback (typically set by 'yield from') and usually that callback extracts the callback, thereby removing the need to format the exception. PS. I don't claim credit for this solution. I first heard of it in a discussion about closing files when they are collected. """ __slots__ = ['exc', 'tb', 'hndl', 'cls'] def __init__(self, exc, handler): self.exc = exc self.hndl = handler self.cls = type(exc) self.tb = None def activate(self): exc = self.exc if exc is not None: self.exc = None self.tb = traceback.format_exception(exc.__class__, exc, exc.__traceback__) def clear(self): self.exc = None self.tb = None def <|fim_middle|>(self): if self.tb: self.hndl(self.cls, self.tb) <|fim▁end|>
__del__
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() <|fim▁hole|> model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass<|fim▁end|>
class AuthorUpdate(generic.UpdateView):
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): <|fim_middle|> class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
    template_name = 'generic_views/about.html'

    def get_context_data(self, **kwargs):
        return {
            'params': kwargs,
            'key': 'value'
        }
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): <|fim_middle|> class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
        return {
            'params': kwargs,
            'key': 'value'
        }
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): <|fim_middle|> class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
template_name = 'generic_views/detail.html'

    def get_object(self):
        return {'foo': 'bar'}
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): <|fim_middle|> class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
return {'foo': 'bar'}
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): <|fim_middle|> class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
queryset = Artist.objects.all()
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): <|fim_middle|> class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
queryset = Author.objects.all()
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): <|fim_middle|> class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
queryset = Page.objects.all()
    template_name_field = 'template'
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): <|fim_middle|> class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
"""A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html'
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): <|fim_middle|> class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
queryset = Author.objects.all()
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): <|fim_middle|> class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
model = Artist
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): <|fim_middle|> class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
queryset = Author.objects.all()
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): <|fim_middle|> class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
model = Author
    success_url = '/list/authors/'
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): <|fim_middle|> class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
model = Author
    form_class = AuthorForm
    template_name = 'generic_views/form.html'
    context_object_name = 'thingy'

    def get_success_url(self):
        return reverse('author_detail', args=[self.object.id,])
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): <|fim_middle|> class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
return reverse('author_detail', args=[self.object.id,])
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): <|fim_middle|> class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
post = method_decorator(login_required)(AuthorCreate.post)
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): <|fim_middle|> class NaiveAuthorUpdate(generic.UpdateView): queryset = Author.objects.all() class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
model = Artist
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.utils.decorators import method_decorator from django.views import generic from regressiontests.generic_views.models import Artist, Author, Book, Page from regressiontests.generic_views.forms import AuthorForm class CustomTemplateView(generic.TemplateView): template_name = 'generic_views/about.html' def get_context_data(self, **kwargs): return { 'params': kwargs, 'key': 'value' } class ObjectDetail(generic.DetailView): template_name = 'generic_views/detail.html' def get_object(self): return {'foo': 'bar'} class ArtistDetail(generic.DetailView): queryset = Artist.objects.all() class AuthorDetail(generic.DetailView): queryset = Author.objects.all() class PageDetail(generic.DetailView): queryset = Page.objects.all() template_name_field = 'template' class DictList(generic.ListView): """A ListView that doesn't use a model.""" queryset = [ {'first': 'John', 'last': 'Lennon'}, {'last': 'Yoko', 'last': 'Ono'} ] template_name = 'generic_views/list.html' class AuthorList(generic.ListView): queryset = Author.objects.all() class ArtistCreate(generic.CreateView): model = Artist class NaiveAuthorCreate(generic.CreateView): queryset = Author.objects.all() class AuthorCreate(generic.CreateView): model = Author success_url = '/list/authors/' class SpecializedAuthorCreate(generic.CreateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class AuthorCreateRestricted(AuthorCreate): post = method_decorator(login_required)(AuthorCreate.post) class ArtistUpdate(generic.UpdateView): model = Artist class NaiveAuthorUpdate(generic.UpdateView): <|fim_middle|> class AuthorUpdate(generic.UpdateView): model = Author success_url = '/list/authors/' class SpecializedAuthorUpdate(generic.UpdateView): model = Author form_class = AuthorForm template_name = 'generic_views/form.html' context_object_name = 'thingy' def get_success_url(self): return reverse('author_detail', args=[self.object.id,]) class NaiveAuthorDelete(generic.DeleteView): queryset = Author.objects.all() class AuthorDelete(generic.DeleteView): model = Author success_url = '/list/authors/' class SpecializedAuthorDelete(generic.DeleteView): queryset = Author.objects.all() template_name = 'generic_views/confirm_delete.html' context_object_name = 'thingy' def get_success_url(self): return reverse('authors_list') class BookConfig(object): queryset = Book.objects.all() date_field = 'pubdate' class BookArchive(BookConfig, generic.ArchiveIndexView): pass class BookYearArchive(BookConfig, generic.YearArchiveView): pass class BookMonthArchive(BookConfig, generic.MonthArchiveView): pass class BookWeekArchive(BookConfig, generic.WeekArchiveView): pass class BookDayArchive(BookConfig, generic.DayArchiveView): pass class BookTodayArchive(BookConfig, generic.TodayArchiveView): pass class BookDetail(BookConfig, generic.DateDetailView): pass <|fim▁end|>
queryset = Author.objects.all()