prompt (large_string, lengths 72 to 9.34k) | completion (large_string, lengths 0 to 7.61k) |
---|---|
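Each row below pairs a fill-in-the-middle (FIM) prompt with the completion that fills its `<|fim_middle|>` hole. The following is a minimal sketch, assuming only the marker convention visible in the rows themselves (`<|file_name|>…<|end_file_name|>`, `<|fim▁begin|>`, `<|fim_middle|>`, `<|fim▁end|>`); the `reassemble` helper and the demo strings are illustrative and not part of the dataset.

```python
import re

# Matches the file-name header that precedes each FIM prompt.
FILE_NAME = re.compile(r"<\|file_name\|>(.*?)<\|end_file_name\|>", re.DOTALL)


def reassemble(prompt: str, completion: str) -> tuple:
    """Return (file_name, full_source) reconstructed from one prompt/completion row."""
    name_match = FILE_NAME.search(prompt)
    file_name = name_match.group(1) if name_match else "<unknown>"

    # Strip the file-name header and the FIM sentinels, then splice the
    # completion into the <|fim_middle|> hole.
    body = FILE_NAME.sub("", prompt)
    body = body.replace("<|fim▁begin|>", "").replace("<|fim▁end|>", "")
    full_source = body.replace("<|fim_middle|>", completion)
    return file_name, full_source


if __name__ == "__main__":
    # Hypothetical row, used only to demonstrate the reconstruction.
    demo_prompt = (
        "<|file_name|>demo.py<|end_file_name|>"
        "<|fim▁begin|>def add(a, b):\n    return <|fim_middle|>\n<|fim▁end|>"
    )
    demo_completion = "a + b"
    print(reassemble(demo_prompt, demo_completion)[1])
```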
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
<|fim_middle|>
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | pass |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
<|fim_middle|>
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
<|fim_middle|>
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
<|fim_middle|>
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs) |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
<|fim_middle|>
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | Clock.schedule_once(lambda dt: self.ids.content.add_widget(value)) |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
<|fim_middle|>
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | pass |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
<|fim_middle|>
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
<|fim_middle|>
<|fim▁end|> | text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
<|fim_middle|>
def on_ok(self, *args):
pass <|fim▁end|> | self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs) |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
<|fim_middle|>
<|fim▁end|> | pass |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def <|fim_middle|>(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | alertPopup |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def <|fim_middle|>(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | confirmPopup |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def <|fim_middle|>(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | __init__ |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def <|fim_middle|>(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | on_answer |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def <|fim_middle|>(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | editor_popup |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def <|fim_middle|>(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | __init__ |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def <|fim_middle|>(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | on_content |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def <|fim_middle|>(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | on_answer |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def <|fim_middle|>(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | okPopup |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from kivy.metrics import dp
from iconbutton import IconButton
__all__ = ('alertPopup, confirmPopup, okPopup, editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
            on_press: root.dispatch('on_ok', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def <|fim_middle|>(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass <|fim▁end|> | __init__ |
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from iconbutton import IconButton
__all__ = ('alertPopup', 'confirmPopup', 'okPopup', 'editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
            on_press: root.dispatch('on_ok', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def <|fim_middle|>(self, *args):
pass <|fim▁end|> | on_ok |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
                months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()<|fim▁hole|>
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))<|fim▁end|> | bucket = luigi.Parameter()
etl_path = luigi.Parameter() |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"<|fim_middle|>
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
                months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | ""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
y<|fim_middle|>
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
                months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | ield DownloadRITACatalogs()
yield DownloadRITAData()
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"<|fim_middle|>
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
                months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | ""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
b<|fim_middle|>
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
                months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | aseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"<|fim_middle|>
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
                months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | ""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
l<|fim_middle|>
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
                months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | ogger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
ou<|fim_middle|>
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
                months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | tput_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
""<|fim_middle|>
ss DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | "
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
                months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
cla |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
to<|fim_middle|>
ss DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | day = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
cla |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year + 1)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
<|fim_middle|>
ass ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | """
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
cl |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year + 1)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return<|fim_middle|>
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year + 1)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return<|fim_middle|>
ass ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
cl |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year + 1)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
<|fim_middle|>
ss RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | """
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
cla |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year + 1)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return<|fim_middle|>
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | DownloadRITAMonthlyData(year=self.year, month=self.month)
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year + 1)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = b<|fim_middle|>
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | oto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year + 1)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return<|fim_middle|>
ss RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
cla |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year + 1)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_p<|fim_middle|>
ss PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | ath = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
cla |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year + 1)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return<|fim_middle|>
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | RawData()
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year + 1)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = <|fim_middle|>
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year + 1)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return<|fim_middle|>
ss PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
cla |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def re<|fim_middle|>
<|fim▁end|> | quires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return<|fim_middle|>
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | RTask()
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = <|fim_middle|>
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return<|fim_middle|>
<|fim▁end|> | luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
ou <|fim_middle|>
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | tput_file.write(chunk.decode('utf-8') + '\n')
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
month <|fim_middle|>
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | s = range(1,13)
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
month <|fim_middle|>
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | = range(1, max_month+1)
|
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def r<|fim_middle|>self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | equires( |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def r<|fim_middle|>self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | equires( |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def r<|fim_middle|>self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | un( |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Variables de ambiente
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Obtenemos las llaves de AWS
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
Task principal para el pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def ou<|fim_middle|>elf):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
years = range(self.start_year, max_year)
logger.info("Descargando datos de los años {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | tput(s |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Environment variables
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Read the AWS keys from the environment
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
    Main task for the pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def re<|fim_middle|>elf):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
        years = range(self.start_year, max_year + 1)
        logger.info("Downloading data for the years {}".format(years))
        for año in years:
            if año != max_year:
                months = range(1, 13)
            else:
                months = range(1, max_month + 1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
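# Illustrative sketch: DownloadRITAMonthlyData writes one zip per month under
# {root_path}/{raw_path}/MM-YYYY.zip, so a single period can be smoke-tested from
# Python instead of the luigi CLI. The bucket and path values below are
# placeholder assumptions, not the project's real configuration.
#
#   import luigi
#   luigi.build(
#       [DownloadRITAMonthlyData(year=2016, month=1,
#                                root_path="s3://my-bucket/rita",
#                                raw_path="raw")],
#       local_scheduler=True,
#   )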
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | quires(s |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Environment variables
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Read the AWS keys from the environment
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
    Main task for the pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
        years = range(self.start_year, max_year + 1)
        logger.info("Downloading data for the years {}".format(years))
        for año in years:
            if año != max_year:
                months = range(1, 13)
            else:
                months = range(1, max_month + 1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(se<|fim_middle|>:
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
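# Illustrative sketch: ExtractColumns writes a pipe-separated, fully quoted file,
# so it can be read back for a quick sanity check as below; the local file name
# is a placeholder assumption.
#
#   import pandas as pd
#   check = pd.read_csv("01.psv", sep="|", quotechar='"')
#   print(len(check), list(check.columns)[:5])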
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | lf) |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Environment variables
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Read the AWS keys from the environment
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
    Main task for the pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
        years = range(self.start_year, max_year + 1)
        logger.info("Downloading data for the years {}".format(years))
        for año in years:
            if año != max_year:
                months = range(1, 13)
            else:
                months = range(1, max_month + 1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output<|fim_middle|>:
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | (self) |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Environment variables
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Read the AWS keys from the environment
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
    Main task for the pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
        years = range(self.start_year, max_year + 1)
        logger.info("Downloading data for the years {}".format(years))
        for año in years:
            if año != max_year:
                months = range(1, 13)
            else:
                months = range(1, max_month + 1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requir<|fim_middle|>:
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | es(self) |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Environment variables
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Read the AWS keys from the environment
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
    Main task for the pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
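# Illustrative sketch: the wrapper task above is normally launched through the
# luigi command line; the module path and parameter spelling below are
# assumptions based on this package's layout.
#
#   luigi --module rita.pipelines.rita ritaPipeline \
#         --DownloadRITAData-start-year 2014 --workers 2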
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
        years = range(self.start_year, max_year + 1)
        logger.info("Downloading data for the years {}".format(years))
        for año in years:
            if año != max_year:
                months = range(1, 13)
            else:
                months = range(1, max_month + 1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(se<|fim_middle|>:
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | lf) |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Environment variables
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Read the AWS keys from the environment
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
    Main task for the pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
        years = range(self.start_year, max_year + 1)
        logger.info("Downloading data for the years {}".format(years))
        for año in years:
            if año != max_year:
                months = range(1, 13)
            else:
                months = range(1, max_month + 1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output<|fim_middle|>:
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | (self) |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Environment variables
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Read the AWS keys from the environment
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
    Main task for the pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
        years = range(self.start_year, max_year + 1)
        logger.info("Downloading data for the years {}".format(years))
        for año in years:
            if año != max_year:
                months = range(1, 13)
            else:
                months = range(1, max_month + 1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requir<|fim_middle|>:
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
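# Illustrative sketch: check_output above raises CalledProcessError on a non-zero
# exit status, while PythonTask below uses subprocess.call, which ignores
# failures; a stricter variant (assuming Python 3.5+) would be:
#
#   import subprocess
#   subprocess.run(cmd, shell=True, check=True)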
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | es(self) |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Environment variables
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Read the AWS keys from the environment
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
    Main task for the pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
        years = range(self.start_year, max_year + 1)
        logger.info("Downloading data for the years {}".format(years))
        for año in years:
            if año != max_year:
                months = range(1, 13)
            else:
                months = range(1, max_month + 1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(se<|fim_middle|>:
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | lf) |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Environment variables
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Read the AWS keys from the environment
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
    Main task for the pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
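# Illustrative sketch: newer luigi releases expose the S3 target as
# luigi.contrib.s3.S3Target rather than luigi.s3.S3Target; the bucket and key
# below are placeholder assumptions.
#
#   from luigi.contrib.s3 import S3Target
#   catalog_target = S3Target("s3://my-bucket/rita/catalogs/AIRLINE_ID.csv")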
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
        years = range(self.start_year, max_year + 1)
        logger.info("Downloading data for the years {}".format(years))
        for año in years:
            if año != max_year:
                months = range(1, 13)
            else:
                months = range(1, max_month + 1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output<|fim_middle|>:
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | (self) |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Environment variables
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Read the AWS keys from the environment
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
    Main task for the pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
        years = range(self.start_year, max_year + 1)
        logger.info("Downloading data for the years {}".format(years))
        for año in years:
            if año != max_year:
                months = range(1, 13)
            else:
                months = range(1, max_month + 1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requir<|fim_middle|>:
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | es(self) |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Environment variables
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Read the AWS keys from the environment
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
    Main task for the pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
        years = range(self.start_year, max_year + 1)
        logger.info("Downloading data for the years {}".format(years))
        for año in years:
            if año != max_year:
                months = range(1, 13)
            else:
                months = range(1, max_month + 1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(se<|fim_middle|>:
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | lf) |
<|file_name|>rita.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
rita Pipeline
.. module:: rita
:synopsis: rita pipeline
.. moduleauthor:: Adolfo De Unánue <[email protected]>
"""
import os
import subprocess
from pathlib import Path
import boto3
import zipfile
import io
import csv
import datetime
import luigi
import luigi.s3
import pandas as pd
import sqlalchemy
from contextlib import closing
import requests
import re
from bs4 import BeautifulSoup
## Environment variables
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
## Get the AWS keys
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
## Logging
import rita.config_ini
import logging
logger = logging.getLogger("rita.pipeline")
import rita.pipelines.utils
import rita.pipelines.common
from rita.pipelines.common.tasks import DockerTask
class ritaPipeline(luigi.WrapperTask):
"""
    Main task for the pipeline
"""
def requires(self):
yield DownloadRITACatalogs()
yield DownloadRITAData()
class DownloadRITACatalogs(luigi.WrapperTask):
"""
"""
def requires(self):
baseurl = "https://www.transtats.bts.gov"
url = "https://www.transtats.bts.gov/DL_SelectFields.asp?Table_ID=236"
page = requests.get(url)
soup = BeautifulSoup(page.content, "lxml")
for link in soup.find_all('a', href=re.compile('Download_Lookup')):
catalog_name = link.get('href').split('=L_')[-1]
catalog_url = '{}/{}'.format(baseurl, link.get('href'))
yield DownloadCatalog(catalog_name=catalog_name, catalog_url=catalog_url)
class DownloadCatalog(luigi.Task):
"""
"""
catalog_url = luigi.Parameter()
catalog_name = luigi.Parameter()
root_path = luigi.Parameter()
def run(self):
logger.debug("Guardando en {} el catálogo {}".format(self.output().path, self.catalog_name))
with closing(requests.get(self.catalog_url, stream= True)) as response, \
self.output().open('w') as output_file:
for chunk in response.iter_lines(chunk_size=1024*8):
if chunk:
output_file.write(chunk.decode('utf-8') + '\n')
def output(self):
output_path = '{}/catalogs/{}.csv'.format(self.root_path,
self.catalog_name)
return luigi.s3.S3Target(path=output_path)
class DownloadRITAData(luigi.WrapperTask):
"""
"""
start_year=luigi.IntParameter()
def requires(self):
today = datetime.date.today() + datetime.timedelta(days=-90)
max_year = today.year
max_month = today.month
        years = range(self.start_year, max_year + 1)
        logger.info("Downloading data for the years {}".format(years))
for año in years:
if año != max_year:
months = range(1,13)
else:
                months = range(1, max_month+1)
for mes in months:
yield DownloadRITAMonthlyData(year=año, month=mes)
class DownloadRITAMonthlyData(DockerTask):
"""
"""
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
raw_path = luigi.Parameter()
@property
def cmd(self):
return '''
docker run --rm --env AWS_ACCESS_KEY_ID={} --env AWS_SECRET_ACCESS_KEY={} rita/download-rita --year {} --month {} --data_path {}/{}
'''.format(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, self.year, self.month, self.root_path, self.raw_path)
def output(self):
return luigi.s3.S3Target(path='{}/{}/{}-{}.zip'.format(self.root_path,
self.raw_path,
str(self.month).zfill(2),
self.year))
class ExtractColumns(luigi.Task):
"""
"""
task_name = "extract-columns"
year = luigi.IntParameter()
month = luigi.IntParameter()
root_path = luigi.Parameter()
bucket = luigi.Parameter()
etl_path = luigi.Parameter()
def requires(self):
        return DownloadRITAMonthlyData(year=self.year, month=self.month)
def run(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket)
input_path = Path(self.input().path)
obj = bucket.Object(str(input_path.relative_to('s3://{}'.format(self.bucket))))
df = None
with io.BytesIO(obj.get()["Body"].read()) as input_file:
input_file.seek(0)
with zipfile.ZipFile(input_file, mode='r') as zip_file:
for subfile in zip_file.namelist():
with zip_file.open(subfile) as file:
df = pd.read_csv(file)
with self.output().open('w') as output_file:
output_file.write(df.loc[:, 'YEAR':'DIV_AIRPORT_LANDINGS'].to_csv(None,
sep="|",
header=True,
index=False,
encoding="utf-8",
quoting=csv.QUOTE_ALL))
def output(self):
return luigi.s3.S3Target('{}/{}/{}/YEAR={}/{}.psv'.format(self.root_path,
self.etl_path,
self.task_name,
self.year,
str(self.month).zfill(2)))
class RTask(luigi.Task):
root_path = luigi.Parameter()
def requires(self):
return RawData()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-r
'''
logger.debug(cmd)
out = subprocess.check_output(cmd, shell=True)
logger.debug(out)
def output(self):
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_R.psv"))
class PythonTask(luigi.Task):
def requires(self):
return RTask()
def run(self):
cmd = '''
docker run --rm -v rita_store:/rita/data rita/test-python --inputfile {} --outputfile {}
'''.format(os.path.join("/rita/data", os.path.basename(self.input().path)),
os.path.join("/rita/data", os.path.basename(self.output().path)))
logger.debug(cmd)
out = subprocess.call(cmd, shell=True)
logger.debug(out)
def output<|fim_middle|>:
return luigi.LocalTarget(os.path.join(os.getcwd(), "data", "hola_mundo_desde_python.json"))
<|fim▁end|> | (self) |
<|file_name|>phxsql_utils.py<|end_file_name|><|fim▁begin|><|fim▁hole|> return str<|fim▁end|> | def format_path( str ):
while( str.find( '//' ) != -1 ):
str = str.replace( '//', '/' ) |
<|file_name|>phxsql_utils.py<|end_file_name|><|fim▁begin|>def format_path( str ):
<|fim_middle|>
<|fim▁end|> | while( str.find( '//' ) != -1 ):
str = str.replace( '//', '/' )
return str |
<|file_name|>phxsql_utils.py<|end_file_name|><|fim▁begin|>def <|fim_middle|>( str ):
while( str.find( '//' ) != -1 ):
str = str.replace( '//', '/' )
return str
<|fim▁end|> | format_path |
<|file_name|>archive_org_plugin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import (unicode_literals, division, absolute_import, print_function)
store_version = 1 # Needed for dynamic plugin loading
__license__ = 'GPL 3'
__copyright__ = '2011, John Schember <[email protected]>'
__docformat__ = 'restructuredtext en'
<|fim▁hole|>from calibre.gui2.store.search_result import SearchResult
class ArchiveOrgStore(BasicStoreConfig, OpenSearchOPDSStore):
open_search_url = 'http://bookserver.archive.org/catalog/opensearch.xml'
web_url = 'http://www.archive.org/details/texts'
# http://bookserver.archive.org/catalog/
def search(self, query, max_results=10, timeout=60):
for s in OpenSearchOPDSStore.search(self, query, max_results, timeout):
s.detail_item = 'http://www.archive.org/details/' + s.detail_item.split(':')[-1]
s.price = '$0.00'
s.drm = SearchResult.DRM_UNLOCKED
yield s
def get_details(self, search_result, timeout):
'''
The opensearch feed only returns a subset of formats that are available.
We want to get a list of all formats that the user can get.
'''
from calibre import browser
from contextlib import closing
from lxml import html
br = browser()
with closing(br.open(search_result.detail_item, timeout=timeout)) as nf:
idata = html.fromstring(nf.read())
formats = ', '.join(idata.xpath('//p[@id="dl" and @class="content"]//a/text()'))
search_result.formats = formats.upper()
return True<|fim▁end|> | from calibre.gui2.store.basic_config import BasicStoreConfig
from calibre.gui2.store.opensearch_store import OpenSearchOPDSStore |
<|file_name|>archive_org_plugin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import (unicode_literals, division, absolute_import, print_function)
store_version = 1 # Needed for dynamic plugin loading
__license__ = 'GPL 3'
__copyright__ = '2011, John Schember <[email protected]>'
__docformat__ = 'restructuredtext en'
from calibre.gui2.store.basic_config import BasicStoreConfig
from calibre.gui2.store.opensearch_store import OpenSearchOPDSStore
from calibre.gui2.store.search_result import SearchResult
class ArchiveOrgStore(BasicStoreConfig, OpenSearchOPDSStore):
<|fim_middle|>
<|fim▁end|> | open_search_url = 'http://bookserver.archive.org/catalog/opensearch.xml'
web_url = 'http://www.archive.org/details/texts'
# http://bookserver.archive.org/catalog/
def search(self, query, max_results=10, timeout=60):
for s in OpenSearchOPDSStore.search(self, query, max_results, timeout):
s.detail_item = 'http://www.archive.org/details/' + s.detail_item.split(':')[-1]
s.price = '$0.00'
s.drm = SearchResult.DRM_UNLOCKED
yield s
def get_details(self, search_result, timeout):
'''
The opensearch feed only returns a subset of formats that are available.
We want to get a list of all formats that the user can get.
'''
from calibre import browser
from contextlib import closing
from lxml import html
br = browser()
with closing(br.open(search_result.detail_item, timeout=timeout)) as nf:
idata = html.fromstring(nf.read())
formats = ', '.join(idata.xpath('//p[@id="dl" and @class="content"]//a/text()'))
search_result.formats = formats.upper()
return True |
<|file_name|>archive_org_plugin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import (unicode_literals, division, absolute_import, print_function)
store_version = 1 # Needed for dynamic plugin loading
__license__ = 'GPL 3'
__copyright__ = '2011, John Schember <[email protected]>'
__docformat__ = 'restructuredtext en'
from calibre.gui2.store.basic_config import BasicStoreConfig
from calibre.gui2.store.opensearch_store import OpenSearchOPDSStore
from calibre.gui2.store.search_result import SearchResult
class ArchiveOrgStore(BasicStoreConfig, OpenSearchOPDSStore):
open_search_url = 'http://bookserver.archive.org/catalog/opensearch.xml'
web_url = 'http://www.archive.org/details/texts'
# http://bookserver.archive.org/catalog/
def search(self, query, max_results=10, timeout=60):
<|fim_middle|>
def get_details(self, search_result, timeout):
'''
The opensearch feed only returns a subset of formats that are available.
We want to get a list of all formats that the user can get.
'''
from calibre import browser
from contextlib import closing
from lxml import html
br = browser()
with closing(br.open(search_result.detail_item, timeout=timeout)) as nf:
idata = html.fromstring(nf.read())
formats = ', '.join(idata.xpath('//p[@id="dl" and @class="content"]//a/text()'))
search_result.formats = formats.upper()
return True
<|fim▁end|> | for s in OpenSearchOPDSStore.search(self, query, max_results, timeout):
s.detail_item = 'http://www.archive.org/details/' + s.detail_item.split(':')[-1]
s.price = '$0.00'
s.drm = SearchResult.DRM_UNLOCKED
yield s |
<|file_name|>archive_org_plugin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import (unicode_literals, division, absolute_import, print_function)
store_version = 1 # Needed for dynamic plugin loading
__license__ = 'GPL 3'
__copyright__ = '2011, John Schember <[email protected]>'
__docformat__ = 'restructuredtext en'
from calibre.gui2.store.basic_config import BasicStoreConfig
from calibre.gui2.store.opensearch_store import OpenSearchOPDSStore
from calibre.gui2.store.search_result import SearchResult
class ArchiveOrgStore(BasicStoreConfig, OpenSearchOPDSStore):
open_search_url = 'http://bookserver.archive.org/catalog/opensearch.xml'
web_url = 'http://www.archive.org/details/texts'
# http://bookserver.archive.org/catalog/
def search(self, query, max_results=10, timeout=60):
for s in OpenSearchOPDSStore.search(self, query, max_results, timeout):
s.detail_item = 'http://www.archive.org/details/' + s.detail_item.split(':')[-1]
s.price = '$0.00'
s.drm = SearchResult.DRM_UNLOCKED
yield s
def get_details(self, search_result, timeout):
<|fim_middle|>
<|fim▁end|> | '''
The opensearch feed only returns a subset of formats that are available.
We want to get a list of all formats that the user can get.
'''
from calibre import browser
from contextlib import closing
from lxml import html
br = browser()
with closing(br.open(search_result.detail_item, timeout=timeout)) as nf:
idata = html.fromstring(nf.read())
formats = ', '.join(idata.xpath('//p[@id="dl" and @class="content"]//a/text()'))
search_result.formats = formats.upper()
return True |
<|file_name|>archive_org_plugin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import (unicode_literals, division, absolute_import, print_function)
store_version = 1 # Needed for dynamic plugin loading
__license__ = 'GPL 3'
__copyright__ = '2011, John Schember <[email protected]>'
__docformat__ = 'restructuredtext en'
from calibre.gui2.store.basic_config import BasicStoreConfig
from calibre.gui2.store.opensearch_store import OpenSearchOPDSStore
from calibre.gui2.store.search_result import SearchResult
class ArchiveOrgStore(BasicStoreConfig, OpenSearchOPDSStore):
open_search_url = 'http://bookserver.archive.org/catalog/opensearch.xml'
web_url = 'http://www.archive.org/details/texts'
# http://bookserver.archive.org/catalog/
def <|fim_middle|>(self, query, max_results=10, timeout=60):
for s in OpenSearchOPDSStore.search(self, query, max_results, timeout):
s.detail_item = 'http://www.archive.org/details/' + s.detail_item.split(':')[-1]
s.price = '$0.00'
s.drm = SearchResult.DRM_UNLOCKED
yield s
def get_details(self, search_result, timeout):
'''
The opensearch feed only returns a subset of formats that are available.
We want to get a list of all formats that the user can get.
'''
from calibre import browser
from contextlib import closing
from lxml import html
br = browser()
with closing(br.open(search_result.detail_item, timeout=timeout)) as nf:
idata = html.fromstring(nf.read())
formats = ', '.join(idata.xpath('//p[@id="dl" and @class="content"]//a/text()'))
search_result.formats = formats.upper()
return True
<|fim▁end|> | search |
<|file_name|>archive_org_plugin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import (unicode_literals, division, absolute_import, print_function)
store_version = 1 # Needed for dynamic plugin loading
__license__ = 'GPL 3'
__copyright__ = '2011, John Schember <[email protected]>'
__docformat__ = 'restructuredtext en'
from calibre.gui2.store.basic_config import BasicStoreConfig
from calibre.gui2.store.opensearch_store import OpenSearchOPDSStore
from calibre.gui2.store.search_result import SearchResult
class ArchiveOrgStore(BasicStoreConfig, OpenSearchOPDSStore):
open_search_url = 'http://bookserver.archive.org/catalog/opensearch.xml'
web_url = 'http://www.archive.org/details/texts'
# http://bookserver.archive.org/catalog/
def search(self, query, max_results=10, timeout=60):
for s in OpenSearchOPDSStore.search(self, query, max_results, timeout):
s.detail_item = 'http://www.archive.org/details/' + s.detail_item.split(':')[-1]
s.price = '$0.00'
s.drm = SearchResult.DRM_UNLOCKED
yield s
def <|fim_middle|>(self, search_result, timeout):
'''
The opensearch feed only returns a subset of formats that are available.
We want to get a list of all formats that the user can get.
'''
from calibre import browser
from contextlib import closing
from lxml import html
br = browser()
with closing(br.open(search_result.detail_item, timeout=timeout)) as nf:
idata = html.fromstring(nf.read())
formats = ', '.join(idata.xpath('//p[@id="dl" and @class="content"]//a/text()'))
search_result.formats = formats.upper()
return True
<|fim▁end|> | get_details |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
    Simulates the user going to sleep every day for some time; the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:<|fim▁hole|> alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS<|fim▁end|> | |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
<|fim_middle|>
<|fim▁end|> | """Pauses the execution of the bot every day for some time
    Simulates the user going to sleep every day for some time; the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
    Simulates the user going to sleep every day for some time; the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is SCHEDULING_MARGIN from now
def __init__(self, bot, config):
<|fim_middle|>
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep() |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
    Simulates the user going to sleep every day for some time; the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
<|fim_middle|>
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
    Simulates the user going to sleep every day for some time; the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
<|fim_middle|>
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared) |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
    Simulates the user going to sleep every day for some time; the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
<|fim_middle|>
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
) |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
    Simulates the user going to sleep every day for some time; the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
<|fim_middle|>
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration) |
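A minimal standalone sketch (not part of any dataset row above; the helper name hhmm_to_seconds is made up for illustration) of the "HH:MM"-to-seconds conversion that _process_config performs with datetime.strptime and timedelta:

from datetime import datetime, timedelta

def hhmm_to_seconds(value, default='01:00'):
    # Parse an "HH:MM" string (falling back to a default) into whole seconds,
    # mirroring how _process_config handles duration and the random offsets.
    parsed = datetime.strptime(value if value else default, '%H:%M')
    return int(timedelta(hours=parsed.hour, minutes=parsed.minute).total_seconds())

# Example: '5:30' parses to 19800 seconds (5 hours 30 minutes).
assert hhmm_to_seconds('5:30') == 19800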
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is within SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
<|fim_middle|>
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is within SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
<|fim_middle|>
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location |
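An illustrative Python 3 re-statement (not taken from the rows above; the function and variable names are hypothetical) of the selection logic in _get_next_sleep_schedule: compute each entry's next occurrence, then keep the candidate closest to now.

from datetime import datetime, timedelta

def next_occurrence(now, hour, minute, offset_seconds=0):
    # Next datetime matching hour:minute, shifted by an offset; rolls over to tomorrow if already past.
    candidate = now.replace(hour=hour, minute=minute, second=0, microsecond=0)
    candidate += timedelta(seconds=offset_seconds)
    if candidate <= now:
        candidate += timedelta(days=1)
    return candidate

now = datetime.now()
candidates = [next_occurrence(now, 12, 0), next_occurrence(now, 17, 45)]
closest = min(candidates, key=lambda t: (t - now).total_seconds())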
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is within SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
<|fim_middle|>
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is within SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
<|fim_middle|>
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | offset = uniform(-max_offset, max_offset)
return int(offset) |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is within SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
<|fim_middle|>
<|fim▁end|> | sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS |
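For reference, a small self-contained sketch (hypothetical helper name, not part of any row) of the divmod-based HH:MM:SS formatting used by _sleep when it logs the sleep duration:

def format_hms(total_seconds):
    # Split a duration in seconds into hours, minutes and seconds for logging.
    minutes, seconds = divmod(int(total_seconds), 60)
    hours, minutes = divmod(minutes, 60)
    return '%02d:%02d:%02d' % (hours, minutes, seconds)

# Example: 19800 seconds formats as '05:30:00'.
assert format_hms(19800) == '05:30:00'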
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is within SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
<|fim_middle|>
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is within SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
<|fim_middle|>
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is within SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
<|fim_middle|>
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2]) |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
    and the duration are changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is within SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
<|fim_middle|>
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | self.bot.wake_location = wake_up_at_location |
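The rows in this dump are prefix-suffix-middle FIM pairs built with the <|file_name|>, <|end_file_name|>, <|fim▁begin|>, <|fim_middle|> and <|fim▁end|> markers, with the masked middle stored in the completion column. A minimal sketch of stitching one row back into the original sleep_schedule.py; the function name is mine and it assumes each marker occurs exactly once per prompt.

def reassemble_fim_row(prompt, completion):
    # Prompt layout as seen in this dump:
    # <|file_name|>NAME<|end_file_name|><|fim▁begin|>PREFIX<|fim_middle|>SUFFIX<|fim▁end|>
    _, rest = prompt.split("<|fim▁begin|>", 1)
    prefix, rest = rest.split("<|fim_middle|>", 1)
    suffix, _ = rest.split("<|fim▁end|>", 1)
    return prefix + completion + suffix

# Usage: reassemble_fim_row(row["prompt"], row["completion"]) should reproduce the
# full sleep_schedule.py text for that row.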
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
and the duration is changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip scheduling if the next sleep is less than SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): <|fim_middle|>
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | self.bot.login() # Same here |
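For contrast, a hypothetical sketch of how a row like the one above could be generated from a source file. This is not the dataset's actual generation code: the real rows can split mid-line (for example right after "if hasattr(self.bot, 'api'): "), whereas this version masks whole lines only.

import random

def make_fim_row(file_name, source, rng=random):
    lines = source.splitlines(keepends=True)
    start = rng.randrange(len(lines))
    end = rng.randint(start + 1, len(lines))   # mask one or more whole lines
    prefix = "".join(lines[:start])
    middle = "".join(lines[start:end])
    suffix = "".join(lines[end:])
    prompt = ("<|file_name|>" + file_name + "<|end_file_name|>"
              + "<|fim▁begin|>" + prefix + "<|fim_middle|>" + suffix + "<|fim▁end|>")
    return {"prompt": prompt, "completion": middle}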
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
and the duration is changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip scheduling if the next sleep is less than SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
<|fim_middle|>
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt] |
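The completion above is the wake_up_at_location parsing branch. The same logic as a standalone helper; the helper name and the alt_min/alt_max defaults are placeholders for this sketch, not values taken from the bot config.

from random import uniform

def parse_wake_up_location(raw, alt_min=5.0, alt_max=50.0):
    parts = raw.split(',', 2)                     # "lat,lng" or "lat,lng,alt"
    lat, lng = float(parts[0]), float(parts[1])
    alt = float(parts[2]) if len(parts) == 3 else uniform(alt_min, alt_max)
    return [lat, lng, alt]

# parse_wake_up_location("51.5074,-0.1278,35") -> [51.5074, -0.1278, 35.0]
# parse_wake_up_location("51.5074,-0.1278")    -> [51.5074, -0.1278, <random alt>]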
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
and the duration is changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip scheduling if the next sleep is less than SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
<|fim_middle|>
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | alt=float(wake_up_at_location[2]) |
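_process_config turns every "HH:MM" string in the config into a number of seconds via datetime and timedelta. Isolated from the rest of the method, the conversion looks like this (the helper name is mine):

from datetime import datetime, timedelta

def hhmm_to_seconds(value):
    t = datetime.strptime(value, '%H:%M')
    return int(timedelta(hours=t.hour, minutes=t.minute).total_seconds())

# hhmm_to_seconds('5:30')  == 19800   (a 5 h 30 min duration)
# hhmm_to_seconds('00:30') == 1800    (a 30 min random offset)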
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
and the duration is changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip scheduling if the next sleep is less than SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
<|fim_middle|>
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max) |
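The *_random_offset values are applied symmetrically around the configured time and duration. A compact restatement of _get_random_offset and its effect (function name is mine):

from random import uniform

def random_offset(max_offset_seconds):
    # uniform in [-max_offset, +max_offset], truncated to whole seconds
    return int(uniform(-max_offset_seconds, max_offset_seconds))

# With "time": "12:00" and "time_random_offset": "00:30" (1800 s), the actual start
# lands somewhere in 11:30-12:30; the duration offset is applied the same way.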
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
and the duration is changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip scheduling if the next sleep is less than SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
<|fim_middle|>
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | return True |
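The completion above is the early return in _should_sleep_now. Side effects aside (the real method also shrinks _next_duration to the time left in the window), its decision rule reduces to the sketch below; the function name and parameters are mine.

from datetime import datetime

def should_sleep_now(next_sleep, current_sleep, current_end, now=None):
    now = now or datetime.now()
    if now >= next_sleep:                      # the next scheduled sleep has arrived
        return True
    return current_sleep <= now < current_end  # still inside yesterday's window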
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
and the duration is changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip scheduling if the next sleep is less than SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
<|fim_middle|>
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True |
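_calculate_current_sleep keeps a copy of the upcoming window shifted back by one day, so a bot started in the middle of that window still sleeps for whatever time remains. A small sketch of that bookkeeping (names are mine):

from datetime import timedelta

def current_window(next_sleep, next_duration_seconds):
    start = next_sleep - timedelta(days=1)
    end = start + timedelta(seconds=next_duration_seconds)
    return start, end   # _should_sleep_now clips the duration to (end - now) when inside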
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
and the duration is changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip scheduling if the next sleep is less than SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
<|fim_middle|>
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | next_time += timedelta(days=1) |
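The completion above handles the "time already passed today" case in _get_next_sleep_schedule. The next-occurrence computation on its own (function name is mine):

from datetime import datetime, timedelta

def next_occurrence(hour, minute, now):
    candidate = now.replace(hour=hour, minute=minute)
    if candidate <= now:             # that time of day is already gone today
        candidate += timedelta(days=1)
    return candidate

# next_occurrence(12, 0, datetime(2016, 8, 1, 15, 30)) == datetime(2016, 8, 2, 12, 0)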
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
and the duration is changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip scheduling if the next sleep is less than SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: <|fim_middle|>
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | diffs[index] = diff |
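The completion above fills the diffs dict; the selection one line later uses diffs.iterkeys(), which only exists on Python 2, so this file is Python 2 code as written. A Python 3 equivalent of that selection step would be:

def closest_entry(diffs):
    # smallest time-to-start wins; same result as min(diffs.iterkeys(), key=lambda x: diffs[x])
    return min(diffs, key=diffs.get)

# closest_entry({0: 5400.0, 1: 23100.0}) == 0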
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
and the duration is changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip scheduling if the next sleep is less than SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
<|fim_middle|>
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS
<|fim▁end|> | sleep(sleep_to_go)
sleep_to_go = 0 |
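The completion above is the short-final-slice branch of the sleep loop. The overall pattern, sleeping in LOG_INTERVAL_SECONDS slices instead of one long block, can be restated as the sketch below (function name and default chunk size are mine, mirroring the 600 s constant):

from time import sleep

def chunked_sleep(total_seconds, chunk=600):
    remaining = total_seconds
    while remaining > 0:
        step = min(remaining, chunk)   # the last slice may be shorter than a full chunk
        sleep(step)
        remaining -= step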
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
and the duration is changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wakes up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10) # Skip scheduling if the next sleep is less than SCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):
self.entries = []
for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
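    # _calculate_current_sleep() above derives yesterday's occurrence of the next
    # scheduled sleep; _should_sleep_now() below uses that window so a bot started
    # mid-window only sleeps for the remaining time.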
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
            # If the sleep time has already passed, add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
        closest = min(diffs, key=diffs.get)
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
<|fim_middle|>
<|fim▁end|> | sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS |
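A minimal usage sketch of the SleepSchedule class above, assuming the file is importable so the class is in scope; the FakeEventManager, FakeConfig and FakeBot stand-ins and the sample config values are assumptions made for illustration, not part of the project.
class FakeEventManager(object):
    def emit(self, event, sender=None, formatted="", data=None):
        # Print instead of dispatching through the real event pipeline.
        print('%s: %s' % (event, formatted.format(**(data or {}))))

class FakeConfig(object):
    alt_min = 500   # only consulted when wake_up_at_location omits altitude
    alt_max = 1000

class FakeBot(object):
    def __init__(self):
        self.event_manager = FakeEventManager()
        self.config = FakeConfig()

sample_config = [
    {
        "time": "23:00",
        "duration": "7:00",
        "time_random_offset": "00:30",
        "duration_random_offset": "00:30",
        "wake_up_at_location": ""
    }
]

scheduler = SleepSchedule(FakeBot(), sample_config)
scheduler.work()  # blocks (sleeps) only if the current time falls inside the sleep window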