from .regressor import CrossLgbRegression
|
python
|
from django.urls import path, include
from django.contrib import admin
from django.contrib.auth import views as auth_views
admin.autodiscover()
import autobot.views
# To add a new path, first import the app:
# import blog
#
# Then add the new path (app URLconfs are pulled in with include()):
# path('blog/', include('blog.urls'))
#
# Learn more here: https://docs.djangoproject.com/en/2.1/topics/http/urls/
urlpatterns = [
path("", autobot.views.index, name="index"),
path("login/", autobot.views.login, name="login"),
path("logout/", auth_views.LogoutView.as_view(), name="logout"),
path("social-auth/", include('social_django.urls', namespace="social")),
path("db/", autobot.views.db, name="db"),
path("admin/", admin.site.urls),
]
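# For context, a minimal sketch of the views this URLconf expects in
# autobot/views.py (hedged: only the view names are known from the imports
# above; the template names are hypothetical):
#
# from django.shortcuts import render
#
# def index(request):
#     return render(request, "index.html")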
|
python
|
# This imports all that is listed in __init__.py of current directory:
from __init__ import *
#-------------------------------------------------------------------------------------- MAIN WINDOW CLASS ----------------------------------------------------------------------------------#
class Window(QtGui.QMainWindow): # defines a subclass of QMainWindow named Window
# This method defines everything that is to be executed automatically when the class is initialized:
def __init__(self):
super(Window, self).__init__() # This makes the class inherit functions from its upper class (Here, QMainWindow Class).
#---------------------------------- RESTORING THE LAST SETTINGS --------------------------------#
# Note: settings is an imported variable (see Variables_Module.py)
if settings.value("Runs").isNull(): # if a value named Runs does not exist in settings (this is the first time the code is run)
# then, set these custom values as the x, y positions and height, width of the Window class:
# Note: QRect converts integer values into a rectangle form which can be used to visualize a window:
self.setGeometry(QtCore.QRect(50, 50, 800, 450))
else:
self.restore_last_settings_MWindow() # restore the last settings...
self.setWindowTitle("EditOS")
self.setWindowIcon(QtGui.QIcon("Icons/Icon.ico")) # sets the icon to the icon present in icons folder of current directory...
self.initialize_editor() # (for initialize_editor, see __init__ RELATED FUNCTIONS in Objects_Module.py)
#--------------------------------- MENU BAR CONFIGURATION ---------------------------------------#
# Here, we add a menubar using the built-in .menuBar() function of QMainWindow class in PyQt and store it in self.main_menu variable for future use:
self.main_menu = self.menuBar()
self.file_menu = self.main_menu.addMenu("&File")
self.edit_menu = self.main_menu.addMenu("&Edit")
self.view_menu = self.main_menu.addMenu("&View")
self.help_menu = self.main_menu.addMenu("&Help")
# Here, we add a statusbar using the built-in .statusBar() function of QMainWindow class in PyQt and store it in self.statusbar variable for future use:
self.statusbar = self.statusBar()
        self.statusbar.hide() # we hide it for now so that it can be re-invoked via the statusbar option in the view menu (see Functions_Module.py for details)
#------------------------------- FINAL TOUCHES TO THE WINDOW --------------------------------------#
# Here, we add a style named cleanlooks using QStyleFactory and set that style to our QApplication:
# Note: Qt has a number of built-in styles with names such as plastique, cleanlooks, motif, windows vista, cde etc.
QtGui.QApplication.setStyle(QtGui.QStyleFactory.create("Cleanlooks"))
self.show()
#-------------------------------------------------------------- __init__ RELATED FUNCTIONS ------------------------------------------------------------------------#
def restore_last_settings_MWindow(self):
if (settings.value("Runs").toInt()) >= 1: # check if the number of runs is equal to or greater than 1:
if (settings.value("State").toString()) == "Maximized": # if yes, check the settings if the last time window was maximized
# then, set these custom values as the x, y positions and height, width of the Window class and maximize the window:
# Note: QRect converts integer values into a rectangle form which can be used to visualize a window:
self.setGeometry(QtCore.QRect(50, 50, 800, 450))
self.showMaximized()
else:
# if no, last time the window was not maximized,
# then set the position and size of the window according to the last values present in the settings named Geometry:
# Note: .toSize and .toPoint converts the values in settings to QSize and QPoint
# Which are compatible values to be used for moving and resizing the main window.
self.resize(settings.value("Main_Size").toSize())
self.move(settings.value("Main_Position").toPoint())
        else: # the else branch is just a safety measure for any unexpected exceptions or faults...
self.setGeometry(QtCore.QRect(50, 50, 800, 450))
def initialize_editor(self):
#-------------------------------- SETTING THE DEFAULT SETTINGS (EDITOR WINDOW) ---------------------------------------#
self.text_editor = QtGui.QPlainTextEdit() # QPlainTextEdit gives us that big bald white space we call editor.
tmr = QtGui.QFont("times new roman")
tmr.setPointSize(16) # set font size to 16pt
self.text_editor.setFont(tmr)
# self.back_color and self.font_color hold the background and font colors of our editor
# They are defaulted to none but we will allow the user to change them later:
self.back_color = "none"
self.font_color = "none"
# sets the default stylesheet of our editor using css. it has no border, no background and no font color currently.
self.text_editor.setStyleSheet("border: none;background: %s;color: %s;" %(self.back_color, self.font_color))
#-------------------------------- RESTORING THE LAST SETTINGS (EDITOR WINDOW) -------------------------------------------#
# Note: settings is an imported variable (see Variables_Module.py)
if settings.value("Runs").isNull(): # if a value named Runs does not exist in settings (this is the first time the code is run)
pass # do nothing.
else: # else, if a value named Runs does exist (this is not the first time the code is run)
# (for restore_last_settings_editor, see SUPPORT FUNCTIONS in Objects_Module.py)
self.restore_last_settings_editor() # restore the last settings of the text editor's window...
#-------------------------------- FINAL TOUCHES TO THE WINDOW (EDITOR WINDOW) -------------------------------------------#
self.text_editor.cursorPositionChanged.connect(self.position_print) # (for position_print, see SUPPORT FUNCTIONS in Objects_Module.py)
self.setCentralWidget(self.text_editor) # makes our window a text editor.
#-------------------------------------------------------------- FUNCTIONS FOR MENUBAR OPTIONS-------------------------------------------------------------------------------#
def new_file(self):
warning = QtGui.QMessageBox.question(self, "Warning!!!", "Are you sure?\nplease save all work first....", QtGui.QMessageBox.Yes | QtGui.QMessageBox.No)
if warning == QtGui.QMessageBox.Yes:
self.text_editor.clear()
else:
pass
    def open_file(self):
        # note: File_Dialog_Window is a team created class (see OTHER WINDOW CLASSES in Objects_Module.py)
        # get the name and location of the file to be opened:
        self.open_file_name = str(QtGui.QFileDialog.getOpenFileName(File_Dialog_Window(), "Open File", "", ("Text Files (*.txt);;Any File (*.*)")))
        if not self.open_file_name: # the user cancelled the dialog, so there is nothing to open
            return
        with open(self.open_file_name, "r") as opened_file: # open the file in read only mode
            text = opened_file.read()
        self.text_editor.setPlainText(text) # set the text in the file as the text in the editor window
        self.current_file = open(self.open_file_name, "r+") # reopen the file for future saves
    def save_file(self):
        try: # try checking if there is a variable named self.current_file
            self.current_file
        except AttributeError: # except, if there is an attribute error (no file was opened).
            self.save_as_file() # (for save_as_file function, see below)
        else: # else if everything goes ok (there is a self.current_file variable), then:
            if self.current_file.closed:
                self.save_as_file() # (for save_as_file function, see below)
            else:
                text = self.text_editor.toPlainText() # get the text currently in the editor...
                try: # try overwriting the file from the beginning:
                    self.current_file.seek(0)
                    self.current_file.write(text)
                    self.current_file.truncate() # drop any leftover text from a longer, earlier version
                    self.current_file.flush()
                except IOError: # except, if there is an input output error
                    self.save_as_file() # (for save_as_file function, see below)
    def save_as_file(self):
        # note: File_Dialog_Window is a team created class (see OTHER WINDOW CLASSES in Objects_Module.py)
        # get the name and location of the file to be saved:
        self.save_file_name = str(QtGui.QFileDialog.getSaveFileName(File_Dialog_Window(), "Save As File", "Document", ("Text Files (*.txt);;Any File (*.*)")))
        if not self.save_file_name: # the user cancelled the dialog, so there is nothing to save
            return
        with open(self.save_file_name, "w") as saved_file:
            text = self.text_editor.toPlainText() # get the text currently in the editor...
            saved_file.write(text) # write the text in the editor to the file
        self.current_file = open(self.save_file_name, "r+") # reopen the file for future saves
def print_preview(self):
# note: paint_page_view is a team created function, see SUPPORT FUNCTIONS in Objects_Module.py
print_preview_dialog = QtGui.QPrintPreviewDialog()
print_preview_dialog.paintRequested.connect(self.paint_page_view) # whenever print_preview_dialog is created supply current page_view via paint_page_view method...
print_preview_dialog.exec_()
    def print_doc(self):
        print_dialog = QtGui.QPrintDialog()
        if print_dialog.exec_() == QtGui.QDialog.Accepted: # if a printer is selected successfully,
            self.text_editor.print_(print_dialog.printer()) # print everything in the text_editor on the printer chosen by the user in print_dialog
def exit_app(self):
# note: save_current_settings() is a team created function, see SUPPORT FUNCTIONS in Objects_Module.py
self.save_current_settings()
warning = QtGui.QMessageBox.question(self, "Warning!!!", "Are you sure you want to quit?\nplease save all work before closing....", QtGui.QMessageBox.Yes | QtGui.QMessageBox.No)
if warning == QtGui.QMessageBox.Yes:
try: # try closing the current file:
self.current_file.close()
except AttributeError: # except, if there is an attribute error (no file was opened).
QtCore.QCoreApplication.instance().quit()
            else: # else, if self.current_file is successfully closed:
QtCore.QCoreApplication.instance().quit()
else: # if, No button is clicked:
pass # do, nothing.
def find(self):
editor = self.text_editor # allows us to access editor window through a variable named editor
        # note: Find_Dialog is a team created class (see OTHER WINDOW CLASSES in Objects_Module.py)
find_dialog = Find_Dialog(self)
self.find_dialog = find_dialog # allows us to access Find_Dialog class through a variable named self.find_dialog
        def find_text():
            # gets the word to be found from the find dialog and stores it in find_word:
            find_word = find_dialog.find_input.text()
            # Note: case_sensitive, whole_words_only and direction are all imported variables (see Variables_Module.py)
            # These variables are manipulated from the check boxes in the find dialog window
            # flag collects the find settings; the options must be combined with
            # bitwise OR (|), not the boolean "and" operator:
            flag = QtGui.QTextDocument.FindFlags()
            if direction == "Backward":
                flag |= QtGui.QTextDocument.FindBackward
            if case_sensitive:
                flag |= QtGui.QTextDocument.FindCaseSensitively
            if whole_words_only:
                flag |= QtGui.QTextDocument.FindWholeWords
            editor.find(find_word, flag)
        def replace_text():
            # gets the replacement word from the find dialog and stores it in replace_word:
            replace_word = find_dialog.replace_input.text()
#-------------------------------------------------------------------------#
#-- when the find button is clicked in the find dialog, if a word is --#
#-- found. Then, it is selected automatically and thus, the cursor --#
#-- has a selection. However, if no matching word is found the cursor --#
#-- will have no selection --#
#-------------------------------------------------------------------------#
if editor.textCursor().hasSelection():
editor.insertPlainText(replace_word) # replace the selection with the word to be replaced with
else:
# show a message:
                message = QtGui.QMessageBox(QtGui.QMessageBox.Information, "Error!!!", "No text was found to be replaced.\nTry finding the word again, then replace it!", QtGui.QMessageBox.Ok)
message.setWindowIcon(QtGui.QIcon("Icons/Icon.ico"))
message.exec_()
        def replace_all():
            # get the words to be found and replaced from the find dialog:
            find_word = find_dialog.find_input.text()
            replace_word = find_dialog.replace_input.text()
            new_text = editor.toPlainText().replace(find_word, replace_word) # read the current text, not a stale copy
            editor.clear()
            editor.insertPlainText(new_text) # add the new text to the editor window
self.find_dialog.find_btn.clicked.connect(find_text)
self.find_dialog.find_next_btn.clicked.connect(find_text)
self.find_dialog.replace_btn.clicked.connect(replace_text)
self.find_dialog.replace_all_btn.clicked.connect(replace_all)
return self.find_dialog
def get_font_choice(self):
font, valid = QtGui.QFontDialog.getFont()
if valid:
self.text_editor.setFont(font)
# add a value named "Editor_Font" to settings and set that value to the font chosen by user in QFontDialog:
settings.setValue("Editor_Font", font)
    def get_font_color(self):
        color = QtGui.QColorDialog.getColor()
        if color.isValid(): # ignore the choice if the user cancelled the color dialog
            # change the value of self.font_color to the name of the color chosen by user:
            self.font_color = color.name()
            # set the stylesheet of the text editor with the same background color but new font color:
            self.text_editor.setStyleSheet("border: none;background: %s;color: %s;" %(self.back_color, self.font_color))
    def get_back_color(self):
        bgcolor = QtGui.QColorDialog.getColor()
        if bgcolor.isValid(): # ignore the choice if the user cancelled the color dialog
            # change the value of self.back_color to the name of the color chosen by user:
            self.back_color = bgcolor.name()
            # set the stylesheet of the text editor with the same font color but new background color:
            self.text_editor.setStyleSheet("border: none;background: %s;color: %s;" %(self.back_color, self.font_color))
def set_night_theme(self, isChecked):
#----------------------------------------------------------#
#-- This function is called by the checkbox of view menu --#
#-- named "Night Theme" and this function acts according --#
#-- to the current state of that checkbox. For more info --#
#-- see add_night_theme_option_view_menu() function in --#
#-- Functions_Module.py --#
#----------------------------------------------------------#
        # Note: isChecked is the boolean delivered by the checkbox's signal; it is True if the box is checked and False otherwise:
if isChecked:
# change the values of self.back_color and self.font_color variables to black and white respectively:
self.back_color = "black"
self.font_color = "white"
# set the stylesheet of the text editor according to the changed values of self.back_color and self.font_color:
self.text_editor.setStyleSheet("border: none;background: %s;color: %s;" %(self.back_color, self.font_color))
# add a value named "Night_Btn" to settings and set that value to the current status of the checkbox that is "checked":
settings.setValue("Night_Btn", "checked")
else:
# set the stylesheet of the text editor back to default :
self.text_editor.setStyleSheet("border: none;background: none;color: none;")
# add a value named "Night_Btn" to settings and set that value to the current status of the checkbox that is "unchecked":
settings.setValue("Night_Btn", "unchecked")
def add_statusbar(self, isChecked):
#----------------------------------------------------------#
#-- This function is called by the checkbox of view menu --#
#-- named "Statusbar" and this function acts according --#
#-- to the current state of that checkbox. For more info --#
#-- see add_statusbar_checkbox_view_menu() function in --#
#-- Functions_Module.py --#
#----------------------------------------------------------#
        # Note: isChecked is the boolean delivered by the checkbox's signal; it is True if the box is checked and False otherwise:
if isChecked:
self.statusbar.show()
# add a value named "Status_Btn" to settings and set that value to the current status of the checkbox that is "checked":
settings.setValue("Status_Btn", "checked")
else:
self.statusbar.hide()
# add a value named "Status_Btn" to settings and set that value to the current status of the checkbox that is "unchecked":
settings.setValue("Status_Btn", "unchecked")
    def about(self):
        # note: About_Window is a team created class (see OTHER WINDOW CLASSES in Objects_Module.py)
        # store the window in an attribute named differently from this method,
        # so that the bound method is not shadowed after the first call:
        self.about_window = About_Window()
        return self.about_window
#------------------------------------------------------------- SUPPORT FUNCTIONS ---------------------------------------------------------------------#
def save_current_settings(self):
# Note: settings is an imported variable (see Variables_Module.py)
settings.setValue("Main_Size", self.size()) # add a value named "Main_Size" to settings and set that value to the current size of the main window.
settings.setValue("Main_Position", self.pos()) # add a value named "Main_Position" to settings and set that value to the current position of the main window.
settings.setValue("StyleSheet", self.text_editor.styleSheet()) # add a value named "StyleSheet" to settings and set that value to the current stylesheet of the editor
if settings.value("Runs").isNull(): # if a value named Runs does not exist in settings (this is the first time the code is run)
settings.setValue("Runs", int(1)) # create a value named Runs in settings and set its value to integer 1 (the current no. of runs)
elif (settings.value("Runs").toInt()) >= 1: # else if a value named "Runs" does exist in settings, check if its value is greater than 1.
runs, can_convert = (settings.value("Runs").toInt()) # can_convert is a property of integer setting values that returns true if it can be converted
if can_convert == True:
settings.setValue("Runs", int(runs + 1)) # add 1 to the number of runs before closing the app.
else:
settings.setValue("Runs", int(1)) # the else command is just a safety measure for any unexpected exceptions or falts...
if self.isMaximized(): # if self (here, our Window Class) is maximized:
settings.setValue("State", "Maximized") # create a value named State in settings and set its value to "Maximized"
else:
settings.setValue("State", "False")
def paint_page_view(self, printer):
self.text_editor.print_(printer) # print current page view using the given printer
def restore_last_settings_editor(self):
if (settings.value("Runs").toInt()) >= 1: # check if the number of runs is equal to or greater than 1,
self.text_editor.setStyleSheet(settings.value("StyleSheet").toString())
if settings.value("Editor_Font").isValid():
self.text_editor.setFont(QtGui.QFont(settings.value("Editor_Font")))
else:
pass # do nothing...
def position_print(self):
line = self.text_editor.textCursor().blockNumber()
col = self.text_editor.textCursor().columnNumber()
cursor_position = ("Line: %s | Column: %s" %(str(line), str(col)))
self.statusbar.showMessage(cursor_position) # shows the cursor position on statusbar
def closeEvent(self, event): # if user tries to close self (here, our Window class) this function is executed.
event.ignore()
self.exit_app() # note: exit_app() is a team created function, see FUNCTIONS FOR MENUBAR OPTIONS in Objects_Module.py
#------------------------------------------------------------------------------------- OTHER WINDOW CLASSES --------------------------------------------------------------------------#
class File_Dialog_Window(QtGui.QWidget): # defines a subclass of QWidget named File_Dialog_Window
# This method defines everything that is to be executed automatically when the class is initialized:
def __init__(self):
super(File_Dialog_Window, self).__init__() # This makes the class inherit functions from its upper class (Here, QWidget Class):
self.move(50, 50)
self.setWindowIcon(QtGui.QIcon("Icons/Icon.ico"))
self.show()
class About_Window(QtGui.QWidget): # defines a subclass of QWidget named About_Window
# This method defines everything that is to be executed automatically when the class is initialized:
def __init__(self):
super(About_Window, self).__init__() # This makes the class inherit functions from its upper class (Here, QWidget Class):
if settings.value("Runs").isNull(): # if a value named Runs does not exist in settings (this is the first time the code is run)
# then, set these custom values as the x, y positions and height, width of the Window class:
# Note: QRect converts integer values into a rectangle form which can be used to visualize a window:
self.setGeometry(QtCore.QRect(50, 50, 350, 110))
else:
            self.restore_last_settings_AbWindow() # restore the last settings of About_Window...
self.setWindowTitle("About")
self.setWindowIcon(QtGui.QIcon("Icons/Icon.ico"))
self.interface()
self.show()
def restore_last_settings_AbWindow(self):
if (settings.value("Runs").toInt()) >= 1: # check if the number of runs is equal to or greater than 1:
# If yes, then set the position of the window according to the last values present in the settings named About_Position:
# Note: .toPoint converts the values in settings to a QPoint which can be used to move the window.
self.move(settings.value("About_Position").toPoint())
self.resize(350, 110)
        else: # the else branch is just a safety measure for any unexpected exceptions or faults...
self.setGeometry(QtCore.QRect(50, 50, 350, 110))
def interface(self):
para = "An open source project by A.E.R.T team. \nIts a fully functional text editor coded in python \nand licensed under unlicense."
name = QtGui.QLabel(self)
about_text = QtGui.QLabel(self)
name.setStyleSheet("font-family: georgia;color: blue;font: 18pt")
about_text.setStyleSheet("font-family: georgia;font: 12pt")
name.setText("EditOS")
about_text.setText(para)
name.resize(name.sizeHint())
about_text.resize(about_text.sizeHint())
name.move(40, 7)
about_text.move(5, 45)
logo = QtGui.QLabel(self)
logo.setPixmap(QtGui.QPixmap("Icons/Large_Icon.ico"))
logo.move(5, 5)
logo.resize(logo.sizeHint())
logo.show()
# this saves the current settings in registry and then, closes the window:
def close_window(self):
# Note: settings is an imported variable (see Variables_Module.py)
settings.setValue("About_Position", self.pos()) # add a value named "About_Position" to settings and set that value to the current position of the window
self.hide()
def closeEvent(self, event): # if user tries to close self (here, our About_Window class) this function is executed.
event.ignore()
self.close_window()
class Find_Dialog(QtGui.QDialog): # defines a subclass of QDialog named Find_Dialog
def __init__(self, parent = None):
QtGui.QDialog.__init__(self, parent) # This makes the class inherit functions from its upper class (Here, QDialog Class):
if settings.value("Runs").isNull(): # if a value named Runs does not exist in settings (this is the first time the code is run)
# then, set these custom values as the x, y positions and height, width of the Window class:
# Note: QRect converts integer values into a rectangle form which can be used to visualize a window:
self.setGeometry(QtCore.QRect(50, 50, 400, 220))
else:
            self.restore_last_settings_FdWindow() # restore the last settings of Find_Dialog...
self.setWindowTitle("Find")
self.setWindowIcon(QtGui.QIcon("Icons/Icon.ico"))
self.add_find_interface()
self.show()
def restore_last_settings_FdWindow(self):
if (settings.value("Runs").toInt()) >= 1: # check if the number of runs is equal to or greater than 1:
# If yes, then set the position of the window according to the last values present in the settings named Find_Position:
# Note: .toPoint converts the values in settings to a QPoint which can be used to move the window.
self.move(settings.value("Find_Position").toPoint())
self.resize(400, 220)
        else: # the else branch is just a safety measure for any unexpected exceptions or faults...
self.setGeometry(QtCore.QRect(50, 50, 400, 220))
    # Note: width() and height() deliberately shadow QWidget.width()/height(),
    # so the layout arithmetic below always works on floats of the frame geometry:
    def width(self):
        return float(self.frameGeometry().width())
    def height(self):
        return float(self.frameGeometry().height())
def paintEvent(self, event): # creates the line for direction option
painter = QtGui.QPainter()
painter.begin(self)
pen = QtGui.QPen(QtGui.QColor(211, 211, 211))
painter.setPen(pen)
painter.drawLine(QtCore.QPoint(int(self.width() / 40.0), int(self.height() /3.5)), QtCore.QPoint(int(self.width() / 20.0), int(self.height() /3.5)))
painter.drawLine(QtCore.QPoint(int(self.width() / 5.7), int(self.height() /3.5)), QtCore.QPoint(int(self.width() / 1.16), int(self.height() /3.5)))
painter.drawLine(QtCore.QPoint(int(self.width() / 40.0), int(self.height() /3.5)), QtCore.QPoint(int(self.width() / 40.0), int(self.height() / 2.15)))
painter.drawLine(QtCore.QPoint(int(self.width() / 40.0), int(self.height() / 2.15)), QtCore.QPoint(int(self.width() / 1.16), int(self.height() / 2.15)))
painter.drawLine(QtCore.QPoint(int(self.width() / 1.16), int(self.height() /3.5)), QtCore.QPoint(int(self.width() / 1.16), int(self.height() / 2.15)))
painter.end()
def add_find_interface(self):
find_label = QtGui.QLabel("Search For: ", self)
self.find_label = find_label
self.find_label.move(int(self.width() / 20.0), int(self.height() / 15.5))
self.find_label.resize(self.find_label.sizeHint())
find_input = QtGui.QLineEdit(self)
self.find_input = find_input
self.find_input.setGeometry(int(self.width() / 4.0), int(self.height() / 20.0), int(self.width() / 1.6), int(self.height() / 8.8))
find_btn = QtGui.QPushButton("Find", self)
self.find_btn = find_btn
self.find_btn.move(int(self.width() / 2.9), int(self.height() / 5.0))
self.find_btn.resize(self.find_btn.sizeHint())
find_next_btn = QtGui.QPushButton("Find Next", self)
self.find_next_btn = find_next_btn
self.find_next_btn.move(int(self.width() / 1.7), int(self.height() / 5.0))
self.find_next_btn.resize(self.find_next_btn.sizeHint())
direction_label = QtGui.QLabel("Direction: ", self)
self.direction_label = direction_label
self.direction_label.move(int(self.width() / 17.0), int(self.height() / 3.2))
self.direction_label.resize(self.direction_label.sizeHint())
backwards_radio_btn = QtGui.QRadioButton("Backward", self)
self.backwards_radio_btn = backwards_radio_btn
self.backwards_radio_btn.move(int(self.width() / 4.5), int(self.width() / 4.4))
self.backwards_radio_btn.resize(self.backwards_radio_btn.sizeHint())
self.backwards_radio_btn.toggle()
self.backwards_radio_btn.toggled.connect(self.set_direction)
forwards_radio_btn = QtGui.QRadioButton("Forward", self)
self.forwards_radio_btn = forwards_radio_btn
self.forwards_radio_btn.move(int(self.width() / 2.0), int(self.width() / 4.4))
self.forwards_radio_btn.resize(self.forwards_radio_btn.sizeHint())
self.forwards_radio_btn.toggled.connect(self.set_direction)
replace_label = QtGui.QLabel("Replace By: ", self)
self.replace_label = replace_label
self.replace_label.move(int(self.width() / 20.0), int(self.height() / 1.65))
self.replace_label.resize(self.replace_label.sizeHint())
replace_input = QtGui.QLineEdit(self)
self.replace_input = replace_input
self.replace_input.setGeometry(int(self.width() / 4.0), int(self.height() / 1.7), int(self.width() / 1.6), int(self.height() / 8.8))
replace_btn = QtGui.QPushButton("Replace", self)
self.replace_btn = replace_btn
self.replace_btn.move(int(self.width() / 2.9), int(self.height() / 1.34))
self.replace_btn.resize(self.replace_btn.sizeHint())
replace_all_btn = QtGui.QPushButton("Replace All", self)
self.replace_all_btn = replace_all_btn
self.replace_all_btn.move(int(self.width() / 1.7), int(self.height() / 1.34))
self.replace_all_btn.resize(self.replace_all_btn.sizeHint())
case_check = QtGui.QCheckBox("Case sensitive", self)
self.case_check = case_check
self.case_check.move(int(self.width() / 40.0), int(self.height() / 1.1))
self.case_check.stateChanged.connect(self.case_sense)
whole_word_opt = QtGui.QCheckBox("Whole words only",self)
self.whole_word_opt = whole_word_opt
self.whole_word_opt.move(int(self.width() / 3.7), int(self.height() / 1.1))
self.whole_word_opt.stateChanged.connect(self.whole_word_sense)
def set_direction(self, isChecked):
#--------------------------------------------------#
#-- This function is called by either --#
#-- backwards_radio_btn or forwards_radio_btn --#
#-- and this function acts according --#
#-- to the current state of those radio buttons. --#
#-- For the coding of these buttons, see up --#
#-- In add_find_interface of Find_Dialog --#
#--------------------------------------------------#
        # Note: isChecked is the boolean delivered by the radio buttons' toggled signal; it is True if the button is checked and False otherwise:
global direction # Note: direction is an imported variable (see Variables_Module.py)
if isChecked:
if direction == "Forward":
direction = "Backward"
elif direction == "Backward":
direction = "Forward"
else:
direction = "Backward"
else:
pass
    def case_sense(self, state):
        #--------------------------------------------------------#
        #-- This function is called by the case_check checkbox --#
        #-- and this function acts according to                 --#
        #-- the current state of that checkbox.                 --#
        #-- For the coding of this checkbox,                    --#
        #-- See up in add_find_interface of Find_Dialog         --#
        #--------------------------------------------------------#
        # Note: state is the value delivered by the checkbox's stateChanged signal; it equals QtCore.Qt.Checked when the box is checked:
global case_sensitive # Note: case_sensitive is an imported variable (see Variables_Module.py)
if state == QtCore.Qt.Checked:
case_sensitive = True
else:
case_sensitive = False
    def whole_word_sense(self, state):
        #----------------------------------------------------------#
        #-- This function is called by whole_word_opt checkbox    --#
        #-- and this function acts according to                   --#
        #-- the current state of that checkbox.                   --#
        #-- For the coding of this checkbox,                      --#
        #-- See up in add_find_interface of Find_Dialog           --#
        #----------------------------------------------------------#
        # Note: state is the value delivered by the checkbox's stateChanged signal; it equals QtCore.Qt.Checked when the box is checked:
global whole_words_only # Note: whole_words_only is an imported variable (see Variables_Module.py)
if state == QtCore.Qt.Checked:
whole_words_only = True
else:
whole_words_only = False
# this saves the current settings in registry and then, closes the window:
def close_window(self):
# Note: settings is an imported variable (see Variables_Module.py)
settings.setValue("Find_Position", self.pos()) # add a value named "Find_Position" to settings and set that value to the current position of the window
self.hide()
def closeEvent(self, event): # if user tries to close self (here, our Find_Dialog class) this function is executed.
event.ignore()
self.close_window()
#-------x-----------------------x-------------------------x--------------------------THE END--------------------x---------------------x-----------------------------------x---------------------x------------------x------------#
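#----------------------------------------------------------------- LAUNCH SKETCH (hedged) -----------------------------------------------------------------#
# This module only defines the window classes, so a QApplication is presumably
# created elsewhere (the Main_Module.py name below is an assumption):
#
# app = QtGui.QApplication(sys.argv)
# window = Window()
# sys.exit(app.exec_())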
|
python
|
import os
# Database connection setup
class Config(object):
SERVER = ''
DATABASE = ''
DRIVER = ''
USERNAME = ''
PASSWORD = ''
SQLALCHEMY_DATABASE_URI = f'mssql+pyodbc://{USERNAME}:{PASSWORD}@{SERVER}/{DATABASE}?driver={DRIVER}'
SQLALCHEMY_TRACK_MODIFICATIONS = False
DEBUG = True
    SECRET_KEY = os.environ.get('SECRET_KEY') or '' # fall back to an empty secret key if the environment variable is not set
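# A minimal usage sketch, assuming this Config is consumed by a Flask app with
# Flask-SQLAlchemy (Flask is an assumption; it is not imported by this module):
if __name__ == '__main__':
    from flask import Flask
    app = Flask(__name__)
    app.config.from_object(Config) # Flask reads the UPPERCASE attributes defined above
    print(app.config['SQLALCHEMY_DATABASE_URI'])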
|
python
|
# Count all paths through the cave graph, reading the edge list from the
# matching '-input' file that sits next to this script.
filename = __file__[:-5] + '-input'
with open(filename) as f:
lines = f.read().splitlines()
lines = list(map(lambda s: s.split('-'), lines))
# Build an undirected adjacency map, but never add edges leading back into
# 'start' or out of 'end':
connections = {}
for line in lines:
if line[0] not in connections and line[1] != 'start' and line[0] != 'end':
connections[line[0]] = [line[1]]
elif line[1] != 'start' and line[0] != 'end':
connections[line[0]].append(line[1])
if line[1] not in connections and line[0] != 'start' and line[1] != 'end':
connections[line[1]] = [line[0]]
elif line[0] != 'start' and line[1] != 'end':
connections[line[1]].append(line[0])
routes = []
def find_all_paths(start, end, path, twice):
    # Depth-first search: big caves (uppercase names) may be revisited freely,
    # and a single small cave may be visited twice while 'twice' is still False.
path.append(start)
if start == end:
routes.append(path)
else:
for neighbor in connections[start]:
if (neighbor in path and neighbor.isupper()) or neighbor not in path:
find_all_paths(neighbor, end, path[:], twice)
elif (neighbor in path and not twice):
find_all_paths(neighbor, end, path[:], not twice)
find_all_paths('start', 'end', [], False)
print(len(routes))
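# Worked example, assuming the small cave map from the puzzle statement
# (start-A, start-b, A-c, A-b, b-d, A-end, b-end): allowing one small cave to
# be visited twice yields 36 distinct paths, so this script would print 36.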
|
python
|
import numpy as np
from . InverterException import InverterException
from . InputData import InputData
class Image(InputData):
"""
This class represents a camera image and can be used as input to the
various inversion algorithms. Images can be created directly, or by
importing and filtering a video.
"""
def __init__(self, data):
"""
Constructor.
Args:
data (numpy.ndarray): Raw image data, or Image object to copy.
"""
if data.ndim != 2:
raise InverterException("Invalid dimensions of image: {}. Image must have exactly two dimensions.".format(data.ndim))
self.data = data
self.pixels = data.shape
self.subset = (slice(None), slice(None))
def get(self):
"""
Returns:
numpy.ndarray: the image data, or the previously specified subset of the image data.
"""
return self.data[self.subset]
def setSubset(self, x, y=None, w=None, h=None):
"""
Specifies which subset of the image to return when
'get()' is called. Calling this method as 'setSubset(None)'
resets any previously set subset.
Args:
x (int): X axis offset.
y (int): Y axis offset.
w (int): Number of pixels to pick along X axis.
h (int): Number of pixels to pick along Y axis.
"""
if (x is None) and (y is None) and (w is None) and (h is None):
self.subset = (slice(None), slice(None))
else:
self.subset = (slice(x, x+w), slice(y, y+h))
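# A hedged usage sketch (assumes a 2-D numpy array; note that setSubset's x
# indexes axis 0 and y indexes axis 1, as defined above):
#
# img = Image(np.zeros((480, 640)))
# img.setSubset(10, 20, 100, 50) # offset (10, 20), 100x50 pixels
# img.get().shape # -> (100, 50)
# img.setSubset(None) # reset; get() returns the full image again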
|
python
|
"""Support for Avanaza Stock sensor."""
|
python
|
import os
import re
import codecs
def isValidLine(line):
    # Keep every line that is not an #include, plus the platform specific
    # includes (PSVita/PS4/Switch/XBoxOne), which must survive into the output:
    if re.search('include "', line) is None:
        return True
    return any(platform in line for platform in ('.PSVita', '.PS4', '.Switch', '.XBoxOne'))
class CreateHeader:
    """Collects filtered lines from several headers and writes them out as a
    single amalgamated header."""
    def __init__(self):
        self.lines = []
    def addLine(self, line):
        self.lines.append(line)
    def readLines(self, path):
        # 'utf-8_sig' transparently strips a UTF-8 BOM when reading
        with codecs.open(path, 'r', 'utf-8_sig') as f:
            for line in f:
                if isValidLine(line):
                    self.lines.append(line.strip(os.linesep))
    def output(self, path):
        with codecs.open(path, 'w', 'utf-8_sig') as f:
            for line in self.lines:
                f.write(line + os.linesep)
effekseerHeader = CreateHeader()
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.Base.Pre.h')
effekseerHeader.readLines('Effekseer/Effekseer/Utils/Effekseer.CustomAllocator.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.Vector2D.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.Vector3D.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.Color.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.RectF.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.Matrix43.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.Matrix44.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.File.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.DefaultFile.h')
effekseerHeader.readLines('Effekseer/Effekseer/Backend/GraphicsDevice.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.Resource.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.Effect.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.Manager.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.Setting.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.Server.h')
effekseerHeader.readLines('Effekseer/Effekseer/Effekseer.Client.h')
effekseerHeader.addLine('')
effekseerHeader.addLine('#include "Effekseer.Modules.h"')
effekseerHeader.addLine('')
effekseerHeader.output('Effekseer/Effekseer.h')
effekseerSimdHeader = CreateHeader()
effekseerSimdHeader.addLine('#pragma once')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Base.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Float4_Gen.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Float4_NEON.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Float4_SSE.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Int4_Gen.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Int4_NEON.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Int4_SSE.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Bridge_Gen.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Bridge_NEON.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Bridge_SSE.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Vec2f.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Vec3f.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Vec4f.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Mat43f.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Mat44f.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Quaternionf.h')
effekseerSimdHeader.readLines('Effekseer/Effekseer/SIMD/Utils.h')
effekseerSimdHeader.output('Effekseer/Effekseer.SIMD.h')
effekseerModulesHeader = CreateHeader()
effekseerModulesHeader.addLine('#pragma once')
effekseerModulesHeader.addLine('')
effekseerModulesHeader.addLine('#include "Effekseer.h"')
effekseerModulesHeader.addLine('#include "Effekseer.SIMD.h"')
effekseerModulesHeader.addLine('')
effekseerModulesHeader.addLine('// A header to access internal data of effekseer')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Parameter/Effekseer.Parameters.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Renderer/Effekseer.SpriteRenderer.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Renderer/Effekseer.RibbonRenderer.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Renderer/Effekseer.RingRenderer.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Renderer/Effekseer.ModelRenderer.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Renderer/Effekseer.TrackRenderer.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Effekseer.EffectLoader.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Effekseer.TextureLoader.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Model/Model.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Model/ModelLoader.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Effekseer.MaterialLoader.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Model/Model.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Effekseer.Curve.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Effekseer.CurveLoader.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Sound/Effekseer.SoundPlayer.h')
effekseerModulesHeader.readLines('Effekseer/Effekseer/Effekseer.SoundLoader.h')
effekseerModulesHeader.output('Effekseer/Effekseer.Modules.h')
effekseerRendererDX9Header = CreateHeader()
effekseerRendererDX9Header.readLines('EffekseerRendererDX9/EffekseerRenderer/EffekseerRendererDX9.Base.Pre.h')
effekseerRendererDX9Header.readLines('EffekseerRendererCommon/EffekseerRenderer.Renderer.h')
effekseerRendererDX9Header.readLines('EffekseerRendererDX9/EffekseerRenderer/EffekseerRendererDX9.Renderer.h')
effekseerRendererDX9Header.output('EffekseerRendererDX9/EffekseerRendererDX9.h')
effekseerRendererDX11Header = CreateHeader()
effekseerRendererDX11Header.readLines('EffekseerRendererDX11/EffekseerRenderer/EffekseerRendererDX11.Base.Pre.h')
effekseerRendererDX11Header.readLines('EffekseerRendererCommon/EffekseerRenderer.Renderer.h')
effekseerRendererDX11Header.readLines('EffekseerRendererDX11/EffekseerRenderer/EffekseerRendererDX11.Renderer.h')
effekseerRendererDX11Header.output('EffekseerRendererDX11/EffekseerRendererDX11.h')
effekseerRendererDX12Header = CreateHeader()
effekseerRendererDX12Header.readLines('EffekseerRendererDX12/EffekseerRenderer/EffekseerRendererDX12.Base.Pre.h')
effekseerRendererDX12Header.readLines('EffekseerRendererCommon/EffekseerRenderer.Renderer.h')
effekseerRendererDX12Header.readLines('EffekseerRendererDX12/EffekseerRenderer/EffekseerRendererDX12.Renderer.h')
effekseerRendererDX12Header.readLines('EffekseerRendererLLGI/Common.h')
effekseerRendererDX12Header.output('EffekseerRendererDX12/EffekseerRendererDX12.h')
effekseerRendererVulkanHeader = CreateHeader()
effekseerRendererVulkanHeader.readLines('EffekseerRendererVulkan/EffekseerRenderer/EffekseerRendererVulkan.Base.Pre.h')
effekseerRendererVulkanHeader.readLines('EffekseerRendererCommon/EffekseerRenderer.Renderer.h')
effekseerRendererVulkanHeader.readLines('EffekseerRendererVulkan/EffekseerRenderer/EffekseerRendererVulkan.Renderer.h')
effekseerRendererVulkanHeader.readLines('EffekseerRendererLLGI/Common.h')
effekseerRendererVulkanHeader.output('EffekseerRendererVulkan/EffekseerRendererVulkan.h')
effekseerRendererGLHeader = CreateHeader()
effekseerRendererGLHeader.readLines('EffekseerRendererGL/EffekseerRenderer/EffekseerRendererGL.Base.Pre.h')
effekseerRendererGLHeader.readLines('EffekseerRendererCommon/EffekseerRenderer.Renderer.h')
effekseerRendererGLHeader.readLines('EffekseerRendererGL/EffekseerRenderer/EffekseerRendererGL.Renderer.h')
effekseerRendererGLHeader.output('EffekseerRendererGL/EffekseerRendererGL.h')
effekseerRendererMetalHeader = CreateHeader()
effekseerRendererMetalHeader.readLines('EffekseerRendererMetal/EffekseerRenderer/EffekseerRendererMetal.Base.Pre.h')
effekseerRendererMetalHeader.readLines('EffekseerRendererCommon/EffekseerRenderer.Renderer.h')
effekseerRendererMetalHeader.readLines('EffekseerRendererMetal/EffekseerRenderer/EffekseerRendererMetal.Renderer.h')
effekseerRendererMetalHeader.readLines('EffekseerRendererLLGI/Common.h')
effekseerRendererMetalHeader.output('EffekseerRendererMetal/EffekseerRendererMetal.h')
|
python
|
from __future__ import annotations
from typing import List, Tuple
import ujson
import os.path as path
import stargazing.pomodoro.pomodoro_controller as pomo_pc
CONFIG_FILE_PATH = f"{path.dirname(path.abspath(__file__))}/../config/settings.json"
def get_saved_youtube_player_urls() -> List[str]:
with open(CONFIG_FILE_PATH) as file:
data = ujson.load(file)
return data["saved_youtube_player_urls"]
def get_interval_times() -> List[List[int]]:
with open(CONFIG_FILE_PATH) as file:
data = ujson.load(file)
return data["interval_times"]
def get_last_session_data() -> Tuple[str, pomo_pc.PomodoroIntervalSettings, bool, int]:
with open(CONFIG_FILE_PATH) as file:
data = ujson.load(file)
return (data["last_project_name"], pomo_pc.PomodoroIntervalSettings(*data["last_interval_time"]),
data["last_autostart"], data["last_volume"])
def update_last_session_data(project_name: str, interval_settings: pomo_pc.PomodoroIntervalSettings,
autostart: bool, volume: int) -> None:
with open(CONFIG_FILE_PATH, 'r+') as file:
data = ujson.load(file)
data["last_project_name"] = project_name
data["last_interval_time"] = [
interval_settings.work_secs, interval_settings.break_secs]
data["last_autostart"] = autostart
data["last_volume"] = volume
file.seek(0)
ujson.dump(data, file, indent=4)
file.truncate()
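# For reference, a sketch of the settings.json shape this module assumes,
# inferred from the accessors above (concrete values are hypothetical):
#
# {
#     "saved_youtube_player_urls": ["https://..."],
#     "interval_times": [[1500, 300]],
#     "last_project_name": "my-project",
#     "last_interval_time": [1500, 300],
#     "last_autostart": false,
#     "last_volume": 50
# }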
|
python
|
OUTPUT_ON = b'1'
OUTPUT_OFF = b'0'
OUTPUT_PULSE = b'P'
OUTPUT_CURRENT = b'O'
INPUT_DELTA = b'D'
INPUT_CURRENT = b'C'
TURNOUT_NORMAL = b'N'
TURNOUT_REVERSE = b'R'
IDENTIFY = b'Y'
SERVO_ANGLE = b'A'
SET_TURNOUT = b'T'
GET_TURNOUT = b'G'
CONFIG = b'F'
ACKNOWLEDGE = b'!'
STORE = b'W'
ERRORRESPONSE = b'E'
WARNINGRESPONSE = b'e'
_COMMAND_NAMES = {
    OUTPUT_ON: "OUTPUT_ON",
    OUTPUT_OFF: "OUTPUT_OFF",
    OUTPUT_PULSE: "OUTPUT_PULSE",
    OUTPUT_CURRENT: "OUTPUT_CURRENT",
    INPUT_DELTA: "INPUT_DELTA",
    INPUT_CURRENT: "INPUT_CURRENT",
    TURNOUT_NORMAL: "TURNOUT_NORMAL",
    TURNOUT_REVERSE: "TURNOUT_REVERSE",
    SERVO_ANGLE: "SERVO_ANGLE",
    SET_TURNOUT: "SET_TURNOUT",
    GET_TURNOUT: "GET_TURNOUT",
    IDENTIFY: "IDENTIFY",
    CONFIG: "CONFIG",
    ACKNOWLEDGE: "ACKNOWLEDGE",
    STORE: "STORE",
    ERRORRESPONSE: "ERRORRESPONSE",
    WARNINGRESPONSE: "WARNINGRESPONSE",
}
def commandName(cmd):
    # Map a single-byte command code to its readable name.
    return _COMMAND_NAMES.get(cmd, "UNKNOWN COMMAND: %s" % str(cmd))
|
python
|
import tensorflow as tf
# Stolen from magenta/models/shared/events_rnn_graph
def make_rnn_cell(rnn_layer_sizes,
dropout_keep_prob=1.0,
attn_length=0,
base_cell=tf.contrib.rnn.BasicLSTMCell,
state_is_tuple=False):
cells = []
for num_units in rnn_layer_sizes:
cell = base_cell(num_units, state_is_tuple=state_is_tuple)
cell = tf.contrib.rnn.DropoutWrapper(
cell, output_keep_prob=dropout_keep_prob)
cells.append(cell)
cell = tf.contrib.rnn.MultiRNNCell(cells, state_is_tuple=state_is_tuple)
if attn_length:
cell = tf.contrib.rnn.AttentionCellWrapper(
cell, attn_length, state_is_tuple=state_is_tuple)
return cell
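# A hedged usage sketch (assumes TensorFlow 1.x, where tf.contrib still
# exists; the shapes are arbitrary placeholders):
if __name__ == '__main__':
    inputs = tf.placeholder(tf.float32, [None, 16, 32]) # (batch, time, features)
    cell = make_rnn_cell([64, 64], dropout_keep_prob=0.9)
    outputs, state = tf.nn.dynamic_rnn(cell, inputs, dtype=tf.float32)
    print(outputs.get_shape()) # -> (?, 16, 64)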
|
python
|
from block import Block
from transaction import Transaction
from Crypto.PublicKey import RSA
from Crypto.Hash import SHA256
import bitcoin
class BlockChain:
def __init__(self):
self.chain = []
self.tx_pool = []
self.bits = 2
self.reward = 50
genesis_block = Block(None, self.bits, [])
genesis_block.bits = self.bits
genesis_block.gen_hash()
self.chain.append(genesis_block)
def make_transaction(self, value, receiver_address, sender_address, sender_private_key):
try:
transaction = Transaction(self, value, receiver_address, sender_address, sender_private_key)
except Exception as e:
return str(e)
self.tx_pool.append(transaction)
return transaction.hash
def mining(self, miner_address):
tx_list = self.tx_pool
tx_list.insert(0, Transaction(self, self.reward, miner_address))
self.tx_pool = []
new_block = Block(self.chain[-1].hash, self.bits, tx_list)
        try:
            new_block.gen_hash()
        except Exception: # a bare except would also swallow system-exit signals
            return 'failed to create block'
self.chain.append(new_block)
return new_block.hash
    def get_utxo_list(self, address):
        # Walk the whole chain: collect every output paid to this address,
        # then drop any that a later transaction input has already spent.
        utxo_list = []
for block in self.chain:
for tx in block.transactions:
for i in range(len(tx.outputs)):
if address == tx.outputs[i].to:
utxo_list.append((tx.hash, i, tx.outputs[i].to, tx.outputs[i].value))
for i in range(len(tx.inputs)):
for utxo in utxo_list:
if tx.inputs[i].hash == utxo[0] and tx.inputs[i].n == utxo[1] and tx.inputs[i].address == utxo[2] and tx.inputs[i].value == utxo[3]:
utxo_list.remove((tx.inputs[i].hash, tx.inputs[i].n, tx.inputs[i].address, tx.inputs[i].value))
return utxo_list
def get_balance(self, address):
utxo_list = self.get_utxo_list(address)
balance = 0
for utxo in utxo_list:
balance += utxo[3]
return balance
def increase_bits(self):
self.bits += 1
def decrease_bits(self):
self.bits -= 1
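if __name__ == '__main__':
    # Hedged smoke test: assumes Block and Transaction behave as used above;
    # the address string is a hypothetical placeholder.
    chain = BlockChain()
    chain.mining('miner-address')
    print(chain.get_balance('miner-address')) # expected: the block reward, 50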
|
python
|
'''
File name:
+procesamientodatos.py
Description:
+Library with functions for data processing
Methods:
|--+cargar_datos
|--+generar_tablas
|--+almacenar_tablas
'''
# required libraries
import sys, os, glob, datetime as dt
from pyspark.sql import SparkSession, functions as F, window as W, DataFrame as DF
from pyspark.sql.types import (DateType, IntegerType, FloatType, DoubleType, LongType, StringType, StructField, StructType, TimestampType)
from functools import reduce
# Spark session
spark = SparkSession.builder\
.master("local")\
.appName("App#1")\
.config('spark.ui.port', '4050')\
.getOrCreate()
spark.sparkContext.setLogLevel("ERROR")
# function to load data (a list of .json files)
def cargar_datos(files=[]):
try:
        # read the .json files
df1 = spark.read.json(files, multiLine=True)
        # apply the transformations needed to obtain each element of the schema
df1 = df1.withColumn('viajes', F.explode(F.col('viajes'))).select('identificador','viajes.*').orderBy('identificador')
df1.collect()
return [df1]
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
print(exc_type, os.path.split(exc_tb.tb_frame.f_code.co_filename)[1], exc_tb.tb_lineno, exc_obj)
# function to generate the tables with the results of the processed data
def generar_tablas(df=[]):
try:
        # create temporary dataframes that serve as intermediate tables for filtering and aggregating the data
df1a = df[0].withColumnRenamed('codigo_postal_origen','codigo_postal').withColumn('tipo', F.lit('origen'))\
.groupBy('codigo_postal', 'tipo').agg(F.count('codigo_postal').alias('cantidad_viajes'), F.sum(F.col('kilometros')*F.col('precio_kilometro')).alias('ingresos'))
df1b = df[0].withColumnRenamed('codigo_postal_destino','codigo_postal').withColumn('tipo', F.lit('destino'))\
.groupBy('codigo_postal', 'tipo').agg(F.count('codigo_postal').alias('cantidad_viajes'), F.sum(F.col('kilometros')*F.col('precio_kilometro')).alias('ingresos'))
df1c = df[0].select('identificador', 'kilometros', 'precio_kilometro')\
.groupBy('identificador').agg(F.sum('kilometros').alias('cantidad_kms'), F.sum(F.col('kilometros')*F.col('precio_kilometro')).alias('ingresos'))
        # table with the number of trips per postal code
df2 = df1a.union(df1b).select('codigo_postal', 'tipo', 'cantidad_viajes').orderBy(F.col('codigo_postal'), F.col('tipo').desc())
        # table with the total income per postal code
df3 = df1a.union(df1b).select('codigo_postal', 'tipo', F.round('ingresos',2).alias('ingresos')).orderBy(F.col('codigo_postal'), F.col('tipo').desc())
        # table with the number of kms and income per driver identifier
df4 = df1c.select('identificador', F.round('cantidad_kms',2).alias('cantidad_kms'), F.round('ingresos',2).alias('ingresos')).orderBy(F.col('identificador'))
        # table with specific metrics
data = [('persona_con_mas_kilometros', df4.groupBy('identificador').agg(F.max('cantidad_kms')).orderBy(F.col('max(cantidad_kms)').desc()).collect()[0][0]),\
('persona_con_mas_ingresos', df4.groupBy('identificador').agg(F.max('ingresos')).orderBy(F.col('max(ingresos)').desc()).collect()[0][0]),\
('percentil_25', df4.select(F.percentile_approx('ingresos', .25)).collect()[0][0]),\
('percentil_50', df4.select(F.percentile_approx('ingresos', .50)).collect()[0][0]),\
('percentil_75', df4.select(F.percentile_approx('ingresos', .75)).collect()[0][0]),\
('codigo_postal_origen_con_mas_ingresos', df1a.groupBy('codigo_postal').agg(F.max('ingresos')).orderBy(F.col('max(ingresos)').desc()).collect()[0][0]),\
('codigo_postal_destino_con_mas_ingresos', df1b.groupBy('codigo_postal').agg(F.max('ingresos')).orderBy(F.col('max(ingresos)').desc()).collect()[0][0])]
schema = StructType(\
[StructField('tipo_metrica',StringType()),
StructField('valor',StringType()),])
df5 = spark.createDataFrame(data, schema)
        # collect the dataframes into a list for iteration
proceso = [df2, df3, df5]
#
        if 'fecha' in df[0].columns: # code for the metrics table of the Extra Part (a 'fecha' column exists)
window = W.Window.partitionBy('fecha')
dfe1a = df[0].withColumnRenamed('codigo_postal_origen','codigo_postal').withColumn('tipo', F.lit('origen'))\
.groupBy('codigo_postal', 'tipo', 'fecha').agg(F.count('codigo_postal').alias('cantidad_viajes'), F.sum(F.col('kilometros')*F.col('precio_kilometro')).alias('ingresos'))
dfe1b = df[0].withColumnRenamed('codigo_postal_destino','codigo_postal').withColumn('tipo', F.lit('destino'))\
.groupBy('codigo_postal', 'tipo', 'fecha').agg(F.count('codigo_postal').alias('cantidad_viajes'), F.sum(F.col('kilometros')*F.col('precio_kilometro')).alias('ingresos'))
dfe1c = df[0].select('identificador', 'kilometros', 'precio_kilometro', 'fecha')\
.groupBy('identificador', 'fecha').agg(F.sum('kilometros').alias('cantidad_kms'), F.sum(F.col('kilometros')*F.col('precio_kilometro')).alias('ingresos'))
            # table with the number of trips per postal code
dfe2 = dfe1a.union(dfe1b).select('codigo_postal', 'tipo', 'cantidad_viajes', 'fecha').orderBy(F.col('codigo_postal'), F.col('tipo').desc(), F.col('fecha'))
            # table with the total income per postal code
dfe3 = dfe1a.union(dfe1b).select('codigo_postal', 'tipo', F.round('ingresos',2).alias('ingresos'), 'fecha').orderBy(F.col('codigo_postal'), F.col('tipo').desc(), F.col('fecha'))
            # table with the number of kms and income per driver identifier
dfe4 = dfe1c.select('identificador', F.round('cantidad_kms',2).alias('cantidad_kms'), F.round('ingresos',2).alias('ingresos'), 'fecha').orderBy(F.col('identificador'), F.col('fecha'))
            # table with specific metrics
met1 = dfe4.groupBy(F.lit('persona_con_mas_kilometros').alias('tipo_metrica'), 'fecha', F.col('identificador').alias('valor')).agg(F.max('cantidad_kms')).orderBy(F.col('max(cantidad_kms)').desc())\
.withColumn('row',F.row_number().over(W.Window.partitionBy('fecha').orderBy(F.col('fecha').desc()))).filter(F.col('row')<=1).drop('row').drop('max(cantidad_kms)').orderBy(F.col('fecha').desc())
met2 = dfe4.groupBy(F.lit('persona_con_mas_ingresos').alias('tipo_metrica'), 'fecha', F.col('identificador').alias('valor')).agg(F.max('ingresos')).orderBy(F.col('max(ingresos)').desc())\
.withColumn('row',F.row_number().over(W.Window.partitionBy('fecha').orderBy(F.col('fecha').desc()))).filter(F.col('row')<=1).drop('row').drop('max(ingresos)').orderBy(F.col('fecha').desc())
met3 = dfe4.groupBy(F.lit('percentil_25').alias('tipo_metrica'), 'fecha').agg(F.percentile_approx('ingresos', .25).alias('valor')).orderBy(F.col('fecha').desc())
met4 = dfe4.groupBy(F.lit('percentil_50').alias('tipo_metrica'), 'fecha').agg(F.percentile_approx('ingresos', .50).alias('valor')).orderBy(F.col('fecha').desc())
met5 = dfe4.groupBy(F.lit('percentil_75').alias('tipo_metrica'), 'fecha').agg(F.percentile_approx('ingresos', .75).alias('valor')).orderBy(F.col('fecha').desc())
met6 = dfe3.where('tipo like "origen"').groupBy(F.lit('codigo_postal_origen_con_mas_ingresos').alias('tipo_metrica'), 'fecha', F.col('codigo_postal').alias('valor')).agg(F.max('ingresos')).orderBy(F.col('max(ingresos)').desc())\
.withColumn('row',F.row_number().over(W.Window.partitionBy('fecha').orderBy(F.col('fecha').desc()))).filter(F.col('row')<=1).drop('row').drop('max(ingresos)').orderBy(F.col('fecha').desc())
met7 = dfe3.where('tipo like "destino"').groupBy(F.lit('codigo_postal_destino_con_mas_ingresos').alias('tipo_metrica'), 'fecha', F.col('codigo_postal').alias('valor')).agg(F.max('ingresos')).orderBy(F.col('max(ingresos)').desc())\
.withColumn('row',F.row_number().over(W.Window.partitionBy('fecha').orderBy(F.col('fecha').desc()))).filter(F.col('row')<=1).drop('row').drop('max(ingresos)').orderBy(F.col('fecha').desc())
dfe5 = reduce(DF.unionAll, [met1, met2, met3, met4, met5, met6, met7])
proceso.append(dfe5)
#
        # using list-map-lambda, the operations run by iterating over the created dataframes
        list(map(lambda x: {x.printSchema(), x.show(50, truncate=False)}, proceso)) # display the schema and the data for each table
return proceso
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
print(exc_type, os.path.split(exc_tb.tb_frame.f_code.co_filename)[1], exc_tb.tb_lineno, exc_obj)
# function to store the dataframes in .csv format
def almacenar_tablas(df=[], files_name=[]):
try:
        # write the files
csv_files=[]
if (len(df)==len(files_name)):
            # execute the write operations by iterating over each object
list(map(lambda x, y: {x.write.csv(y, mode='overwrite')}, df, files_name))
            # run a verification step, reading back each created file
[csv_files.append(spark.read.csv(files_name[i])) for i in range(len(files_name))]
            if csv_files: print('Stored tables: ' + str(files_name))
return csv_files
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
print(exc_type, os.path.split(exc_tb.tb_frame.f_code.co_filename)[1], exc_tb.tb_lineno, exc_obj)
#
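# A hedged usage sketch of the full pipeline (file names are hypothetical):
#
# dfs = cargar_datos(glob.glob('datos/*.json'))
# tablas = generar_tablas(dfs)
# almacenar_tablas(tablas, ['viajes.csv', 'ingresos.csv', 'metricas.csv'])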
|
python
|
"""VIC Emergency Incidents feed entry."""
from typing import Optional, Tuple
import logging
import re
from time import strptime
import calendar
from datetime import datetime
import pytz
from aio_geojson_client.feed_entry import FeedEntry
from geojson import Feature
from markdownify import markdownify
from .consts import ATTR_CATEGORY1, ATTR_CATEGORY2, ATTR_ID, \
ATTR_PUB_DATE, ATTR_SOURCE_TITLE, ATTR_SOURCE_ORG, ATTR_ESTA_ID, \
ATTR_RESOURCES, ATTRIBUTION, ATTR_SIZE, ATTR_SIZE_FMT, ATTR_LOCATION, \
ATTR_STATEWIDE, ATTR_TEXT, ATTR_STATUS, ATTR_TYPE, \
ATTR_WEBBODY, CUSTOM_ATTRIBUTE
_LOGGER = logging.getLogger(__name__)
class VICEmergencyIncidentsFeedEntry(FeedEntry):
"""VIC Emergency Incidents feed entry."""
def __init__(self,
home_coordinates: Tuple[float, float],
feature: Feature):
"""Initialise this service."""
super().__init__(home_coordinates, feature)
@property
def attribution(self) -> Optional[str]:
"""Return the attribution of this entry."""
return ATTRIBUTION
@property
def title(self) -> Optional[str]:
"""Return the attribution of this entry."""
return ATTR_SOURCE_TITLE
@property
def category1(self) -> str:
"""Return the category of this entry."""
return self._search_in_properties(ATTR_CATEGORY1)
@property
def category2(self) -> str:
"""Return the category of this entry."""
return self._search_in_properties(ATTR_CATEGORY2)
@property
def external_id(self) -> str:
"""Return the external id of this entry."""
return self._search_in_properties(ATTR_ID)
@property
def publication_date(self) -> datetime:
"""Return the publication date of this entry."""
publication_date = self._search_in_properties(ATTR_PUB_DATE)
if publication_date:
            # Parse the date. Sometimes it has 'Z' as the timezone, which isn't liked by %z.
            # This gets rid of any ms and the 'Z', which then allows parsing to work.
if publication_date[-1] == 'Z':
date_struct = strptime(publication_date[:-5], "%Y-%m-%dT%H:%M:%S")
else:
date_struct = strptime(publication_date, "%Y-%m-%dT%H:%M:%S%z")
publication_date = datetime.fromtimestamp(calendar.timegm(date_struct), tz=pytz.utc)
return publication_date
@property
def description(self) -> str:
"""Return the description of this entry."""
return self._search_in_properties(ATTR_TEXT)
def _search_in_description(self, regexp):
"""Find a sub-string in the entry's description."""
if self.description:
match = re.search(regexp, self.description)
if match:
return match.group(CUSTOM_ATTRIBUTE)
return None
@property
def location(self) -> str:
"""Return the location of this entry."""
return self._search_in_properties(ATTR_LOCATION)
@property
def status(self) -> str:
"""Return the status of this entry."""
return self._search_in_properties(ATTR_STATUS)
@property
def type(self) -> str:
"""Return the type of this entry."""
return self._search_in_properties(ATTR_TYPE)
@property
def size(self) -> str:
"""Return the size of this entry."""
return self._search_in_properties(ATTR_SIZE)
@property
def size_fmt(self) -> str:
"""Return the size of this entry."""
return self._search_in_properties(ATTR_SIZE_FMT)
@property
def statewide(self) -> str:
"""Return the size of this entry."""
return self._search_in_properties(ATTR_STATEWIDE)
@property
def source_organisation(self) -> str:
"""Return the responsible agency of this entry."""
return self._search_in_properties(ATTR_SOURCE_ORG)
@property
def source_organisation_title(self) -> str:
"""Return the responsible agency of this entry."""
return self._search_in_properties(ATTR_SOURCE_TITLE)
@property
def resources(self) -> str:
"""Return the responsible agency of this entry."""
return self._search_in_properties(ATTR_RESOURCES)
@property
def etsa_id(self) -> str:
"""Return the responsible agency of this entry."""
return self._search_in_properties(ATTR_ESTA_ID)
@property
def advice_html(self) -> str:
"""Return the responsible agency of this entry."""
return self._search_in_properties(ATTR_WEBBODY)
@property
def advice_markdown(self) -> str:
"""Return the responsible agency of this entry."""
if self._search_in_properties(ATTR_WEBBODY) is None:
return None
return markdownify(self._search_in_properties(ATTR_WEBBODY))
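# Example usage (a sketch; `feature` would be a GeoJSON Feature taken from the
# VIC Emergency incidents feed, and the home coordinates below are illustrative):
# entry = VICEmergencyIncidentsFeedEntry((-37.81, 144.96), feature)
# print(entry.publication_date, entry.category1, entry.location)
# print(entry.advice_markdown)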
|
python
|
import networkx as nx
from . import utils
# ===== asexual lineage metrics =====
def get_asexual_lineage_length(lineage):
"""Get asexual lineage length.
Will check that given lineage is an asexual lineage.
Args:
lineage (networkx.DiGraph): an asexual lineage
Returns:
length (int) of given lineage
"""
if not utils.is_asexual_lineage(lineage): raise Exception("the given lineage is not an asexual lineage")
return len(lineage.nodes)
def get_asexual_lineage_num_discrete_state_changes(lineage, attribute_list):
"""Get the number of discrete state changes from an asexual lineage.
    State is described by the aggregation of all attributes given by the attribute list.
Args:
lineage (networkx.DiGraph): an asexual lineage
attribute_list (list): list of attributes (strings) to use when defining
a state
Returns:
Returns the number of discrete states along the lineage.
"""
# Check that lineage is an asexual lineage.
if not utils.is_asexual_lineage(lineage): raise Exception("the given lineage is not an asexual lineage")
# Check that all nodes have all given attributes in the attribute list
if not utils.all_taxa_have_attributes(lineage, attribute_list): raise Exception("given attributes are not universal among all taxa along the lineage")
# get the first state (root node)
lineage_id = utils.get_root_ids(lineage)[0]
num_states = 1
cur_state = [lineage.nodes[lineage_id][attr] for attr in attribute_list]
# count the number of state changes moving down the lineage
while True:
successor_ids = list(lineage.successors(lineage_id))
if len(successor_ids) == 0: break # We've hit the last thing!
lineage_id = successor_ids[0]
state = [lineage.nodes[lineage_id][attr] for attr in attribute_list]
if cur_state != state:
cur_state = state
num_states += 1
return num_states
def get_asexual_lineage_num_discrete_unique_states(lineage, attribute_list):
"""Get the number of discrete unique states along a lineage where what it
means to be a state is defined by attribute_list.
Args:
lineage (networkx.DiGraph): an asexual lineage
attribute_list (list): list of attributes (strings) to use when defining
a state
Returns:
The number of discrete unique states found along the lineage.
"""
# Check that lineage is an asexual lineage.
if not utils.is_asexual_lineage(lineage): raise Exception("the given lineage is not an asexual lineage")
# Check that all nodes have all given attributes in the attribute list
if not utils.all_taxa_have_attributes(lineage, attribute_list): raise Exception("given attributes are not universal among all taxa along the lineage")
# get the first state (root node)
lineage_id = utils.get_root_ids(lineage)[0]
unique_states = set()
unique_states.add(tuple([lineage.nodes[lineage_id][attr] for attr in attribute_list]))
while True:
successor_ids = list(lineage.successors(lineage_id))
if len(successor_ids) == 0: break # We've hit the last thing!
lineage_id = successor_ids[0]
unique_states.add(tuple([lineage.nodes[lineage_id][attr] for attr in attribute_list]))
return len(unique_states)
def get_asexual_lineage_mutation_accumulation(lineage, mutation_attributes, skip_root=False):
"""Get the distribution of mutation type accumulations over an asexual lineage.
Args:
lineage (networkx.DiGraph): an asexual lineage
mutation_attributes (list of str): what are the mutation count attributes
that we should accumulate over the lineage?
        skip_root (bool): Should we include root node mutation count values in
            our accumulation? Defaults to False.
Returns:
A dictionary indexed by mutation types (mutation_attributes) where each
value in the dictionary is the sum of that type of mutation along the lineage.
"""
# Check that lineage is an asexual lineage.
if not utils.is_asexual_lineage(lineage): raise Exception("the given lineage is not an asexual lineage")
# Check that all nodes have all given attributes in the attribute list
if not utils.all_taxa_have_attributes(lineage, mutation_attributes): raise Exception("given mutation attributes are not universal among all taxa along the lineage")
# initialize
mut_accumulators = {mut_attr:0 for mut_attr in mutation_attributes}
# get the root node
lineage_id = utils.get_root_ids(lineage)[0]
if not skip_root:
for mut_attr in mutation_attributes:
mut_accumulators[mut_attr] += lineage.nodes[lineage_id][mut_attr]
while True:
successor_ids = list(lineage.successors(lineage_id))
if len(successor_ids) == 0: break # We've hit the last thing!
        lineage_id = successor_ids[0]
for mut_attr in mutation_attributes:
mut_accumulators[mut_attr] += lineage.nodes[lineage_id][mut_attr]
return mut_accumulators
# ===== asexual phylogeny metrics =====
def get_mrca_tree_depth_asexual(phylogeny, ids=None):
"""Get the tree depth of the most recent common ancestor shared by the specified
taxa ids (ids) in an asexual phylogeny (phylogeny).
"""
# Get the id of the most recent common ancestor
mrca_id = utils.get_mrca_id_asexual(phylogeny, ids)
if mrca_id == -1: raise Exception("phylogeny has no common ancestor")
# Calculate distance from root to mrca
cur_id = mrca_id
depth = 0
while True:
ancestor_ids = list(phylogeny.predecessors(cur_id))
if len(ancestor_ids) == 0: break
depth+=1
cur_id = ancestor_ids[0]
return depth
# ===== phylogenetic richness =====
def calc_phylogenetic_diversity_asexual(phylogeny, ids=None):
"""Calculate phylogenetic diversity (i.e., the number of nodes in the minimum
spanning tree from the MRCA to all extant taxa). Currently only for asexual
phylogenies.
(Faith, 1992)
ids gives the set we want to calculate phylogenetic diversity on. i.e.,
we'll get the mrca for those ids and compute the minimum spanning tree
none defaults to including all leaf nodes
"""
# if given no ids, default to leaf taxa; otherwise, validate given ids
    if ids is None:
# Find MRCA on leaf nodes
ids = utils.get_leaf_taxa_ids(phylogeny)
# (1) get the mrca
mrca_id = utils.get_mrca_id_asexual(phylogeny, ids)
if mrca_id == -1: raise Exception("given ids have no common ancestor")
# (2) collect paths from each id to mrca
canopy = set([i for i in ids] + [mrca_id])
for i in ids:
cur_id = i
while True:
ancestor_ids = list(phylogeny.predecessors(cur_id))
if len(ancestor_ids) == 0: break
cur_id = ancestor_ids[0]
# If we've encountered this path before, we can skip the rest because
# we're guaranteed an asexual phylogeny.
if cur_id in canopy: break
canopy.add(cur_id)
# Build a subgraph with only the canopy
canopy_phylo = nx.subgraph(phylogeny, list(canopy))
# Okay, now we can compute the minimum spanning tree.
return len(nx.minimum_spanning_tree(canopy_phylo.to_undirected()).nodes)
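# Example usage (a sketch; builds a toy three-taxon asexual lineage with a
# hypothetical 'genotype' attribute and runs the lineage metrics above on it):
# import networkx as nx
# lineage = nx.DiGraph()
# lineage.add_nodes_from([(0, {"genotype": "A"}), (1, {"genotype": "A"}), (2, {"genotype": "B"})])
# lineage.add_edges_from([(0, 1), (1, 2)])
# get_asexual_lineage_length(lineage)                                     # -> 3
# get_asexual_lineage_num_discrete_state_changes(lineage, ["genotype"])   # -> 2
# get_asexual_lineage_num_discrete_unique_states(lineage, ["genotype"])   # -> 2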
|
python
|
import tqdm
from multiprocessing import Pool
import logging
from dsrt.config.defaults import DataConfig
class Filter:
def __init__(self, properties, parallel=True, config=DataConfig()):
self.properties = properties
self.config = config
self.parallel = parallel
self.init_logger()
def init_logger(self):
self.logger = logging.getLogger()
self.logger.setLevel(self.config['logging-level'])
def transform(self, dialogues):
chunksize=self.config['chunksize']
p = Pool() if self.parallel else Pool(1)
if self.config['filter-long-dialogues']:
self.max_dl = self.config['max-dialogue-length']
self.log('info', 'Filtering long dialogues (> {} utterances) ...'.format(self.max_dl))
res = []
total = len(dialogues)
self.log('info', '[filter running on {} cores]'.format(p._processes))
for d in tqdm.tqdm(p.imap(self.filter_long_dialogues, dialogues, chunksize=chunksize), total=total):
res.append(d)
dialogues = list(filter(None, res))
if self.config['filter-dialogues-with-long-utterances']:
self.max_ul = self.config['max-utterance-length']
self.log('info', 'Filtering dialogues with long utterances (> {} tokens) ...'.format(self.max_ul))
res = []
total = len(dialogues)
self.log('info', '[filter running on {} cores]'.format(p._processes))
for d in tqdm.tqdm(p.imap(self.filter_dialogues_with_long_utterances, dialogues, chunksize=chunksize), total=total):
res.append(d)
dialogues = list(filter(None, res))
p.close()
p.join()
return dialogues
    def filter_long_dialogues(self, dialogue):
        # keep the dialogue only if it is short enough; None is filtered out later
        if len(dialogue) > self.max_dl:
            return None
        return dialogue
def filter_dialogues_with_long_utterances(self, dialogue):
for utterance in dialogue:
if len(utterance) > self.max_ul:
return None
return dialogue
####################
# UTILITIES #
####################
def log(self, priority, msg):
"""
Just a wrapper, for convenience.
NB1: priority may be set to one of:
- CRITICAL [50]
- ERROR [40]
- WARNING [30]
- INFO [20]
- DEBUG [10]
- NOTSET [0]
Anything else defaults to [20]
NB2: the levelmap is a defaultdict stored in Config; it maps priority
strings onto integers
"""
        # the levelmap (see NB2) maps priority strings onto logging integers;
        # the 'levelmap' config key name is an assumption based on that note
        self.logger.log(self.config['levelmap'][priority], msg)
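# Example usage (a sketch; `dialogues` is a list of dialogues, each a list of
# utterances; the `properties` value is an assumption, as it is unused above):
# filt = Filter(properties={}, parallel=True)
# kept = filt.transform(dialogues)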
|
python
|
import os
import numba
import torch
import torch.nn as nn
from torch.optim.lr_scheduler import ReduceLROnPlateau
from optimizer import *
from trainer_callbacks import *
from utils import *
#%% #################################### Model Trainer Class ####################################
class ModelTrainer():
def __init__(self,
model=None,
Loaders=[None,[]],
metrics=None,
fold=None,
lr=None,
epochsTorun=None,
checkpoint_saving_path=None,
resume_train_from_checkpoint=False,
resume_checkpoint_path=None,
test_run_for_error=False,
batch_size=None,
do_grad_accum=False,
grad_accum_steps=4,
use_fp16=True,
problem_name=None
):
super(ModelTrainer, self).__init__()
self.problem_name = problem_name
self.model = model.cuda()
self.trainLoader = Loaders[0]
self.valLoader = Loaders[1]
self.info_bbx = store_info(metrics)
self.fold = fold
        if self.fold is not None:
self.checkpoint_saving_path = checkpoint_saving_path + '/fold' + str(self.fold) + '/'
else:
self.checkpoint_saving_path = checkpoint_saving_path + '/'
self.fold = 0
os.makedirs(self.checkpoint_saving_path,exist_ok=True)
self.lr = lr
self.epochsTorun = epochsTorun
self.init_epoch = -1
self.test_run_for_error = test_run_for_error
self.current_checkpoint_save_count = 1
self.resume_checkpoint_path = resume_checkpoint_path
self.best_loss = 9999
self.best_f1_score = -9999
self.best_rmse = 9999
self.batch_size = batch_size
self.optimizer = Over9000(params=self.model.parameters(),lr=self.lr)
self.scheduler = ReduceLROnPlateau(self.optimizer, factor=0.5, mode='min', patience=5, verbose=True)
self.do_grad_accum = do_grad_accum
self.grad_accum_steps = grad_accum_steps
self.trainer_settings_dict = {
'do_grad_accum': self.do_grad_accum,
'grad_accum_steps':self.grad_accum_steps,
'epochsTorun':self.epochsTorun,
'lr':self.lr,
'batch_size':batch_size,
}
self.use_fp16 = use_fp16
self.scheduler_flag = 9999
self.criterion = RMSELoss().cuda()
self.criterion_2 = nn.CrossEntropyLoss().cuda()
self.scaler = torch.cuda.amp.GradScaler()
if resume_train_from_checkpoint:
if os.path.isfile(resume_checkpoint_path):
print("=> Loading checkpoint from '{}'".format(resume_checkpoint_path))
checkpoint_dict = torch.load(resume_checkpoint_path)
self.model.load_state_dict(checkpoint_dict['Model_state_dict'])
self.scheduler.load_state_dict(checkpoint_dict['Scheduler_state_dict'])
self.optimizer.load_state_dict(checkpoint_dict['Optimizer_state_dict'])
self.best_loss = checkpoint_dict['Best_val_loss']
self.best_f1_score = checkpoint_dict['Best_val_f1_score']
self.info_bbx.all_info = checkpoint_dict['All_info']
self.init_epoch = checkpoint_dict['Epoch']
print('Best Val loss is {}'.format(self.best_loss))
print('Best Val f1_score is {}'.format(self.best_f1_score))
print('Current val loss is {}'.format(checkpoint_dict['Current_val_Loss']))
print('Current val f1 score is {}'.format(checkpoint_dict['Current_val_f1_score']))
self.scheduler_flag = checkpoint_dict['Scheduler_flag']
del checkpoint_dict
torch.cuda.empty_cache()
else:
print("=> No checkpoint found at '{}' !".format(resume_checkpoint_path))
#%% train part starts here
def fit(self):
with TQDM() as pbar:
pbar.on_train_begin({'num_batches':len(self.trainLoader),'num_epoch':self.epochsTorun})
pbar.on_val_begin({'num_batches':len(self.valLoader),'num_epoch':self.epochsTorun})
self.train_metric_meter = Metric_Meter()
self.val_metric_meter = Metric_Meter()
for epoch in range(self.epochsTorun):
current_epoch_no = epoch+1
if current_epoch_no <= self.init_epoch:
continue
pbar.on_epoch_train_begin(self.fold,current_epoch_no)
self.info_bbx._init_new_epoch(current_epoch_no)
self.model.train()
torch.set_grad_enabled(True)
#self.optimizer.zero_grad()
self.train_metric_meter.reset()
self.val_metric_meter.reset()
for itera_no, data in enumerate(self.trainLoader):
pbar.on_train_batch_begin()
self.optimizer.zero_grad()
images, targets = data
images = images.cuda()
targets = targets.cuda()
with torch.cuda.amp.autocast():
out = self.model(images)
batch_loss = self.criterion(out['LOGITS'], targets[:,None]) + self.criterion_2(out['LOGITS_2'], targets.long())
self.scaler.scale(batch_loss).backward()
self.scaler.step(self.optimizer)
self.scaler.update()
self.train_metric_meter.update(out['LOGITS'].clone(), targets, 'single')
self.info_bbx.update_train_info({'Loss':[(batch_loss.detach().item()),images.shape[0]]})
pbar.on_train_batch_end(logs=self.info_bbx.request_current_epoch_train_metric_info())
torch.cuda.empty_cache()
if self.test_run_for_error:
if itera_no==5:
break
#%% validation part starts here
f1_score, rmse = self.train_metric_meter.feedback()
self.info_bbx.update_train_info({'f1_score': f1_score, 'rmse': rmse})
pbar.on_epoch_train_end(self.info_bbx.request_current_epoch_train_metric_info())
pbar.on_epoch_val_begin(self.fold,current_epoch_no)
self.model.eval()
torch.set_grad_enabled(False)
with torch.no_grad():
for itera_no, data in enumerate(self.valLoader):
pbar.on_val_batch_begin()
images, targets = data
images = images.cuda()
targets = targets.cuda()
with torch.cuda.amp.autocast():
out = self.model(images)
batch_loss = self.criterion(out['LOGITS'], targets[:,None]) + self.criterion_2(out['LOGITS_2'], targets.long())
self.val_metric_meter.update(out['LOGITS'].clone(), targets, 'single')
self.info_bbx.update_val_info({'Loss':[(batch_loss.detach().item()),images.shape[0]]})
pbar.on_val_batch_end(logs=self.info_bbx.request_current_epoch_val_metric_info())
torch.cuda.empty_cache()
if self.test_run_for_error:
if itera_no==5:
break
f1_score, rmse = self.val_metric_meter.feedback()
self.info_bbx.update_val_info({'f1_score': f1_score, 'rmse': rmse})
pbar.on_epoch_val_end(self.info_bbx.request_current_epoch_val_metric_info())
#%% Update best parameters
if self.best_loss > self.info_bbx.get_info(current_epoch_no,'Loss','Val'):
print( ' Val Loss is improved from {:.4f} to {:.4f}! '.format(self.best_loss,self.info_bbx.get_info(current_epoch_no,'Loss','Val')) )
self.best_loss = self.info_bbx.get_info(current_epoch_no,'Loss','Val')
is_best_loss = True
else:
print( ' Val Loss is not improved from {:.4f}! '.format(self.best_loss))
is_best_loss = False
if self.best_f1_score < self.info_bbx.get_info(current_epoch_no,'f1_score','Val'):
print( ' Val f1 score is improved from {:.4f} to {:.4f}! '.format(self.best_f1_score,self.info_bbx.get_info(current_epoch_no,'f1_score','Val')) )
self.best_f1_score = self.info_bbx.get_info(current_epoch_no,'f1_score','Val')
is_best_f1_score = True
else:
print( ' Val f1 score is not improved from {:.4f}! '.format(self.best_f1_score))
is_best_f1_score = False
#%%Learning Rate Schedulers
if is_best_loss or is_best_f1_score:
self.scheduler_flag = self.scheduler_flag - 1
self.scheduler.step(self.scheduler_flag)
else:
self.scheduler.step(self.scheduler_flag+1)
#%%checkpoint dict creation
checkpoint_dict = {
'Epoch': current_epoch_no,
'Model_state_dict': self.model.state_dict(),
'Current_val_Loss': self.info_bbx.get_info(current_epoch_no,'Loss','Val'),
'Current_train_Loss': self.info_bbx.get_info(current_epoch_no,'Loss','Train'),
'Current_val_f1_score':self.info_bbx.get_info(current_epoch_no,'f1_score','Val'),
'Current_train_f1_score':self.info_bbx.get_info(current_epoch_no,'f1_score','Train'),
'Current_val_rmse':self.info_bbx.get_info(current_epoch_no,'rmse','Val'),
'Current_train_rmse':self.info_bbx.get_info(current_epoch_no,'rmse','Train'),
'Best_val_loss' : self.best_loss,
'Best_val_f1_score': self.best_f1_score,
'Best_val_rmse': self.best_rmse,
}
#%%checkpoint dict saving
if is_best_f1_score:
torch.save(checkpoint_dict, self.checkpoint_saving_path+'checkpoint_best_f1_score_fold{}.pth'.format(self.fold))
del checkpoint_dict
torch.cuda.empty_cache()
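# Example instantiation (a sketch; the model, loaders and metric names are
# assumptions -- they come from the surrounding project):
# trainer = ModelTrainer(model=my_model,
#                        Loaders=[train_loader, val_loader],
#                        metrics=['Loss', 'f1_score', 'rmse'],
#                        fold=0, lr=1e-3, epochsTorun=30,
#                        checkpoint_saving_path='./checkpoints',
#                        batch_size=32, problem_name='example')
# trainer.fit()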
|
python
|
from backend.stage import ready_stage
from backend import message
from backend import helpers
class JobStage(ready_stage.ReadyStage):
stage_type = 'Job'
def __init__(self, game) -> None:
super().__init__(game)
self._job_selected = {} # facility selected indexed by player
@classmethod
def title(cls) -> str:
return "Select your Job"
### stage event handling ###
def begin(self):
# clear player jobs
for player in self.game.players:
if player.current_facility:
player.current_facility.leave(player)
super().begin()
def end(self):
super().end()
# update player jobs
for player in self.game.players:
# the client is responsible for checking that player has a job selected when clicking ready
assert player in self._job_selected
self._job_selected[player].join(player)
### action handling ###
def job_selected(self, sender, job: str) -> None:
if job is None:
# handle deselection
self._job_selected.pop(sender, None)
else:
# find facility using `job` string
try:
facility = self.game.facilities[job]
except KeyError:
raise message.InvalidArgumentError("job_selected called with invalid job %s" % job, method='job_selected', args=(job,))
else:
# change/add job selections
self._job_selected[sender] = facility
# notify all players of change
self._update_job_selections_to_all()
### convenience ###
@property
def _job_selections_id(self):
"""Return dictionary of player ids indexed by jobs"""
return dict((f.name, [p.id for p in players]) for f, players in helpers.invert(self._job_selected, codomain=self.game.facilities.values()).items())
def _update_job_selections_to_all(self) -> None:
for player in self.game.players:
player.update_job_selections(job_selections=self._job_selections_id)
### player handling ###
def handle_add_player(self, new_player) -> None:
super().handle_add_player(new_player)
# update new player's job selection
new_player.update_job_selections(job_selections=self._job_selections_id)
def handle_remove_player(self, player_removed) -> None:
super().handle_remove_player(player_removed)
# remove player from job selections
self._job_selected.pop(player_removed, None)
# update everyone else's job selection data
self._update_job_selections_to_all()
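# Example flow (a sketch; `game` and `player` come from the backend, and
# "farm" is an illustrative key of game.facilities):
# stage = JobStage(game)
# stage.begin()
# stage.job_selected(player, "farm")
# stage.end()  # joins each player to the facility they selected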
|
python
|
""" Robot http server and interface handler
Approach to operations:
This http server module is conceptualized as a gateway between a robot,
with private, internal operations, and the web. Incoming requests for
actions to be executed by the robot and requests for information such as
telemetry data arrive via http post and get.
Requests posted for execution are forwarded to the robot process using
the robot's internal communications framework. (In general, a move toward
all json messaging is being considered.)
When the robot makes information available for consumption by web users, it
sends the information to this server. Users retrieve the information in replies
to their posts or in replies to their get requests.
"""
__author__ = "Tal G. Ball"
__copyright__ = "Copyright (C) 2009-2020 Tal G. Ball"
__license__ = "Apache License, Version 2.0"
__version__ = "1.0"
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from http.server import HTTPServer
from http.server import BaseHTTPRequestHandler
from socketserver import ThreadingMixIn
import os
import ssl
import time
from time import time as robtimer
import json
import logging
import multiprocessing
import threading
import socket
from lbrsys.settings import robhttpLogFile, robhttpAddress, USE_SSL
from lbrsys import feedback
from robcom import robauth
proc = multiprocessing.current_process()
if proc.name == "Robot Http Service":
logging.basicConfig (level=logging.DEBUG,filename = robhttpLogFile,
format='[%(levelname)s] (%(processName)-10s) %(message)s',)
class RobHTTPService(ThreadingMixIn, HTTPServer):
allow_reuse_address = True
daemon_threads = True
def __init__(self, address, handler, receiveQ, sendQ):
HTTPServer.__init__(self, address, handler)
self.receiveQ = receiveQ
self.sendQ = sendQ
self.currentTelemetry = {'Ranges':{'Left':1,'Right':2,'Forward':3, 'Back':4, 'Bottom':5}}
self.newTelemetry = True
self.t0 = robtimer()
self.motors_powered = 0
self.telemetry_sent = 0
self.heartbeat_thread = None
self.heartbeat = False
self.dockSignal_state = {
'time_to_live': 3.0,
'left': 0.0, # timestamp of last left signal
'right': 0.0,
}
self.set_security_mode()
def check_dockSignal(self):
'''Monitor time to live for docksignals. Todo - generalize for any signals needing ttl'''
for signal in ['left', 'right']:
try:
state = self.currentTelemetry['dockSignal'][signal]
if state == 1:
if time.time() - self.dockSignal_state[signal] \
> self.dockSignal_state['time_to_live']:
# print("Clearing dockSignal: %s" % signal)
self.currentTelemetry['dockSignal'][signal] = 0
self.dockSignal_state[signal] = 0.0
except KeyError:
pass
return
def set_security_mode(self):
try:
if USE_SSL:
self.socket = ssl.wrap_socket(
self.socket,
server_side=True,
certfile=os.environ['ROBOT_CERT'],
keyfile=os.environ['ROBOT_KEY']
)
except Exception as e:
logging.error("Exception securing http server: {}".format(str(e)))
# todo simplify heartbeat management using threading.Timer
def set_heartbeat(self):
if self.motors_powered > 0 and not self.heartbeat:
self.heartbeat_thread = threading.Thread(target=self.check_heartbeat)
self.heartbeat_thread.start()
self.heartbeat = True
def check_heartbeat(self, pulse=2.0):
time.sleep(pulse)
self.heartbeat = False
if self.motors_powered > 0 and time.time() - self.telemetry_sent > pulse:
self.sendQ.put('/r/0/0')
self.motors_powered = 0
logging.debug("Heartbeat timeout - cutting motor power")
print("Hearbeat timeout - cutting motor power at %s" % time.asctime())
else:
# print('\ttelemetry age: %.3f' % (time.time() - self.telemetry_sent))
self.set_heartbeat()
def updateTelemetry(self):
"""Telemetry updater - run in a separate thread."""
while True:
self.check_dockSignal()
msg = self.receiveQ.get()
# print("Updating telemetry: {}".format(str(msg)))
self.receiveQ.task_done()
if msg == "Shutdown":
break
if type(msg) is feedback: # todo - reexamine and look at voltages
if type(msg.info) is dict:
for k, v in msg.info.items():
# for dockSignal updates, only replace the part of the telemetry
# provided by the current feedback message
# and note the time of the 1 signals to facilitate state /
# time to live management
if k == 'dockSignal':
if k not in self.currentTelemetry:
self.currentTelemetry[k] = {}
for signal in v.keys():
self.currentTelemetry[k][signal] = v[signal]
if signal == 'time':
continue
if v[signal] == 1:
self.dockSignal_state[signal] = v['time']
else:
# for all other updates, replace the entire telemetry entry
# with the current message
self.currentTelemetry[k] = v
else:
print("Please send telemetry feedback as dict: %s" % (msg.info))
self.newTelemetry = True
return
class RobHandler(BaseHTTPRequestHandler):
buffering = 1 # line buffering mode
http_log_file = open(robhttpLogFile, 'w', buffering)
def log_message(self, format, *args):
self.http_log_file.write("%s - - [%s] %s\n" %
(self.client_address[0],
self.log_date_time_string(),
format % args))
def handle_power(self, msgD):
command = None
# todo msgD type checking
if 'heading' in msgD and msgD['heading'] != '':
command = "/h/%.1f" % float(msgD['heading'])
elif 'turn' in msgD and msgD['turn'] != '':
command = "/t/%.1f" % float(msgD['turn'])
elif 'level' in msgD and msgD['level'] != '':
# print("POWER msgD: {}".format(str(msgD)))
level = float(msgD['level'])
angle = float(msgD['angle'])
range = 0
sensor = 'Forward'
duration = 0
if 'range' in msgD and msgD['range'] != '':
range = int(msgD['range'])
if 'sensor' in msgD and msgD['sensor'] != '':
sensor = msgD['sensor']
if 'duration' in msgD and msgD['duration'] != '':
duration = int(msgD['duration'])
command = "/r/%.2f/%d/%d/%s/%d" % (level, angle, range, sensor, duration)
            # use the parsed float; the posted 'level' value may arrive as a string
            if level > 0:
                self.server.motors_powered = time.time()
            elif level == 0:
                self.server.motors_powered = 0
if command is not None:
# print("\tSENDING: {}".format(command))
self.server.sendQ.put(command)
self.send_response(200)
        else:
            if 'speech' in msgD:
                self.handle_say_noreply(msgD)
                self.send_response(200)
            else:
                # no motor command and no speech: nothing actionable was posted
                self.send_response(400)
if self.server.newTelemetry:
self.server.newTelemetry = False
# for now, always send telemetry
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
buffer = json.dumps(self.server.currentTelemetry).encode()
self.wfile.write(buffer)
if self.server.motors_powered > 0:
# todo track heartbeats on a per client basis, otherwise client 2 could accidentally keep alive client 1
self.server.telemetry_sent = time.time()
self.server.set_heartbeat()
if self.server.currentTelemetry == "Shutdown":
logging.info("Shutting down robot http gateway service.")
shutdownThread = threading.Thread(target=self.server.shutdown,
name="Shutdown Thread")
shutdownThread.start()
shutdownThread.join()
return
def handle_telemetry(self):
self.send_response(200)
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
buffer = json.dumps(self.server.currentTelemetry).encode()
# json.dump(buffer, self.wfile)
self.wfile.write(buffer)
self.server.telemetry_sent = time.time()
# print("GET path: %s" % self.path)
def handle_docksignal(self, msgD):
self.server.receiveQ.put(feedback(msgD))
self.send_response(204)
self.send_header("Access-Control-Allow-Origin", "*")
self.end_headers()
return
def handle_say_noreply(self, msgD):
try:
if 'text' in msgD['speech']:
speech_command = f"/s/{msgD['speech']['text']}"
else:
speech_command = f"/s/{msgD['speech']}"
except KeyError:
speech_command = f"/s/Bad speech post: {str(msgD)}"
except Exception as e:
speech_command = f"/s/Unexpected error in speech command: {str(msgD)}\n{str(e)}"
self.server.sendQ.put(speech_command)
return
def handle_say(self, msgD):
self.handle_say_noreply(msgD)
self.send_response(200)
self.send_header("Access-Control-Allow-Origin", "*")
self.end_headers()
return
def is_user_authorized(self):
try:
# print(str(self.headers))
user = self.headers['User']
token_type, token = self.headers['Authorization'].split(':')
if token_type == 'TOK' and robauth.is_authorized(user, token):
return True
else:
raise Exception
except Exception as e:
logging.info("Failed authorization. Headers:\n%s\n%s" %
(str(self.headers), str(e)))
return False
def do_OPTIONS(self):
"""" Setup to support ajax queries from client"""
# print("Headers: %s" % str(self.headers))
self.send_response(200, 'ok')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
self.send_header('Access-Control-Allow-Headers', 'X-Requested-With, Content-type, User, Authorization')
self.end_headers()
return
def do_GET(self):
if self.path.startswith('/validate'):
logging.debug("/validate with headers %s" % str(self.headers))
if not self.is_user_authorized():
self.send_response(401)
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(b'Validation failed.\r\n')
print("Validation for %s failed" % self.headers['User'])
self.log_message("Validation for %s failed", self.headers['User'])
else:
print("Validation for %s succeeded" % self.headers['User'])
self.log_message("Validation for %s succeeded", self.headers['User'])
self.send_response(200, 'ok')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(b'Validation succeeded.\r\n')
return
if self.path.startswith('/telemetry'):
self.handle_telemetry()
return
self.send_response(404)
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(b'Service not available.\r\n')
return
def do_POST(self):
"""
post power, turn or heading json for operating the motors
post to /docksignal path to communicate receipt of docking signals
post replies:
200 - post reply contains telemetry data
204 - post reply is status only, i.e. no new data.
400 - bad post request, i.e. no power level provided (for now)
401 - authentication failure
"""
tpstart = time.time()
if not self.is_user_authorized():
self.send_response(401)
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
return
#assume json for now, one obj per line.
msgS = self.rfile.readline()
if type(msgS) is bytes:
msgS = msgS.decode()
msgD = json.loads(msgS)
if self.path == '/':
self.handle_power(msgD)
elif self.path == '/docksignal':
self.handle_docksignal(msgD)
elif self.path == '/say':
self.handle_say(msgD)
return
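# Example client call against do_POST (a sketch; the host, port, user and token
# are illustrative -- the server reads one JSON object per line from the body,
# and authorization follows the 'TOK:<token>' scheme checked above):
# import json, requests
# requests.post('https://lbr2a.ballfamily.org:9145/',
#               headers={'User': 'someuser', 'Authorization': 'TOK:sometoken'},
#               data=json.dumps({'level': 0.5, 'angle': 0}) + '\n')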
def startService(receiveQ, sendQ, addr=robhttpAddress):
server = RobHTTPService(addr, RobHandler, receiveQ, sendQ)
# server = RobHTTPService(('', 9145), RobHandler, receiveQ, sendQ)
telUpdateThread = threading.Thread(target=server.updateTelemetry,
name = "TelemetryUpdateThread")
logging.debug("Starting Telemetry Updater.")
telUpdateThread.start()
logging.debug("Starting robot http gateway service.")
server.serve_forever()
telUpdateThread.join()
# todo refactor this close
RobHandler.http_log_file.close()
if __name__ == '__main__':
sendQ = multiprocessing.JoinableQueue()
receiveQ = multiprocessing.JoinableQueue()
#address = robhttpAddress
p = multiprocessing.Process(target=startService,
args=(receiveQ, sendQ,
# ('',9145)),
('lbr2a.ballfamily.org',9145)),
#('127.0.0.1',9145)),
name='Robot Http Service')
p.start()
print("Service started..")
cn = 0
while True:
comm = sendQ.get()
sendQ.task_done()
print("%d - %s: %s" % (cn,time.asctime(), comm))
cn += 1
if cn >= 20:
receiveQ.put("Shutdown")
break
else:
receiveQ.put("[{'Return':(%d,%d,%d)}]" % (cn,cn,cn))
print("Joining Queues..")
sendQ.join()
receiveQ.join()
print("Done.")
print("Stopping service process..")
#p.join()
p.terminate()
print("Done.")
|
python
|
"""
How plugins work
----------------
From a user's perspective, plugins are enabled and disabled through the command
line interface or through a UI. Users can also configure a plugin's behavior
through the main Kolibri interface.
.. note::
We have not yet written a configuration API, for now just make sure
configuration-related variables are kept in a central location of your
plugin.
It's up to the plugin to provide configuration ``Form`` classes and register
them.
We should aim for a configuration style in which data can be pre-seeded,
dumped and exported easily.
From a developer's perspective, plugins are Django applications listed
in ``INSTALLED_APPS`` and are initialized once when the server starts, meaning at
the load time of the Django project, i.e. Kolibri.
Loading a plugin
~~~~~~~~~~~~~~~~
In general, a plugin should **never** modify internals of Kolibri or other
plugins without using the hooks API or normal conventional Django scenarios.
.. note::
Each app in ``INSTALLED_APPS`` is searched for the special
``kolibri_plugin`` module.
Everything that a plugin does is expected to be defined through
``<myapp>/kolibri_plugin.py``.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import importlib
import logging
from django.conf.urls import include
from django.conf.urls import url
from .base import KolibriPluginBase
logger = logging.getLogger(__name__)
# : Main registry is private for now, as we figure out if there is any external
# : module that has a legitimate business accessing it
__registry = []
__initialized = False
def initialize(apps=None):
"""
Called once at load time to register hook callbacks.
"""
global __initialized, __registry
if not apps:
from django.conf import settings
apps = settings.INSTALLED_APPS
if not __initialized:
logger.debug("Loading kolibri plugin registry...")
for app in apps:
try:
# Handle AppConfig INSTALLED_APPS string
if ".apps." in app:
                    # strip the .apps.Config part of the string
import_string = app.split('.apps.')[0]
else:
import_string = app
import_string += ".kolibri_plugin"
plugin_module = importlib.import_module(import_string)
logger.debug("Loaded kolibri plugin: {}".format(app))
# Load a list of all class types in module
all_classes = [cls for cls in plugin_module.__dict__.values() if isinstance(cls, type)]
# Filter the list to only match the ones that belong to the module
# and not the ones that have been imported
plugin_package = plugin_module.__package__ if plugin_module.__package__ else \
plugin_module.__name__.rpartition('.')[0]
all_classes = filter(lambda x: plugin_package + ".kolibri_plugin" == x.__module__, all_classes)
plugin_classes = []
for Klass in all_classes:
if type(Klass) == type and issubclass(Klass, KolibriPluginBase):
plugin_classes.append(Klass)
for PluginClass in plugin_classes:
# Initialize the class, nothing more happens for now.
logger.debug("Initializing plugin: {}".format(PluginClass.__name__))
__registry.append(PluginClass())
except ImportError:
pass
__initialized = True
def get_urls():
global __initialized, __registry
assert __initialized, "Registry not initialized"
urlpatterns = []
for plugin_instance in __registry:
url_module = plugin_instance.url_module()
if url_module:
urlpatterns.append(
url(
plugin_instance.url_slug(),
include(
url_module,
namespace=plugin_instance.url_namespace()
)
)
)
return urlpatterns
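# Example plugin module (a sketch of a minimal ``<myapp>/kolibri_plugin.py``;
# the class name and URL values are illustrative only, and the import path is
# assumed from the relative ``.base`` import above):
#
# from kolibri.plugins.base import KolibriPluginBase
#
# class MyAppPlugin(KolibriPluginBase):
#     def url_module(self):
#         return "myapp.urls"
#     def url_slug(self):
#         return "^myapp/"
#     def url_namespace(self):
#         return "myapp"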
|
python
|
# -*- coding: utf-8 -*-
"""
Base classes for Models.
"""
import typing as tp
from uuid import UUID
ModelType = tp.TypeVar("ModelType", bound='ModelBase')
class Model(tp.Protocol):
"""
Interface for base model class.
"""
uid: tp.Optional[UUID]
class ModelBase(object):
"""
Model storage ultimate base class.
"""
def __init__(self, *args: tp.Any, **kwargs: tp.Any) -> None:
self.uid: tp.Optional[UUID] = kwargs.pop('uid', None)
return super().__init__()
def __repr__(self) -> str:
return f"{self.__class__.__name__}(uid={self.uid!r})"
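# Example subclass (a sketch; the field name is illustrative):
# class User(ModelBase):
#     def __init__(self, name: str, **kwargs: tp.Any) -> None:
#         self.name = name
#         super().__init__(**kwargs)
#
# User(name="alice", uid=UUID(int=1))  # repr -> User(uid=UUID('0000...0001'))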
|
python
|
# pylint doesn't know about pytest fixtures
# pylint: disable=unused-argument
import datetime
import os
import time
import uuid
import boto3
import pytest
from dagster_k8s.test import wait_for_job_and_get_raw_logs
from dagster_k8s_test_infra.integration_utils import (
can_terminate_run_over_graphql,
image_pull_policy,
launch_run_over_graphql,
terminate_run_over_graphql,
)
from dagster_test.test_project import cleanup_memoized_results, get_test_project_environments_path
from dagster_test.test_project.test_pipelines.repo import define_memoization_pipeline
from dagster import DagsterEventType
from dagster.core.storage.pipeline_run import PipelineRunStatus
from dagster.core.storage.tags import DOCKER_IMAGE_TAG
from dagster.utils.merger import deep_merge_dicts, merge_dicts
from dagster.utils.yaml_utils import merge_yamls
IS_BUILDKITE = os.getenv("BUILDKITE") is not None
def get_celery_engine_config(dagster_docker_image, job_namespace):
return {
"execution": {
"celery-k8s": {
"config": merge_dicts(
(
{
"job_image": dagster_docker_image,
}
if dagster_docker_image
else {}
),
{
"job_namespace": job_namespace,
"image_pull_policy": image_pull_policy(),
},
)
}
},
}
def get_celery_job_engine_config(
dagster_docker_image, job_namespace, include_dagster_pipeline_env=False
):
return {
"execution": {
"config": merge_dicts(
(
{
"job_image": dagster_docker_image,
}
if dagster_docker_image
else {}
),
{
"job_namespace": job_namespace,
"image_pull_policy": image_pull_policy(),
},
(
{"env_config_maps": ["dagster-pipeline-env"]}
if include_dagster_pipeline_env
else {}
),
)
},
}
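# For reference, a sketch of the run_config fragment the helper above produces
# (all values illustrative):
# {
#     "execution": {
#         "config": {
#             "job_image": "my-repo/dagster-image:tag",
#             "job_namespace": "my-namespace",
#             "image_pull_policy": "Always",
#             "env_config_maps": ["dagster-pipeline-env"],  # only when requested
#         }
#     },
# }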
def test_execute_on_celery_k8s_default( # pylint: disable=redefined-outer-name
dagster_docker_image,
dagster_instance,
helm_namespace,
dagit_url,
):
run_config = merge_dicts(
merge_yamls(
[
os.path.join(get_test_project_environments_path(), "env.yaml"),
os.path.join(get_test_project_environments_path(), "env_s3.yaml"),
]
),
get_celery_engine_config(
dagster_docker_image=dagster_docker_image, job_namespace=helm_namespace
),
)
run_id = launch_run_over_graphql(
dagit_url, run_config=run_config, pipeline_name="demo_pipeline_celery"
)
result = wait_for_job_and_get_raw_logs(
job_name="dagster-run-%s" % run_id, namespace=helm_namespace
)
assert "PIPELINE_SUCCESS" in result, "no match, result: {}".format(result)
updated_run = dagster_instance.get_run_by_id(run_id)
assert updated_run.tags[DOCKER_IMAGE_TAG] == dagster_docker_image
def test_execute_on_celery_k8s_job_api( # pylint: disable=redefined-outer-name
dagster_docker_image, dagster_instance, helm_namespace, dagit_url
):
run_config = merge_dicts(
merge_yamls(
[
os.path.join(get_test_project_environments_path(), "env.yaml"),
os.path.join(get_test_project_environments_path(), "env_s3.yaml"),
]
),
get_celery_job_engine_config(
dagster_docker_image=dagster_docker_image, job_namespace=helm_namespace
),
)
run_id = launch_run_over_graphql(
dagit_url, run_config=run_config, pipeline_name="demo_job_celery"
)
result = wait_for_job_and_get_raw_logs(
job_name="dagster-run-%s" % run_id, namespace=helm_namespace
)
assert "PIPELINE_SUCCESS" in result, "no match, result: {}".format(result)
updated_run = dagster_instance.get_run_by_id(run_id)
assert updated_run.tags[DOCKER_IMAGE_TAG] == dagster_docker_image
def test_execute_on_celery_k8s_job_api_with_legacy_configmap_set( # pylint: disable=redefined-outer-name
dagster_docker_image, dagster_instance, helm_namespace, dagit_url
):
# Originally, jobs needed to include "dagster-pipeline-env" to pick up needed config when
# using the helm chart - it's no longer needed, but verify that nothing breaks if it's included
run_config = merge_dicts(
merge_yamls(
[
os.path.join(get_test_project_environments_path(), "env.yaml"),
os.path.join(get_test_project_environments_path(), "env_s3.yaml"),
]
),
get_celery_job_engine_config(
dagster_docker_image=dagster_docker_image,
job_namespace=helm_namespace,
include_dagster_pipeline_env=True,
),
)
run_id = launch_run_over_graphql(
dagit_url, run_config=run_config, pipeline_name="demo_job_celery"
)
result = wait_for_job_and_get_raw_logs(
job_name="dagster-run-%s" % run_id, namespace=helm_namespace
)
assert "PIPELINE_SUCCESS" in result, "no match, result: {}".format(result)
updated_run = dagster_instance.get_run_by_id(run_id)
assert updated_run.tags[DOCKER_IMAGE_TAG] == dagster_docker_image
def test_execute_on_celery_k8s_image_from_origin( # pylint: disable=redefined-outer-name
dagster_docker_image, dagster_instance, helm_namespace, dagit_url
):
# Like the previous test, but the image is found from the pipeline origin
# rather than the executor config
run_config = merge_dicts(
merge_yamls(
[
os.path.join(get_test_project_environments_path(), "env.yaml"),
os.path.join(get_test_project_environments_path(), "env_s3.yaml"),
]
),
get_celery_engine_config(dagster_docker_image=None, job_namespace=helm_namespace),
)
run_id = launch_run_over_graphql(
dagit_url, run_config=run_config, pipeline_name="demo_pipeline_celery"
)
result = wait_for_job_and_get_raw_logs(
job_name="dagster-run-%s" % run_id, namespace=helm_namespace
)
assert "PIPELINE_SUCCESS" in result, "no match, result: {}".format(result)
updated_run = dagster_instance.get_run_by_id(run_id)
assert updated_run.tags[DOCKER_IMAGE_TAG] == dagster_docker_image
def test_execute_subset_on_celery_k8s( # pylint: disable=redefined-outer-name
dagster_docker_image, helm_namespace, dagit_url
):
run_config = merge_dicts(
merge_yamls(
[
os.path.join(get_test_project_environments_path(), "env_subset.yaml"),
os.path.join(get_test_project_environments_path(), "env_s3.yaml"),
]
),
get_celery_engine_config(
dagster_docker_image=dagster_docker_image, job_namespace=helm_namespace
),
)
run_id = launch_run_over_graphql(
dagit_url,
run_config=run_config,
pipeline_name="demo_pipeline_celery",
solid_selection=["count_letters"],
)
result = wait_for_job_and_get_raw_logs(
job_name="dagster-run-%s" % run_id, namespace=helm_namespace
)
assert "PIPELINE_SUCCESS" in result, "no match, result: {}".format(result)
def test_execute_on_celery_k8s_retry_pipeline( # pylint: disable=redefined-outer-name
dagster_docker_image, dagster_instance, helm_namespace, dagit_url
):
run_config = merge_dicts(
merge_yamls([os.path.join(get_test_project_environments_path(), "env_s3.yaml")]),
get_celery_engine_config(
dagster_docker_image=dagster_docker_image, job_namespace=helm_namespace
),
)
run_id = launch_run_over_graphql(
dagit_url, run_config=run_config, pipeline_name="retry_pipeline"
)
result = wait_for_job_and_get_raw_logs(
job_name="dagster-run-%s" % run_id, namespace=helm_namespace
)
assert "PIPELINE_SUCCESS" in result, "no match, result: {}".format(result)
stats = dagster_instance.get_run_stats(run_id)
assert stats.steps_succeeded == 1
assert DagsterEventType.STEP_START in [
event.dagster_event.event_type
for event in dagster_instance.all_logs(run_id)
if event.is_dagster_event
]
assert DagsterEventType.STEP_UP_FOR_RETRY in [
event.dagster_event.event_type
for event in dagster_instance.all_logs(run_id)
if event.is_dagster_event
]
assert DagsterEventType.STEP_RESTARTED in [
event.dagster_event.event_type
for event in dagster_instance.all_logs(run_id)
if event.is_dagster_event
]
assert DagsterEventType.STEP_SUCCESS in [
event.dagster_event.event_type
for event in dagster_instance.all_logs(run_id)
if event.is_dagster_event
]
def test_execute_on_celery_k8s_with_resource_requirements( # pylint: disable=redefined-outer-name
dagster_docker_image, dagster_instance, helm_namespace, dagit_url
):
run_config = merge_dicts(
merge_yamls(
[
os.path.join(get_test_project_environments_path(), "env_s3.yaml"),
]
),
get_celery_engine_config(
dagster_docker_image=dagster_docker_image, job_namespace=helm_namespace
),
)
run_id = launch_run_over_graphql(
dagit_url, run_config=run_config, pipeline_name="resources_limit_pipeline"
)
result = wait_for_job_and_get_raw_logs(
job_name="dagster-run-%s" % run_id, namespace=helm_namespace
)
assert "PIPELINE_SUCCESS" in result, "no match, result: {}".format(result)
def _test_termination(dagit_url, dagster_instance, run_config):
run_id = launch_run_over_graphql(
dagit_url, run_config=run_config, pipeline_name="resource_pipeline"
)
# Wait for pipeline run to start
timeout = datetime.timedelta(0, 120)
start_time = datetime.datetime.now()
while True:
assert datetime.datetime.now() < start_time + timeout, "Timed out waiting for can_terminate"
pipeline_run = dagster_instance.get_run_by_id(run_id)
if can_terminate_run_over_graphql(dagit_url, run_id):
break
time.sleep(5)
# Wait for step to start
step_start_found = False
start_time = datetime.datetime.now()
while datetime.datetime.now() < start_time + timeout:
event_records = dagster_instance.all_logs(run_id)
for event_record in event_records:
if (
event_record.dagster_event
and event_record.dagster_event.event_type == DagsterEventType.STEP_START
):
step_start_found = True
break
if step_start_found:
break
time.sleep(5)
assert step_start_found
# Terminate run
assert can_terminate_run_over_graphql(dagit_url, run_id=run_id)
terminate_run_over_graphql(dagit_url, run_id=run_id)
# Check that pipeline run is marked as canceled
pipeline_run_status_canceled = False
start_time = datetime.datetime.now()
while datetime.datetime.now() < start_time + timeout:
pipeline_run = dagster_instance.get_run_by_id(run_id)
if pipeline_run.status == PipelineRunStatus.CANCELED:
pipeline_run_status_canceled = True
break
time.sleep(5)
assert pipeline_run_status_canceled
# Check that terminate cannot be called again
assert not can_terminate_run_over_graphql(dagit_url, run_id=run_id)
# Check for step failure and resource tear down
expected_events_found = False
start_time = datetime.datetime.now()
while datetime.datetime.now() < start_time + timeout:
step_failures_count = 0
resource_tear_down_count = 0
resource_init_count = 0
termination_request_count = 0
termination_success_count = 0
event_records = dagster_instance.all_logs(run_id)
for event_record in event_records:
if event_record.dagster_event:
if event_record.dagster_event.event_type == DagsterEventType.STEP_FAILURE:
step_failures_count += 1
elif event_record.dagster_event.event_type == DagsterEventType.PIPELINE_CANCELING:
termination_request_count += 1
elif event_record.dagster_event.event_type == DagsterEventType.PIPELINE_CANCELED:
termination_success_count += 1
elif event_record.message:
if "initializing s3_resource_with_context_manager" in event_record.message:
resource_init_count += 1
if "tearing down s3_resource_with_context_manager" in event_record.message:
resource_tear_down_count += 1
if (
step_failures_count == 1
and resource_init_count == 1
and resource_tear_down_count == 1
and termination_request_count == 1
and termination_success_count == 1
):
expected_events_found = True
break
time.sleep(5)
assert expected_events_found
s3 = boto3.resource("s3", region_name="us-west-1", use_ssl=True, endpoint_url=None).meta.client
bucket = "dagster-scratch-80542c2"
key = "resource_termination_test/{}".format(run_id)
assert s3.get_object(Bucket=bucket, Key=key)
def test_execute_on_celery_k8s_with_termination( # pylint: disable=redefined-outer-name
dagster_docker_image,
dagster_instance,
helm_namespace,
dagit_url,
):
run_config = merge_dicts(
merge_yamls(
[
os.path.join(get_test_project_environments_path(), "env_s3.yaml"),
]
),
get_celery_engine_config(
dagster_docker_image=dagster_docker_image, job_namespace=helm_namespace
),
)
_test_termination(dagit_url, dagster_instance, run_config)
@pytest.fixture(scope="function")
def set_dagster_k8s_pipeline_run_namespace_env(helm_namespace):
old_value = None
try:
old_value = os.getenv("DAGSTER_K8S_PIPELINE_RUN_NAMESPACE")
os.environ["DAGSTER_K8S_PIPELINE_RUN_NAMESPACE"] = helm_namespace
yield
    finally:
        if old_value is not None:
            os.environ["DAGSTER_K8S_PIPELINE_RUN_NAMESPACE"] = old_value
        else:
            # the variable was unset before the test; remove it so it does not leak
            os.environ.pop("DAGSTER_K8S_PIPELINE_RUN_NAMESPACE", None)
def test_execute_on_celery_k8s_with_env_var_and_termination( # pylint: disable=redefined-outer-name
dagster_docker_image, dagster_instance, set_dagster_k8s_pipeline_run_namespace_env, dagit_url
):
run_config = merge_dicts(
merge_yamls(
[
os.path.join(get_test_project_environments_path(), "env_s3.yaml"),
]
),
get_celery_engine_config(
dagster_docker_image=dagster_docker_image,
job_namespace={"env": "DAGSTER_K8S_PIPELINE_RUN_NAMESPACE"},
),
)
_test_termination(dagit_url, dagster_instance, run_config)
def test_execute_on_celery_k8s_with_hard_failure( # pylint: disable=redefined-outer-name
dagster_docker_image, dagster_instance, set_dagster_k8s_pipeline_run_namespace_env, dagit_url
):
run_config = merge_dicts(
merge_dicts(
merge_yamls(
[
os.path.join(get_test_project_environments_path(), "env_s3.yaml"),
]
),
get_celery_engine_config(
dagster_docker_image=dagster_docker_image,
job_namespace={"env": "DAGSTER_K8S_PIPELINE_RUN_NAMESPACE"},
),
),
{"solids": {"hard_fail_or_0": {"config": {"fail": True}}}},
)
run_id = launch_run_over_graphql(dagit_url, run_config=run_config, pipeline_name="hard_failer")
# Check that pipeline run is marked as failed
pipeline_run_status_failure = False
start_time = datetime.datetime.now()
timeout = datetime.timedelta(0, 120)
while datetime.datetime.now() < start_time + timeout:
pipeline_run = dagster_instance.get_run_by_id(run_id)
if pipeline_run.status == PipelineRunStatus.FAILURE:
pipeline_run_status_failure = True
break
time.sleep(5)
assert pipeline_run_status_failure
# Check for step failure for hard_fail_or_0.compute
start_time = datetime.datetime.now()
step_failure_found = False
while datetime.datetime.now() < start_time + timeout:
event_records = dagster_instance.all_logs(run_id)
for event_record in event_records:
if event_record.dagster_event:
if (
event_record.dagster_event.event_type == DagsterEventType.STEP_FAILURE
and event_record.dagster_event.step_key == "hard_fail_or_0"
):
step_failure_found = True
break
time.sleep(5)
assert step_failure_found
def _get_step_events(event_logs):
return [
event_log.dagster_event
for event_log in event_logs
if event_log.dagster_event is not None and event_log.dagster_event.is_step_event
]
def test_memoization_on_celery_k8s( # pylint: disable=redefined-outer-name
dagster_docker_image, dagster_instance, helm_namespace, dagit_url
):
ephemeral_prefix = str(uuid.uuid4())
run_config = deep_merge_dicts(
merge_yamls([os.path.join(get_test_project_environments_path(), "env_s3.yaml")]),
get_celery_engine_config(
dagster_docker_image=dagster_docker_image, job_namespace=helm_namespace
),
)
run_config = deep_merge_dicts(
run_config,
{"resources": {"io_manager": {"config": {"s3_prefix": ephemeral_prefix}}}},
)
try:
run_ids = []
for _ in range(2):
run_id = launch_run_over_graphql(
dagit_url,
run_config=run_config,
pipeline_name="memoization_pipeline",
mode="celery",
)
result = wait_for_job_and_get_raw_logs(
job_name="dagster-run-%s" % run_id, namespace=helm_namespace
)
assert "PIPELINE_SUCCESS" in result, "no match, result: {}".format(result)
run_ids.append(run_id)
unmemoized_run_id = run_ids[0]
step_events = _get_step_events(dagster_instance.all_logs(unmemoized_run_id))
assert len(step_events) == 4
memoized_run_id = run_ids[1]
step_events = _get_step_events(dagster_instance.all_logs(memoized_run_id))
assert len(step_events) == 0
finally:
cleanup_memoized_results(
define_memoization_pipeline(), "celery", dagster_instance, run_config
)
@pytest.mark.integration
def test_volume_mounts(dagster_docker_image, dagster_instance, helm_namespace, dagit_url):
run_config = deep_merge_dicts(
merge_yamls([os.path.join(get_test_project_environments_path(), "env_s3.yaml")]),
get_celery_engine_config(
dagster_docker_image=dagster_docker_image, job_namespace=helm_namespace
),
)
run_id = launch_run_over_graphql(
dagit_url,
run_config=run_config,
pipeline_name="volume_mount_pipeline",
mode="celery",
)
result = wait_for_job_and_get_raw_logs(
job_name="dagster-run-%s" % run_id, namespace=helm_namespace
)
assert "PIPELINE_SUCCESS" in result, "no match, result: {}".format(result)
|
python
|
from __future__ import annotations
import src.globe.hexasphere as hexasphere
if __name__ == "__main__":
hs = hexasphere.Hexsphere(50, 1, 0.8)
print(hs)
|
python
|
import itertools
from matplotlib import cm
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
from rllab.envs.base import Env
from rllab.misc import logger
from rllab.spaces import Box
from rllab.spaces import Discrete
from utils import flat_to_one_hot, np_seed
class DiscreteEnv(Env):
def __init__(self, transition_matrix, reward, init_state, terminate_on_reward=False):
super(DiscreteEnv, self).__init__()
dX, dA, dXX = transition_matrix.shape
self.nstates = dX
self.nactions = dA
self.transitions = transition_matrix
self.init_state = init_state
self.reward = reward
self.terminate_on_reward = terminate_on_reward
self.__observation_space = Box(0, 1, shape=(self.nstates,))
#max_A = 0
#for trans in self.transitions:
# max_A = max(max_A, len(self.transitions[trans]))
self.__action_space = Discrete(dA)
def reset(self):
self.cur_state = self.init_state
obs = flat_to_one_hot(self.cur_state, ndim=self.nstates)
return obs
def step(self, a):
transition_probs = self.transitions[self.cur_state, a]
next_state = np.random.choice(np.arange(self.nstates), p=transition_probs)
        # reward tables may be indexed by (s, a) or (s, a, s'); support both
        r = self.reward[self.cur_state, a, next_state] if self.reward.ndim == 3 else self.reward[self.cur_state, a]
self.cur_state = next_state
obs = flat_to_one_hot(self.cur_state, ndim=self.nstates)
done = False
if self.terminate_on_reward and r>0:
done = True
return obs, r, done, {}
def tabular_trans_distr(self, s, a):
return self.transitions[s, a]
def reward_fn(self, s, a):
return self.reward[s, a]
def log_diagnostics(self, paths):
#Ntraj = len(paths)
#acts = np.array([traj['actions'] for traj in paths])
obs = np.array([np.sum(traj['observations'], axis=0) for traj in paths])
state_count = np.sum(obs, axis=0)
#state_count = np.mean(state_count, axis=0)
state_freq = state_count/float(np.sum(state_count))
for state in range(self.nstates):
logger.record_tabular('AvgStateFreq%d'%state, state_freq[state])
@property
def transition_matrix(self):
return self.transitions
@property
def rew_matrix(self):
return self.reward
@property
def initial_state_distribution(self):
return flat_to_one_hot(self.init_state, ndim=self.nstates)
@property
def action_space(self):
return self.__action_space
@property
def observation_space(self):
return self.__observation_space
def random_env(Nstates, Nact, seed=None, terminate=False, t_sparsity=0.75):
assert Nstates >= 2
if seed is None:
seed = 0
reward_state=0
start_state=1
with np_seed(seed):
transition_matrix = np.random.rand(Nstates, Nact, Nstates)
transition_matrix = np.exp(transition_matrix)
for s in range(Nstates):
for a in range(Nact):
zero_idxs = np.random.randint(0, Nstates, size=int(Nstates*t_sparsity))
transition_matrix[s, a, zero_idxs] = 0.0
transition_matrix = transition_matrix/np.sum(transition_matrix, axis=2, keepdims=True)
reward = np.zeros((Nstates, Nact))
reward[reward_state, :] = 1.0
#reward = np.random.randn(Nstates,1 ) + reward
stable_action = seed % Nact #np.random.randint(0, Nact)
transition_matrix[reward_state, stable_action] = np.zeros(Nstates)
transition_matrix[reward_state, stable_action, reward_state] = 1
return DiscreteEnv(transition_matrix, reward=reward, init_state=start_state, terminate_on_reward=terminate)
if __name__ == '__main__':
env = random_env(5, 2, seed=0)
print(env.transitions)
print(env.transitions[0,0])
print(env.transitions[0,1])
env.reset()
for _ in range(100):
print(env.step(env.action_space.sample()))
|
python
|
from distutils.core import setup
import setuptools
setup(
name="turkishnlp",
version="0.0.61",
packages=['turkishnlp'],
description="A python script that processes Turkish language",
long_description=open('README.md', encoding="utf8").read(),
long_description_content_type='text/markdown',
url="https://github.com/MeteHanC/turkishnlp",
author="Metehan Cetinkaya",
author_email="[email protected]",
maintainer="Metehan Cetinkaya",
maintainer_email="[email protected]",
keywords=['turkishnlp', 'python', 'nlp', 'language processing'],
classifiers=[
'Programming Language :: Python',
'Environment :: MacOS X',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
]
)
|
python
|
"""Frontend for spectra group project"""
__author__ = """Group01"""
__version__ = '0.1.0'
|
python
|
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import rdoinfo
import sh
import shutil
import sys
if len(sys.argv) > 1:
UC_RELEASE = sys.argv[1]
else:
UC_RELEASE = 'wallaby-uc'
def update_puppet_uc():
if os.path.exists(os.path.join(".", "modules")):
shutil.rmtree("./modules")
info = rdoinfo.parse_info_file('rdo.yml')
puppet_info = []
for package in info['packages']:
if package['name'].startswith('puppet'):
puppet_info.append([package['name'], package['upstream']])
for package in puppet_info:
url = package[1]
if 'openstack' in url: # Do not bump OpenStack modules
continue
module = package[0]
gitpath = os.path.join("modules", module)
sh.git.clone(url, gitpath)
git = sh.git.bake(_cwd=gitpath, _tty_out=False)
try:
rev_list = str(git('rev-list', '--tags', '--max-count=1')).strip()
tag = str(git.describe('--tags', rev_list)).strip()
with open('upper-constraints.txt', 'a') as fp:
fp.write("%s===%s\n" % (module, tag))
except Exception:
continue
shutil.rmtree(gitpath)
update_uc = sh.Command('./update-uc.py')
update_uc(UC_RELEASE)
if __name__ == '__main__':
update_puppet_uc()
|
python
|
import os
import csv
import json
import torch
import pickle
import random
import warnings
import numpy as np
from functools import reduce
from typing import Dict, List, Tuple, Set, Any
__all__ = [
'to_one_hot',
'seq_len_to_mask',
    'ignore_warning',
'make_seed',
'load_pkl',
'save_pkl',
'ensure_dir',
'load_csv',
'load_jsonld',
'jsonld2csv',
'csv2jsonld',
]
Path = str
def to_one_hot(x, length):
batch_size = x.size(0)
x_one_hot = torch.zeros(batch_size, length).to(x.device)
for i in range(batch_size):
x_one_hot[i, x[i]] = 1.0
return x_one_hot
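# A minimal usage sketch (not part of the original module): to_one_hot turns a
# batch of class indices into a one-hot matrix, e.g.
#   x = torch.tensor([1, 3])
#   to_one_hot(x, 5)
#   # -> tensor([[0., 1., 0., 0., 0.],
#   #            [0., 0., 0., 1., 0.]])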
def model_summary(model):
"""
    Compute the total number of parameters of a model.
    :param model: a PyTorch model
    :return tuple: (total params, trainable params, non-trainable params)
"""
train = []
nontrain = []
def layer_summary(module):
def count_size(sizes):
return reduce(lambda x, y: x * y, sizes)
for p in module.parameters(recurse=False):
if p.requires_grad:
train.append(count_size(p.shape))
else:
nontrain.append(count_size(p.shape))
for subm in module.children():
layer_summary(subm)
layer_summary(model)
total_train = sum(train)
total_nontrain = sum(nontrain)
total = total_train + total_nontrain
strings = []
strings.append('Total params: {:,}'.format(total))
strings.append('Trainable params: {:,}'.format(total_train))
strings.append('Non-trainable params: {:,}'.format(total_nontrain))
max_len = len(max(strings, key=len))
bar = '-' * (max_len + 3)
strings = [bar] + strings + [bar]
print('\n'.join(strings))
return total, total_train, total_nontrain
def seq_len_to_mask(seq_len, max_len=None):
"""
    Convert a 1-d array of sequence lengths into a 2-d mask; positions beyond
    each sequence's length are set to 0.
.. code-block::
>>> seq_len = torch.arange(2, 16)
>>> mask = seq_len_to_mask(seq_len)
>>> print(mask.size())
torch.Size([14, 15])
>>> seq_len = np.arange(2, 16)
>>> mask = seq_len_to_mask(seq_len)
>>> print(mask.shape)
(14, 15)
>>> seq_len = torch.arange(2, 16)
>>> mask = seq_len_to_mask(seq_len, max_len=100)
        >>> print(mask.size())
torch.Size([14, 100])
    :param np.ndarray,torch.LongTensor seq_len: shape (B,)
    :param int max_len: pad the mask to this length. Defaults (None) to the longest length in
        seq_len. Under nn.DataParallel the seq_len on different devices may differ, so pass a
        max_len to pad every mask to the same length.
    :return: np.ndarray or torch.Tensor of shape (B, max_len); elements are bool or torch.uint8.
"""
if isinstance(seq_len, np.ndarray):
assert len(np.shape(seq_len)) == 1, f"seq_len can only have one dimension, got {len(np.shape(seq_len))}."
max_len = int(max_len) if max_len else int(seq_len.max())
broad_cast_seq_len = np.tile(np.arange(max_len), (len(seq_len), 1))
mask = broad_cast_seq_len < seq_len.reshape(-1, 1)
elif isinstance(seq_len, torch.Tensor):
        assert seq_len.dim() == 1, f"seq_len can only have one dimension, got {seq_len.dim()}."
batch_size = seq_len.size(0)
max_len = int(max_len) if max_len else seq_len.max().long()
broad_cast_seq_len = torch.arange(max_len).expand(batch_size, -1).to(seq_len)
mask = broad_cast_seq_len.lt(seq_len.unsqueeze(1))
else:
raise TypeError("Only support 1-d numpy.ndarray or 1-d torch.Tensor.")
return mask
def ignore_warning():
    warnings.filterwarnings("ignore")
def make_seed(num: int = 1) -> None:
random.seed(num)
np.random.seed(num)
torch.manual_seed(num)
torch.cuda.manual_seed(num)
torch.cuda.manual_seed_all(num)
def load_pkl(fp: str, obj_name: str = 'data', verbose: bool = True) -> Any:
if verbose:
print(f'load {obj_name} in {fp}')
with open(fp, 'rb') as f:
data = pickle.load(f)
return data
def save_pkl(fp: Path, obj, obj_name: str = 'data', verbose: bool = True) -> None:
if verbose:
print(f'save {obj_name} in {fp}')
with open(fp, 'wb') as f:
pickle.dump(obj, f)
def ensure_dir(d: str, verbose: bool = True) -> None:
'''
    Create the directory if it does not exist.
    :param d: directory
    :param verbose: whether to print logging
    :return: None
'''
if not os.path.exists(d):
if verbose:
print("Directory '{}' do not exist; creating...".format(d))
os.makedirs(d)
def load_csv(fp: str) -> List:
print(f'load {fp}')
with open(fp, encoding='utf-8') as f:
reader = csv.DictReader(f)
return list(reader)
def load_jsonld(fp: str) -> List:
print(f'load {fp}')
datas = []
with open(fp, encoding='utf-8') as f:
for l in f:
line = json.loads(l)
data = list(line.values())
datas.append(data)
return datas
def jsonld2csv(fp: str, verbose: bool = True) -> str:
'''
    Read a jsonld file and save it as a csv file with the same name in the same location.
    :param fp: path to the jsonld file
    :param verbose: whether to print logging
    :return: path to the csv file
'''
data = []
root, ext = os.path.splitext(fp)
fp_new = root + '.csv'
if verbose:
print(f'read jsonld file in: {fp}')
with open(fp, encoding='utf-8') as f:
for l in f:
line = json.loads(l)
data.append(line)
if verbose:
print('saving...')
with open(fp_new, 'w', encoding='utf-8') as f:
fieldnames = data[0].keys()
writer = csv.DictWriter(f, fieldnames=fieldnames, dialect='excel')
writer.writeheader()
writer.writerows(data)
if verbose:
print(f'saved csv file in: {fp_new}')
return fp_new
def csv2jsonld(fp: str, verbose: bool = True) -> str:
'''
    Read a csv file and save it as a jsonld file with the same name in the same location.
    :param fp: path to the csv file
    :param verbose: whether to print logging
    :return: path to the jsonld file
'''
data = []
root, ext = os.path.splitext(fp)
fp_new = root + '.jsonld'
if verbose:
print(f'read csv file in: {fp}')
with open(fp, encoding='utf-8') as f:
        reader = csv.DictReader(f, fieldnames=None, dialect='excel')
        for line in reader:
data.append(line)
if verbose:
print('saving...')
with open(fp_new, 'w', encoding='utf-8') as f:
f.write(os.linesep.join([json.dumps(l, ensure_ascii=False) for l in data]))
if verbose:
print(f'saved jsonld file in: {fp_new}')
return fp_new
if __name__ == '__main__':
pass
|
python
|
import os
import boto3
AMI = os.environ["AMI"]
INSTANCE_TYPE = os.environ["INSTANCE_TYPE"]
KEY_NAME = os.environ["KEY_NAME"]
SUBNET_ID = os.environ["SUBNET_ID"]
REGION = os.environ["REGION"]
INSTANCE_PROFILE = os.environ["INSTANCE_PROFILE"]
ec2 = boto3.client("ec2", region_name=REGION)
def create_instance(event, files_to_download):
"""
    Using the event variables and the batched list of files to download, create the EC2 instance that does the downloading.
"""
    # join into a comma-separated string of double-quoted names so the init script treats them as strings
files_to_download = ",".join(map('"{0}"'.format, files_to_download))
vars = {
"FTP_HOST": event["ftp_url"],
"FTP_PATH": event["ftp_path"],
"FTP_USERNAME": event["username"],
"FTP_PASSWORD": event["password"],
"FTP_AUTH_KEY": event["auth_key"],
"S3_BUCKET_NAME": event["s3_bucket"],
"PRODUCTS_TABLE": event["product_table"],
"files_to_download": files_to_download,
"s3_path": event["s3_path"],
}
print(vars)
init_script = """#!/bin/bash
/bin/echo "**************************"
/bin/echo "* Running FTP to S3. *"
/bin/echo "**************************"
/bin/pwd
/bin/whoami
export S3_BUCKET_NAME={S3_BUCKET_NAME}
export PRODUCTS_TABLE={PRODUCTS_TABLE}
export FTP_HOST={FTP_HOST}
export FTP_PATH={FTP_PATH}
export FTP_USERNAME={FTP_USERNAME}
export FTP_PASSWORD={FTP_PASSWORD}
/bin/echo python3 /home/ec2-user/ftp_to_s3.py {s3_path} {files_to_download}
PYTHONUSERBASE=/home/ec2-user/.local python3.8 /home/ec2-user/ftp_to_s3.py {s3_path} {files_to_download}
shutdown now -h""".format(
**vars
)
instance = ec2.run_instances(
ImageId=AMI,
InstanceType=INSTANCE_TYPE,
KeyName=KEY_NAME,
SubnetId=SUBNET_ID,
MaxCount=1,
MinCount=1,
InstanceInitiatedShutdownBehavior="terminate",
UserData=init_script,
IamInstanceProfile={"Arn": INSTANCE_PROFILE},
BlockDeviceMappings=[{"DeviceName": "/dev/xvda", "Ebs": {"VolumeSize": 50}}],
)
instance_id = instance["Instances"][0]["InstanceId"]
print("***New Instance! {0}***".format(instance_id))
print("Instance downloading these files: {0}".format(files_to_download))
return instance_id
def lambda_handler(event, context):
# variables sent from scheduler.py
print(event, context)
    # calculate the total size of the files to download, batching under the size limit
files_list = event["files_to_download"]
total_size = 0
size_limit = 30212254720 # set to 30GBish
files_to_download = []
for obj in files_list:
total_size += int(obj["size"])
if total_size < size_limit:
files_to_download.append(obj)
else:
create_instance(event, files_to_download)
files_to_download = [obj]
total_size = int(obj["size"])
# files_to_download.append(obj)
create_instance(event, files_to_download)
print("Finished.")
|
python
|
# coding: utf-8
"""Pytest fixtures and utilities for testing algorithms."""
import gym
import torch
import torch.nn as nn
import torch.distributions as distrib
import pytest
from irl.algo.value_methods import TensorQValues
class ProbPolicy(nn.Module):
"""A simple test probabilistic policy."""
def __init__(
self, dim_in: int, dim_out: int, continuous: bool = False, critic: bool = False
) -> None:
"""Initialize probabilistic policy."""
super().__init__()
self.lin = nn.Linear(dim_in, dim_out)
self.critic = nn.Linear(dim_in, 1) if critic else None
self.continuous = continuous
def forward(self, obs: torch.Tensor) -> distrib.Distribution:
"""Forward pass."""
h = self.lin(obs)
if self.continuous:
probs = distrib.Normal(h, 1.0)
else:
probs = distrib.Categorical(logits=h)
if self.critic is not None:
return probs, self.critic(obs)
else:
return probs
def new_with_critic(self) -> "ProbPolicy":
"""Return a similar probabilistic policy with a critic."""
return ProbPolicy(
dim_in=self.lin.in_features,
dim_out=self.lin.out_features,
continuous=self.continuous,
critic=True,
)
@pytest.fixture
def prob_policy(env_factory) -> nn.Module:
"""Create a ProbPolicy relevant for the environment."""
env = env_factory()
dim_in, = env.observation_space.shape
continuous = isinstance(env.action_space, gym.spaces.Box)
if continuous:
dim_out, = env.action_space.shape
else:
dim_out = env.action_space.n
return ProbPolicy(dim_in, dim_out, continuous)
class DQN(nn.Module):
"""A simple test deep Q network."""
def __init__(self, dim_in: int, dim_out: int) -> None:
"""Initialize a deep Q network."""
super().__init__()
self.lin = nn.Linear(dim_in, dim_out)
    def forward(self, obs: torch.Tensor) -> TensorQValues:
"""Forward pass."""
return TensorQValues(self.lin(obs))
@pytest.fixture
def dqn(env_factory) -> nn.Module:
"""Createa a DQN relevant for the environment."""
env = env_factory()
if isinstance(env.action_space, gym.spaces.Box):
pytest.skip("DQN is not suitted for continuous environment.")
dim_in, = env.observation_space.shape
dim_out = env.action_space.n
return DQN(dim_in, dim_out)
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: © 2021 Massachusetts Institute of Technology.
# SPDX-FileCopyrightText: © 2021 Lee McCuller <[email protected]>
# NOTICE: authors should document their contributions in concisely in NOTICE
# with details inline in source files, comments, and docstrings.
"""
"""
from wavestate.model import optics
from wavestate.model import base
def system1064(SHG=True):
obj_sys = base.SimulationObject()
freqs = base.SimulationObject()
obj_sys["frequencies/"] = freqs
freqs["Nd1064/"] = optics.OpticalFrequency()
freqs["Nd1064/wavelength[m]"] = 1064e-9
if SHG:
freqs["Nd1064/order"] = 2
else:
freqs["Nd1064/order"] = 1
aliases1064 = freqs["aliases_1064/"] = optics.OpticalFrequencyAliases()
aliases1064["to"] = {"Nd1064": 1}
aliases1064["names"] = ["1064", 1064, "1064nm", 1064e-9]
aliases532 = freqs["aliases_532/"] = optics.OpticalFrequencyAliases()
aliases532["to"] = {"Nd1064": 2}
aliases532["names"] = ["532", 532, "532nm", 532e-9]
return obj_sys
def system1550(SHG=True):
obj_sys = base.SimulationObject()
freqs = base.SimulationObject()
obj_sys["frequencies/"] = freqs
freqs["1550/"] = optics.OpticalFrequency()
freqs["1550/wavelength[m]"] = 1550e-9
if SHG:
freqs["1550/order"] = 2
else:
freqs["1550/order"] = 1
aliases1550 = freqs["aliases_1550/"] = optics.OpticalFrequencyAliases()
aliases1550["to"] = {"1550": 1}
aliases1550["names"] = ["1550", 1550, "1550nm", 1550e-9]
aliases775 = freqs["aliases_775/"] = optics.OpticalFrequencyAliases()
aliases775["to"] = {"1550": 2}
aliases775["names"] = ["775", 775, "775nm", 775e-9]
return obj_sys
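# Usage sketch (not in the original file): with SHG=True the carrier frequency
# is given order 2, so the second-harmonic aliases defined above resolve, e.g.
#   sys1064 = system1064(SHG=True)   # both "1064" and "532" aliases available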
|
python
|
from __future__ import print_function, division, absolute_import
from distributed.compatibility import (
gzip_compress, gzip_decompress, finalize)
def test_gzip():
b = b'Hello, world!'
c = gzip_compress(b)
d = gzip_decompress(c)
assert b == d
def test_finalize():
class C(object):
pass
l = []
def cb(value):
l.append(value)
o = C()
finalize(o, cb, 1)
assert not l
del o
assert l.pop() == 1
o = C()
fin = finalize(o, cb, 2)
assert fin.alive
fin()
assert not fin.alive
assert l.pop() == 2
del o
assert not l
|
python
|
# ActivitySim
# See full license in LICENSE.txt.
import logging
import pandas as pd
from activitysim.core import tracing
from activitysim.core import config
from activitysim.core import pipeline
from activitysim.core import inject
from activitysim.core.util import assign_in_place
from activitysim.abm.models.trip_purpose import run_trip_purpose
from activitysim.abm.models.trip_destination import run_trip_destination
from activitysim.abm.models.util.trip import flag_failed_trip_leg_mates
from activitysim.abm.models.util.trip import cleanup_failed_trips
logger = logging.getLogger(__name__)
def run_trip_purpose_and_destination(
trips_df,
tours_merged_df,
chunk_size,
trace_hh_id,
trace_label):
assert not trips_df.empty
choices = run_trip_purpose(
trips_df,
chunk_size=chunk_size,
trace_hh_id=trace_hh_id,
trace_label=tracing.extend_trace_label(trace_label, 'purpose')
)
trips_df['purpose'] = choices
trips_df, save_sample_df = run_trip_destination(
trips_df,
tours_merged_df,
chunk_size, trace_hh_id,
trace_label=tracing.extend_trace_label(trace_label, 'destination'))
return trips_df, save_sample_df
@inject.step()
def trip_purpose_and_destination(
trips,
tours_merged,
chunk_size,
trace_hh_id):
trace_label = "trip_purpose_and_destination"
model_settings = config.read_model_settings('trip_purpose_and_destination.yaml')
# for consistency, read sample_table_name setting from trip_destination settings file
trip_destination_model_settings = config.read_model_settings('trip_destination.yaml')
sample_table_name = trip_destination_model_settings.get('DEST_CHOICE_SAMPLE_TABLE_NAME')
want_sample_table = config.setting('want_dest_choice_sample_tables') and sample_table_name is not None
MAX_ITERATIONS = model_settings.get('MAX_ITERATIONS', 5)
trips_df = trips.to_frame()
tours_merged_df = tours_merged.to_frame()
if trips_df.empty:
logger.info("%s - no trips. Nothing to do." % trace_label)
return
# FIXME could allow MAX_ITERATIONS=0 to allow for cleanup-only run
# in which case, we would need to drop bad trips, WITHOUT failing bad_trip leg_mates
assert (MAX_ITERATIONS > 0)
# if trip_destination has been run before, keep only failed trips (and leg_mates) to retry
if 'destination' in trips_df:
if 'failed' not in trips_df.columns:
# trip_destination model cleaned up any failed trips
logger.info("%s - no failed column from prior model run." % trace_label)
return
elif not trips_df.failed.any():
# 'failed' column but no failed trips from prior run of trip_destination
logger.info("%s - no failed trips from prior model run." % trace_label)
trips_df.drop(columns='failed', inplace=True)
pipeline.replace_table("trips", trips_df)
return
else:
logger.info("trip_destination has already been run. Rerunning failed trips")
flag_failed_trip_leg_mates(trips_df, 'failed')
trips_df = trips_df[trips_df.failed]
tours_merged_df = tours_merged_df[tours_merged_df.index.isin(trips_df.tour_id)]
logger.info("Rerunning %s failed trips and leg-mates" % trips_df.shape[0])
# drop any previously saved samples of failed trips
if want_sample_table and pipeline.is_table(sample_table_name):
logger.info("Dropping any previously saved samples of failed trips")
save_sample_df = pipeline.get_table(sample_table_name)
save_sample_df.drop(trips_df.index, level='trip_id', inplace=True)
pipeline.replace_table(sample_table_name, save_sample_df)
del save_sample_df
processed_trips = []
save_samples = []
i = 0
TRIP_RESULT_COLUMNS = ['purpose', 'destination', 'origin', 'failed']
while True:
i += 1
for c in TRIP_RESULT_COLUMNS:
if c in trips_df:
del trips_df[c]
trips_df, save_sample_df = run_trip_purpose_and_destination(
trips_df,
tours_merged_df,
chunk_size=chunk_size,
trace_hh_id=trace_hh_id,
trace_label=tracing.extend_trace_label(trace_label, "i%s" % i))
        # if testing, make sure at least one trip fails
if config.setting('testing_fail_trip_destination', False) \
and (i == 1) and not trips_df.failed.any():
fail_o = trips_df[trips_df.trip_num < trips_df.trip_count].origin.max()
trips_df.failed = (trips_df.origin == fail_o) & \
(trips_df.trip_num < trips_df.trip_count)
num_failed_trips = trips_df.failed.sum()
# if there were no failed trips, we are done
if num_failed_trips == 0:
processed_trips.append(trips_df[TRIP_RESULT_COLUMNS])
if save_sample_df is not None:
save_samples.append(save_sample_df)
break
logger.warning("%s %s failed trips in iteration %s" % (trace_label, num_failed_trips, i))
file_name = "%s_i%s_failed_trips" % (trace_label, i)
logger.info("writing failed trips to %s" % file_name)
tracing.write_csv(trips_df[trips_df.failed], file_name=file_name, transpose=False)
# if max iterations reached, add remaining trips to processed_trips and give up
# note that we do this BEFORE failing leg_mates so resulting trip legs are complete
if i >= MAX_ITERATIONS:
logger.warning("%s too many iterations %s" % (trace_label, i))
processed_trips.append(trips_df[TRIP_RESULT_COLUMNS])
if save_sample_df is not None:
save_sample_df.drop(trips_df[trips_df.failed].index, level='trip_id', inplace=True)
save_samples.append(save_sample_df)
break
# otherwise, if any trips failed, then their leg-mates trips must also fail
flag_failed_trip_leg_mates(trips_df, 'failed')
# add the good trips to processed_trips
processed_trips.append(trips_df[~trips_df.failed][TRIP_RESULT_COLUMNS])
# and keep the failed ones to retry
trips_df = trips_df[trips_df.failed]
tours_merged_df = tours_merged_df[tours_merged_df.index.isin(trips_df.tour_id)]
# add trip samples of processed_trips to processed_samples
if save_sample_df is not None:
# drop failed trip samples
save_sample_df.drop(trips_df.index, level='trip_id', inplace=True)
save_samples.append(save_sample_df)
# - assign result columns to trips
processed_trips = pd.concat(processed_trips)
if len(save_samples) > 0:
save_sample_df = pd.concat(save_samples)
logger.info("adding %s samples to %s" % (len(save_sample_df), sample_table_name))
pipeline.extend_table(sample_table_name, save_sample_df)
logger.info("%s %s failed trips after %s iterations" %
(trace_label, processed_trips.failed.sum(), i))
trips_df = trips.to_frame()
assign_in_place(trips_df, processed_trips)
trips_df = cleanup_failed_trips(trips_df)
pipeline.replace_table("trips", trips_df)
    # check to make sure we wrote the sample file if requested
if want_sample_table and len(trips_df) > 0:
assert pipeline.is_table(sample_table_name)
        # since we saved samples for all successful trips,
        # once we discard failed trips we should have samples for all remaining trips
save_sample_df = pipeline.get_table(sample_table_name)
        # expect samples only for intermediate trip destinations
assert \
len(save_sample_df.index.get_level_values(0).unique()) == \
len(trips_df[trips_df.trip_num < trips_df.trip_count])
del save_sample_df
if trace_hh_id:
tracing.trace_df(trips_df,
label=trace_label,
slicer='trip_id',
index_label='trip_id',
warn_if_empty=True)
|
python
|
# pytest file that runs the things in shell-sessions/
import codecs
import os
import pathlib
import re
import shutil
import sys
import time
import pytest
import asdac.__main__
sessions_dir = pathlib.Path(__file__).absolute().parent / 'shell-sessions'
@pytest.fixture
def shell_session_environment(tmp_path):
os.chdir(str(tmp_path))
for file in (sessions_dir / 'files').iterdir():
shutil.copy(str(file), '.')
with open('bom.asda', 'wb') as file:
file.write(codecs.BOM_UTF8 + b'print("Hello")\n')
with open('bombom.asda', 'wb') as file:
file.write(codecs.BOM_UTF8 + codecs.BOM_UTF8 + b'print("Hello")\n')
def create_test_func(path):
@pytest.mark.slow
def test_func(shell_session_environment, monkeypatch, capsys):
with path.open('r', encoding='utf-8') as file:
session = file.read().replace(r'<\uFEFF>', '\uFEFF')
for command, output in re.findall(r'^\$ (.*)\n([^\$]*)', session,
flags=re.MULTILINE):
program, *args = command.split()
expected_output = output.rstrip()
if expected_output:
expected_output += '\n'
if program == '#':
actual_output = ''
elif program == 'touch':
time.sleep(0.05) # sometimes fails without this
[path_string] = args
pathlib.Path(path_string).touch()
actual_output = ''
elif program == 'asdac':
monkeypatch.setattr(sys, 'argv', ['asdac'] + args)
try:
asdac.__main__.main()
except SystemExit as e:
if isinstance(e.code, str):
print(e.code, file=sys.stderr)
output, errors = capsys.readouterr()
assert not output
actual_output = errors.replace(os.sep, '/')
else:
raise RuntimeError("unknown program: " + program)
assert expected_output == actual_output
# magic is fun
test_func.__name__ = test_func.__qualname__ = 'test_%s_session' % path.stem
globals()[test_func.__name__] = test_func
for path in sessions_dir.iterdir():
if path.suffix == '.txt':
create_test_func(path)
|
python
|
#!/usr/bin/env python
import asyncio
import websockets
isExit = False
async def client(uri):
global isExit
async with websockets.connect(uri) as websocket:
while True:
content = await websocket.recv()
print(content)
await asyncio.sleep(0.1)
if isExit:
break
try:
asyncio.get_event_loop().run_until_complete(client('ws://localhost:8765'))
except KeyboardInterrupt as e:
print("KeyboardInterrupt")
isExit = True
|
python
|
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 02/20/2016 6110 tgurney Extract and refactor from AvnFPS
# MosData.py
#
#
##
# This is a base file that is not intended to be overridden.
##
from com.raytheon.uf.edex.aviation.aag import AAGData
import time
import AvnLib
import TafGen
UNLIMITED = 99999
TO_KT = 3600.0/1852.0
FILL_VALUE = -9999.0
class AAGTafGen(TafGen.TafGen):
def __init__(self, allFcst):
self.model = 'gfslamp'
self.ident = allFcst['ident']['str']
self.fcst = allFcst['group']
self.startTimes = [t['time']['from'] for t in self.fcst]
self.endTimes = [t['time']['to'] for t in self.fcst]
self.grpTaf = TafGen.Config(self.ident, 'gfslamp').grpTaf()
self.fltCat = TafGen.Config(self.ident, 'gfslamp').fltCat()
self.tafTime = time.time()
self.tafDuration = 24
nBeg, nEnd = self.getTafPrd(self.tafDuration)
self.projData = [
TafGen.LampProjection(self.ident, self.grpTaf, self.fltCat,
dat, self.tafTime).getData()
for dat in self.fcst[nBeg:nEnd]
]
self.subStartTimes = self.startTimes[nBeg:nEnd]
self.subEndTimes = self.endTimes[nBeg:nEnd]
class _PointDataView:
def __init__(self, java_PointDataView):
self.__javaPdv = java_PointDataView
self.__keys = []
keyset = self.__javaPdv.getContainer().getParameters()
itr = keyset.iterator()
while itr.hasNext():
self.__keys.append(str(itr.next()))
def __getitem__(self, key):
result = None
strValType = self.getType(key)
if strValType == 'FLOAT':
result = self.__javaPdv.getFloat(key)
elif strValType == 'STRING':
result = self.__javaPdv.getString(key)
elif strValType == 'INT':
result = self.__javaPdv.getInt(key)
elif strValType == 'LONG':
result = self.__javaPdv.getLong(key)
return result
def getType(self, key):
val = self.__javaPdv.getType(key)
if val:
val = str(val)
return val
def has_key(self, key):
return self.__keys.__contains__(key)
def keys(self):
return self.__keys
def __contains__(self, key):
return self.has_key(key)
###############################################################################
def accumulate(iterable):
s = 0
for v in iterable:
s += v
yield s
###############################################################################
def _getCigProbs(v,element,numCategories):
try:
probs = [v[element+str(x)] for x in xrange(1,numCategories+1)]
return [min(x,100) for x in accumulate(probs)]
except KeyError:
return [0]*(numCategories-1)+[100]
def _getVisProbs(v,element,numCategories):
try:
return [v[element+str(x)] for x in xrange(1,numCategories+1)]+[100]
except KeyError:
return [0]*(numCategories-1)+[100]
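# Example (sketch, not in the original source): _getCigProbs turns per-category
# probabilities into a cumulative distribution capped at 100, e.g. raw values
# [10, 20, 30, 40, ...] become [10, 30, 60, 100, ...]; if the fields are
# missing, both helpers fall back to all probability mass in the final category.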
###############################################################################
class _GfsLampData:
VSBY_VALUE = {
8: 0.25,
9: 0.5,
10: 1.5,
11: 2.5,
5: 4.0,
6: 6,
7: 10.0
}
CIG_VALUE = {
1: 100,
2: 300,
3: 700,
8: 1500,
9: 2500,
5: 5000,
6: 10000,
7: 25000
}
COVER_STR = {
0: 'SKC',
13: 'FEW',
11: 'SCT',
12: 'BKN',
8: 'OVC'
}
numData = 25 # 25 forecast hours
PRECIP_TYPE = {
13: 'SHPL',
11: 'FZDZ',
12: 'FZRA',
23: 'SHSN',
21: 'DZSN',
22: 'SN',
33: 'SHRA',
31: 'DZ',
32: 'RA'
}
OBV_TYPE = {
1: 'BL',
2: 'HZ',
3: 'FG',
4: '',
5: 'BR'
}
CIG_COVT = {1:1, 2:2, 3:3, 8:4, 9:5, 5:6, 6:7, 7:8}
VIS_COVT = {8:1, 9:2, 10:3, 11:4, 5:5, 6:6, 7:7}
def cigBestCat(self,t):
try:
return '%3.0f' % self.CIG_COVT[int(t+0.1)]
except:
return ''
def visBestCat(self,t):
try:
return '%3.0f' % self.VIS_COVT[int(t+0.1)]
except:
return ''
def makeObv(self, v):
t = int(v['obVis_bestCat'])
s = self.OBV_TYPE.get(t, '')
if s:
return {'str': s}
else:
return None
def makePcp(self, v, vsby, pdc, n, fcstHrList):
d = {}
p = v['POP_hour']
if p != FILL_VALUE:
d['pop'] = int(p)
p = int(v['POP_hour_bestCat'])
if p != FILL_VALUE:
d['pcat'] = p
        # tstm has overlapping 2-hour forecasts in the first five hours, then 2-hourly
if n < self.numData - 1:
p = _PointDataView(pdc.readRandom(fcstHrList[n+1]))['ltg2hr']
if p == FILL_VALUE and n < self.numData-2:
try:
p = _PointDataView(pdc.readRandom(fcstHrList[n+2]))['ltg2hr']
except:
p = FILL_VALUE
else:
p = FILL_VALUE
if p != FILL_VALUE:
d['pot'] = int(p)
if n < self.numData-1:
p = int(_PointDataView(pdc.readRandom(fcstHrList[n+1]))['ltg_bestCat'])
if p == FILL_VALUE and n < self.numData-2:
try:
#p = int(v['ltg_bestCat'][recno,n+2])
p = int(_PointDataView(pdc.readRandom(fcstHrList[n+2]))['ltg_bestCat'])
except:
p = FILL_VALUE
else:
p = FILL_VALUE
if p != FILL_VALUE:
d['tcat'] = p
ptype = int(v['precipType'])
        # if ptype is missing, assume rain
if ptype == FILL_VALUE:
ptype = 3 # rain
pchar = int(v['POP_bestCat'])
if pchar == FILL_VALUE:
pchar = 2
intensity = ''
if ptype == 2 or pchar == 1: # SN or DZ
if vsby:
if vsby < 0.245:
intensity = '+'
elif vsby > 0.50:
intensity = '-'
else:
intensity = '-'
pcp = self.PRECIP_TYPE[ptype * 10 + pchar]
d.update({'str': intensity + pcp, 'int': intensity})
return d
def makeSky(self, ceiling_bestCat, clouds_bestCat):
cig = self.CIG_VALUE.get(int(ceiling_bestCat), None)
cover = int(clouds_bestCat)
        # if sky cover is not BKN or OVC, set cig to unlimited.
if cover in [0, 13, 11]:
cig = UNLIMITED
if cig is not None:
if cover == 0:
d = {'str': 'SKC', 'cover': 0, 'cig': UNLIMITED}
elif cover in self.COVER_STR.keys():
if cig != UNLIMITED:
d = {
'str': '%s%03d' % (self.COVER_STR[cover], cig/100),
'cover': cover,
'cig': cig
}
else:
d = {'str': '%s%03d' % (self.COVER_STR[cover], 250),
'cover': cover,
'cig': cig
}
else:
return None
return d
else:
return None
def makeWind(self, v, noToKt):
d = {}
gg = 0
dd = int(v['windDir'])
if dd != FILL_VALUE:
            dd = 10 * ((dd + 5) // 10)
if dd == 0:
dd = 360
d['dd'] = dd
if 'windSpeedInflated' in v:
ff = float(v['windSpeedInflated'])
fillValue = FILL_VALUE
else:
ff = float(v['windSpeed'])
fillValue = FILL_VALUE
if ff != fillValue:
if noToKt:
d['ff'] = int(ff + 0.5)
else:
d['ff'] = int(ff * TO_KT + 0.5)
if d['ff'] == 0:
d['dd'] = 0
if 'MaxWindSpeed' in v:
gg = int(v['MaxWindSpeed'] * TO_KT + 0.5)
if 'dd' in d and 'ff' in d and 9998 > gg > 0:
d['gg'] = int(gg)
d['str'] = '%03d%02dG%02dKT' % (d['dd'], d['ff'], d['gg'])
else:
if 'dd' in d and 'ff' in d:
d['str'] = '%03d%02dKT' % (d['dd'], d['ff'])
else:
d['str'] = '??????KT'
return d
def makeVsby(self, var):
# returns mid point of category range
tmp = self.VSBY_VALUE.get(int(var), None)
if tmp:
return AvnLib.fixTafVsby(tmp)
else:
return None
def makeData(self, pdc, ident):
self.numData = min(self.numData, pdc.getCurrentSz())
self.issuetime = pdc.readRandom(0).getDataTime(False).getRefTime().getTime() / 1000
fcstHrList = range(pdc.getCurrentSz())
fcstHrList.sort()
self._validTimeList = []
for f in fcstHrList:
self._validTimeList.append(self.issuetime + (f * 3600))
d = {'itime': {'value': self.issuetime,
'str': time.strftime('%d%H%MZ', time.gmtime(self.issuetime))},
'ident': {'str': ident}}
d['group'] = [self.makePeriod(pdc, n, fcstHrList) for n in range(self.numData)]
return d
def makePeriod(self, pdc, n, fcstHrList):
v = _PointDataView(pdc.readRandom(fcstHrList[n]))
try:
f, t = self._validTimeList[n:n+2]
except ValueError:
# LAMP only has 25 projections, so need to consider running out of pairs
f, t = self._validTimeList[n], self._validTimeList[n]+3600
g = {'time': {'from': f, 'to': t}}
d = self.makeWind(v, 0)
if d:
g['wind'] = d
d = self.makeVsby(v['vis_bestCat'])
if d:
g['vsby'] = d
vsby = d['value']
else:
vsby = None
d = self.makeVsby(v['cvis_bestCat'])
if d:
g['cvsby'] = d
cvsby = d['value']
else:
cvsby = None
if v['POP_hour']*100 > 40:
vsby = cvsby
d = self.makePcp(v, vsby, pdc, n, fcstHrList)
if d:
g['pcp'] = d
d = self.makeObv(v)
if d:
g['obv'] = d
        # cobv is the same as obv until 'FG' and 'BR' are switched based on visibility
g['cobv'] = d
d = self.makeSky(v['ceiling_bestCat'], v['clouds_bestCat'])
if d:
g['sky'] = d
try:
d = self.makeSky(v['c_ceiling_bestCat'], v['clouds_bestCat'])
if d:
g['csky'] = d
except:
pass
# fix visibility and obstruction to vision
if 'vsby' in g and 'obv' in g and g['obv']['str'] in ['BR', 'FG']:
vsby = g['vsby']['value']
if vsby > 6.1:
g['vsby'] = {'str': '6SM', 'value': 6.0}
if vsby < 0.6:
g['obv']['str'] = 'FG'
elif vsby <= 6.1:
g['obv']['str'] = 'BR'
# fix conditional visibility and obstruction to vision
if 'cvsby' in g and 'obv' in g and g['obv']['str'] in ['BR', 'FG']:
vsby = g['cvsby']['value']
if vsby > 6.1:
g['cvsby'] = {'str': '6SM', 'value': 6.0}
if vsby < 0.6:
g['cobv']['str'] = 'FG'
elif vsby <= 6.1:
g['cobv']['str'] = 'BR'
#
# include the probabilities
# Look ahead for the 6hr QPF POP
#
g['pop6hr'] = -1
try:
for i in range(n, 25):
if _PointDataView(pdc.readRandom(fcstHrList[i]))['PQPF_6hr'] < 100:
g['pop6hr'] = _PointDataView(pdc.readRandom(fcstHrList[i]))['PQPF_6hr']
break
except KeyError:
pass
# Probability of ceiling categories including best category
g['cprob'] = _getCigProbs(v,'ceiling_cat',8)
g['ccprob'] = _getCigProbs(v,'c_ceiling_cat',8)
try:
g['cig_bestCat'] = int(self.cigBestCat(v['ceiling_bestCat']))
            g['ccig_bestCat'] = int(self.cigBestCat(v['c_ceiling_bestCat']))
except ValueError:
pass
# Probability of visibility categories including best category
g['vprob'] = _getVisProbs(v,'vis_cat',6)
g['cvprob'] = _getVisProbs(v,'cvis_cat',6)
try:
g['vis_bestCat'] = int(self.visBestCat(v['vis_bestCat']))
g['cvis_bestCat'] = int(self.visBestCat(v['cvis_bestCat']))
except ValueError:
pass
return g
def tafPartToAAGData(tafPart, fcstType):
aagData = None
if fcstType in tafPart:
aagData = AAGData()
pcpObv = ""
if 'time' in tafPart[fcstType]:
aagData.setTimeFromSeconds(tafPart[fcstType]['time']['from'])
aagData.setTimeToSeconds(tafPart[fcstType]['time']['to'])
if 'vsby' in tafPart[fcstType]:
aagData.setVisibility(tafPart[fcstType]['vsby']['str'])
if 'wind' in tafPart[fcstType]:
aagData.setWind(tafPart[fcstType]['wind']['str'])
if 'sky' in tafPart[fcstType]:
aagData.setSky(tafPart[fcstType]['sky']['str'])
if 'pcp' in tafPart[fcstType]:
pcpObv += tafPart[fcstType]['pcp']['str']
if 'obv' in tafPart[fcstType]:
if pcpObv != "":
pcpObv += " "
pcpObv += tafPart[fcstType]['obv']['str']
if pcpObv != "":
aagData.setWeather(pcpObv)
return aagData
def getAAGData(siteID, pdc):
data = _GfsLampData().makeData(pdc, siteID)
tafParts = AAGTafGen(data).formNewDic(False)
aagDatas = []
for tafPart in tafParts:
        # if it is not one of "prev" or "ocnl" it is junk; ignore it
for fcstType in ('ocnl', 'prev'):
aagData = tafPartToAAGData(tafPart, fcstType)
if aagData:
if fcstType == 'ocnl':
aagData.setForecastType(tafPart['ocnl']['type'])
elif fcstType == 'prev':
aagData.setForecastType('FM')
aagDatas.append(aagData)
return aagDatas
|
python
|
from actors.actions.action import Action
class DelayedAction(Action):
def __init__(self, action, delay_remaining=1):
self.action = action
self.delay_remaining = delay_remaining
def on(self, actor, tile, root):
delay_remaining = self.delay_remaining - 1
return root, (DelayedAction(self.action, delay_remaining) if delay_remaining > 0 else self.action)
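# Usage sketch (assumes a hypothetical `attack` Action instance): wrapping it
# with delay_remaining=2 means two calls to `on` before the real action fires.
#   delayed = DelayedAction(attack, delay_remaining=2)
#   root, pending = delayed.on(actor, tile, root)  # pending is DelayedAction(attack, 1)
#   root, ready = pending.on(actor, tile, root)    # ready is the original attack action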
|
python
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG),
# acting on behalf of its Max Planck Institute for Intelligent Systems and the
# Max Planck Institute for Biological Cybernetics. All rights reserved.
#
# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is holder of all proprietary rights
# on this computer program. You can only use this computer program if you have closed a license agreement
# with MPG or you get the right to use the computer program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and liable to prosecution.
# Contact: [email protected]
#
#
# If you use this code in a research publication please consider citing the following:
#
# Expressive Body Capture: 3D Hands, Face, and Body from a Single Image <https://arxiv.org/abs/1904.05866>
#
# Code Developed by: Nima Ghorbani <https://www.linkedin.com/in/nghorbani/>
# 2018.11.07
import cv2
import numpy as np
import os
from PIL import Image
fontColors = {'red': (255, 0, 0),
'green': (0, 255, 0),
'yellow': (255, 255, 0),
'blue': (0, 255, 255),
'orange': (255, 165, 0),
'black': (0, 0, 0),
'grey': (169, 169, 169),
'white': (255, 255, 255),
}
def crop_to_bounding_box(image, offset_height, offset_width, target_height, target_width):
cropped = image[offset_height:offset_height + target_height, offset_width:offset_width + target_width, :]
return cropped
def pad_to_bounding_box(image, offset_height, offset_width, target_height, target_width):
height, width, depth = image.shape
after_padding_width = target_width - offset_width - width
after_padding_height = target_height - offset_height - height
# Do not pad on the depth dimensions.
paddings = ((offset_height, after_padding_height), (offset_width, after_padding_width), (0, 0))
padded = np.pad(image, paddings, 'constant')
return padded
def resize_image_with_crop_or_pad(image, target_height, target_width):
# crop to ratio, center
height, width, c = image.shape
width_diff = target_width - width
offset_crop_width = max(-width_diff // 2, 0)
offset_pad_width = max(width_diff // 2, 0)
height_diff = target_height - height
offset_crop_height = max(-height_diff // 2, 0)
offset_pad_height = max(height_diff // 2, 0)
# Maybe crop if needed.
# print('image shape', image.shape)
cropped = crop_to_bounding_box(image, offset_crop_height, offset_crop_width,
min(target_height, height),
min(target_width, width))
# print('after cropp', cropped.shape)
# Maybe pad if needed.
resized = pad_to_bounding_box(cropped, offset_pad_height, offset_pad_width,
target_height, target_width)
# print('after pad', resized.shape)
return resized[:target_height, :target_width, :]
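# Worked example (sketch, not in the original source): a 300x100 (HxW) image
# resized to 224x224 is first center-cropped to 224x100 (height shrinks), then
# padded by 62 px on each side of the width to reach 224x224.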
def cropout_openpose(pil_image,pose, want_image=True, crop_margin=0.08):
im_orig = cv2.cvtColor(np.array(pil_image), cv2.COLOR_RGB2BGR)
im_height, im_width = im_orig.shape[0], im_orig.shape[1]
pose = pose[pose[:, 2] > 0.0]
x_min, x_max = pose[:, 0].min(), pose[:, 0].max()
y_min, y_max = pose[:, 1].min(), pose[:, 1].max()
margin_h = crop_margin * im_height
margin_w = crop_margin * im_width
offset_height = int(max((y_min - margin_h), 0))
target_height = int(min((y_max + margin_h), im_height)) - offset_height
offset_width = int(max((x_min - margin_w), 0))
target_width = int(min((x_max + margin_w), im_width)) - offset_width
crop_info = {'crop_boundary':
{'offset_height':offset_height,
'target_height':target_height,
'offset_width':offset_width,
'target_width':target_width}}
if want_image:
crop_info['cropped_image'] = crop_to_bounding_box(im_orig, offset_height, offset_width, target_height, target_width)
return crop_info
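# Worked example (sketch, not in the original source): for an 800x1000 (HxW)
# image with crop_margin=0.08, the box around the detected keypoints is padded
# by 64 px vertically and 80 px horizontally (8% of each image dimension),
# then clamped to the image bounds before cropping.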
def put_text_in_image(images, text, color ='white', position=None):
'''
:param images: 4D array of images
:param text: list of text to be printed in each image
:param color: the color or colors of each text
:return:
'''
import cv2
if not isinstance(text, list): text = [text]
if not isinstance(color, list): color = [color for _ in range(images.shape[0])]
if images.ndim == 3: images = images.reshape(1,images.shape[0],images.shape[1],3)
images_out = []
for imIdx in range(images.shape[0]):
img = images[imIdx].astype(np.uint8)
font = cv2.FONT_HERSHEY_SIMPLEX
if position is None:position = (10, img.shape[1])
fontScale = 1.
lineType = 2
fontColor = fontColors[color[imIdx]]
cv2.putText(img, text[imIdx],
position,
font,
fontScale,
fontColor,
lineType)
images_out.append(img)
return np.array(images_out)
def read_prep_image(im_fname, avoid_distortion=True):
'''
    if max(height, width) is larger than 224, downsample so the larger dimension becomes 224
    (this also scales the smaller dimension). in the end, crop and pad the whole image to 224x224
:param im_fname:
:return:
'''
import cv2
if isinstance(im_fname, np.ndarray):
image_data = im_fname
else:
image_data = cv2.imread(im_fname, 3)
# height, width = image_reader.read_image_dims(sess, image_data)
# image_data = image_reader.decode_jpeg(sess, image_data)
# print(image_data.min(), image_data.max(), image_data.shape)
# import matplotlib.pyplot as plt
# plt.imshow(image_data[:,:,::-1].astype(np.uint8))
# plt.show()
# height, width = image_data.shape[0], image_data.shape[1]
# if min(height, width) > 224:
# print(image_data.shape)
# rt = 224. / min(height, width)
# image_data = cv2.resize(image_data, (int(rt * width), int(rt * height)), interpolation=cv2.INTER_AREA)
# print('>>resized to>>',image_data.shape)
height, width = image_data.shape[0], image_data.shape[1]
if avoid_distortion:
if max(height, width) > 224:
# print(image_data.shape)
rt = 224. / max(height, width)
image_data = cv2.resize(image_data, (int(rt * width), int(rt * height)), interpolation=cv2.INTER_AREA)
# print('>>resized to>>',image_data.shape)
else:
from skimage.transform import resize
image_data = resize(image_data, (224, 224), mode='constant', anti_aliasing=False, preserve_range=True)
# print(image_data.min(), image_data.max(), image_data.shape)
# import matplotlib.pyplot as plt
# plt.imshow(image_data[:,:,::-1].astype(np.uint8))
# plt.show()
image_data = resize_image_with_crop_or_pad(image_data, 224, 224)
# print(image_data.min(), image_data.max(), image_data.shape)
# import matplotlib.pyplot as plt
# plt.imshow(image_data[:, :, ::-1].astype(np.uint8))
# plt.show()
#return image_data.astype(np.float32)
return image_data.astype(np.uint8)
def save_images(images, out_dir, im_names = None):
from homogenus.tools.omni_tools import id_generator
if images.ndim == 3: images = images.reshape(1,images.shape[0],images.shape[1],3)
from PIL import Image
if im_names is None:
im_names = ['%s.jpg'%id_generator(4) for i in range(images.shape[0])]
for imIdx in range(images.shape[0]):
result = Image.fromarray(images[imIdx].astype(np.uint8))
result.save(os.path.join(out_dir, im_names[imIdx]))
return True
|
python
|
#!/usr/bin/env python
"""
Script to calculate the mean and std
Usage:
./scripts/cal_deepfashion_ds_meanstd.py
"""
import os.path
import sys
cur_path = os.path.realpath(__file__)
cur_dir = os.path.dirname(cur_path)
parent_dir = cur_dir[:cur_dir.rfind(os.path.sep)]
sys.path.insert(0, parent_dir)
# --------------------------------------------
from utils.datasets import DeepFashionDataset
from utils.preprocessing import StandardScaler
from torchvision.transforms import ToTensor
from torchvision.transforms import Compose
from torchvision.transforms import Resize
from torchvision.transforms import Normalize
from torch.utils.data import DataLoader
import torch
from tqdm import tqdm
if __name__ == "__main__":
deep_fashion_root_dir = "./deepfashion_data"
trans = Compose([
Resize((224, 224)),
ToTensor(),
# Normalize([0.7464, 0.7155, 0.7043], [0.2606, 0.2716, 0.2744]), # For check against
])
train_ds = DeepFashionDataset(
deep_fashion_root_dir, 'train', transform=trans)
loader = DataLoader(train_ds, batch_size=200, num_workers=2)
scalar = StandardScaler()
for imgs, _ in tqdm(loader):
scalar.partial_fit(imgs)
print("--------------------")
print(scalar._mean)
print(scalar._var)
print(scalar._std)
print("--------------------")
|
python
|
'''
This function returns the first longest word from the input string
'''
def LongestWord(sen):
    max_len = 0
    longest_word = ""
    st = ""
    # replace non-alphanumeric characters with spaces
    for c in sen:
        if c.isalnum():
            st = st + c
        else:
            st = st + " "
    words = st.split(" ")
    # strict '>' keeps the first word of maximal length
    for word in words:
        if len(word) > max_len:
            max_len = len(word)
            longest_word = word
    return longest_word
print(LongestWord(input("Please enter a string: \n")))
|
python
|
from django.urls import NoReverseMatch, reverse
from django.utils.html import format_html
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_noop
from django.views import View
from memoized import memoized
from dimagi.utils.parsing import string_to_utc_datetime
from dimagi.utils.web import json_response
from corehq import toggles
from corehq.apps.reports.analytics.esaccessors import get_paged_forms_by_type
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.display import xmlns_to_name
from corehq.apps.reports.standard.deployments import DeploymentsReport
from corehq.apps.reports.standard.forms.filters import SubmissionTypeFilter
from corehq.apps.users.util import cached_user_id_to_username
from corehq.const import SERVER_DATETIME_FORMAT
from corehq.form_processor.reprocess import ReprocessingError
from corehq.util import cmp
from corehq.util.timezones.conversions import ServerTime
def _compare_submissions(x, y):
# these are backwards because we want most recent to come first
return cmp(y.received_on, x.received_on)
class SubmissionErrorReport(DeploymentsReport):
name = ugettext_noop("Raw Forms, Errors & Duplicates")
slug = "submit_errors"
ajax_pagination = True
asynchronous = False
base_template = 'reports/standard/submission_error_report.html'
fields = ['corehq.apps.reports.standard.forms.filters.SubmissionTypeFilter']
@property
@memoized
def headers(self):
headers = DataTablesHeader(DataTablesColumn(_("View Form"), sortable=False),
DataTablesColumn(_("Username"), prop_name="username"),
DataTablesColumn(_("Submit Time"), prop_name="received_on"),
DataTablesColumn(_("Form Type"), sortable=False),
DataTablesColumn(_("Error Type"), sortable=False),
DataTablesColumn(_("Error Message"), sortable=False))
if self.support_toggle_enabled:
headers.add_column(DataTablesColumn(_("Re-process Form")))
headers.custom_sort = [[2, "desc"]]
return headers
_submitfilter = None
@property
def submitfilter(self):
if self._submitfilter is None:
self._submitfilter = SubmissionTypeFilter.get_filter_toggle(self.request)
return self._submitfilter
@property
def sort_params(self):
sort_col_idx = int(self.request.GET['iSortCol_0'])
col = self.headers.header[sort_col_idx]
sort_prop = hasattr(col, "prop_name") and col.prop_name
desc = self.request.GET.get('sSortDir_0') == 'desc'
return sort_prop, desc
@property
@memoized
def paged_result(self):
        doc_types = [filter_.doc_type for filter_ in self.submitfilter if filter_.show]
sort_col, desc = self.sort_params
return get_paged_forms_by_type(
self.domain,
doc_types,
sort_col=sort_col,
desc=desc,
start=self.pagination.start,
size=self.pagination.count,
)
@property
def shared_pagination_GET_params(self):
shared_params = super(SubmissionErrorReport, self).shared_pagination_GET_params
shared_params.append(dict(
name=SubmissionTypeFilter.slug,
value=[f.type for f in self.submitfilter if f.show]
))
return shared_params
@property
def total_records(self):
return self.paged_result.total
@property
def support_toggle_enabled(self):
return toggles.SUPPORT.enabled_for_request(self.request)
    def _make_reprocess_button(self, xform_dict):
if not xform_dict['doc_type'] == 'XFormError':
return ''
return '''
<button
class="btn btn-default reprocess-error"
data-form-id={}>
Re-process Form
</button>
'''.format(xform_dict['_id'])
@property
def rows(self):
EMPTY_ERROR = _("No Error")
EMPTY_USER = _("No User")
EMPTY_FORM = _("Unknown Form")
def _to_row(xform_dict):
def _fmt_url(doc_id):
if xform_dict['doc_type'] in [
"XFormInstance",
"XFormArchived",
"XFormError",
"XFormDeprecated"]:
view_name = 'render_form_data'
else:
view_name = 'download_form'
try:
return format_html(
"<a class='ajax_dialog' href='{url}'>{text}</a>",
url=reverse(view_name, args=[self.domain, doc_id]),
text=_("View Form")
)
except NoReverseMatch:
return 'unable to view form'
def _fmt_date(somedate):
time = ServerTime(somedate).user_time(self.timezone).done()
return time.strftime(SERVER_DATETIME_FORMAT)
if xform_dict['form'].get('meta'):
form_name = xmlns_to_name(
self.domain,
xform_dict.get('xmlns'),
app_id=xform_dict.get('app_id'),
)
form_username = xform_dict['form']['meta'].get('username', EMPTY_USER)
else:
form_name = EMPTY_FORM
form_username = EMPTY_USER
error_type = SubmissionTypeFilter.display_name_by_doc_type(xform_dict['doc_type'])
if xform_dict['doc_type'] == "XFormArchived":
archive_operations = [operation for operation in xform_dict.get('history')
if operation.get('operation') == 'archive']
if archive_operations:
error_type = _("{username} {archived_form} on {date}").format(
username=cached_user_id_to_username(archive_operations[-1].get('user')) or "",
archived_form=SubmissionTypeFilter.display_name_by_doc_type(xform_dict['doc_type']),
date=_fmt_date(string_to_utc_datetime(archive_operations[-1].get('date'))),
)
return [
_fmt_url(xform_dict['_id']),
form_username,
_fmt_date(string_to_utc_datetime(xform_dict['received_on'])),
form_name,
error_type,
xform_dict.get('problem', EMPTY_ERROR),
            self._make_reprocess_button(xform_dict) if self.support_toggle_enabled else '',
]
return [_to_row(xform_dict) for xform_dict in self.paged_result.hits]
class ReprocessXFormErrorView(View):
urlname = 'reprocess_xform_errors'
http_method_names = ['post']
def post(self, request, domain):
from corehq.form_processor.reprocess import reprocess_xform_error_by_id
form_id = request.POST['form_id']
if not form_id:
return json_response({
'success': False,
'failure_reason': 'Missing "form_id"'
})
try:
reprocess_xform_error_by_id(form_id, domain=domain)
except ReprocessingError as e:
return json_response({
'success': False,
'failure_reason': str(e),
})
else:
return json_response({
'success': True,
})
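# Usage sketch (request shape inferred from the code above; the URL path is
# hypothetical): the view expects a POST carrying form_id and answers in JSON:
#   POST .../reprocess_xform_errors  with  form_id=<id>
#   -> {"success": true}  or  {"success": false, "failure_reason": "..."}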
|
python
|
"""Base class for module overlays."""
from pytype import datatypes
from pytype.abstract import abstract
class Overlay(abstract.Module):
"""A layer between pytype and a module's pytd definition.
An overlay pretends to be a module, but provides members that generate extra
typing information that cannot be expressed in a pytd file. For example,
collections.namedtuple is a factory method that generates class definitions
at runtime. An overlay is needed for Pytype to generate these classes.
An Overlay will typically import its underlying module in its __init__, e.g.
by calling ctx.loader.import_name(). Due to this, Overlays should only be used
when their underlying module is imported by the Python script being analyzed!
A subclass of Overlay should have an __init__ with the signature:
def __init__(self, ctx)
Attributes:
real_module: An abstract.Module wrapping the AST for the underlying module.
"""
def __init__(self, ctx, name, member_map, ast):
"""Initialize the overlay.
Args:
ctx: Instance of context.Context.
name: A string containing the name of the underlying module.
member_map: Dict of str to abstract.BaseValues that provide type
information not available in the underlying module.
      ast: A pytd.TypeDeclUnit containing the AST for the underlying module.
Used to access type information for members of the module that are not
explicitly provided by the overlay.
"""
super().__init__(ctx, name, member_map, ast)
self.real_module = ctx.convert.constant_to_value(
ast, subst=datatypes.AliasingDict(), node=ctx.root_node)
def _convert_member(self, member, subst=None):
val = member(self.ctx)
val.module = self.name
return val.to_variable(self.ctx.root_node)
def get_module(self, name):
"""Returns the abstract.Module for the given name."""
if name in self._member_map:
return self
else:
return self.real_module
def items(self):
items = super().items()
items += [(name, item) for name, item in self.real_module.items()
if name not in self._member_map]
return items
def build(name, builder):
"""Wrapper to turn (name, ctx) -> val method signatures into (ctx) -> val."""
return lambda ctx: builder(name, ctx)
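# Usage sketch (names here are hypothetical, not from pytype): `build` lets a
# member map store uniform (ctx) -> value factories even when the builder
# itself needs the member name:
#   member_map = {"namedtuple": build("namedtuple", NamedTupleBuilder)}
# where NamedTupleBuilder is a (name, ctx) -> BaseValue callable.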
|
python
|
import cplex
import numpy as np
names = ["x11", "x12", "x13", "x14",
"x21", "x22", "x23", "x24",
"x31", "x32", "x33", "x34",
"y11", "y12", "y13", "y14",
"y21", "y22", "y23", "y24",
"y31", "y32", "y33", "y34"]
T = np.array([[3.0, 2.0, 2.0, 1.0],
[4.0, 3.0, 3.0, 2.0],
[5.0, 5.0, 4.0, 2.0]])
Q = np.array([50.0, 30.0, 20.0])
N = np.array([5.0, 8.0, 10.0])
D = np.array([700.0, 1500.0, 700.0, 1500.0])
C = np.array([[1000.0, 1100.0, 1200.0, 1500.0],
[800.0, 900.0, 1000.0, 1000.0],
[600.0, 800.0, 800.0, 900.0]])
P = np.array([40.0, 50.0, 45.0, 70.0])
# Example with deep branching:
# T = np.array([[3.0, 2.0, 2.0, 7.0],
# [4.0, 3.0, 1.0, 2.0],
# [7.0, 2.0, 4.0, 2.0]])
# Q = np.array([52.0, 29.0, 13.0])
# N = np.array([5.0, 8.0, 10.0])
# D = np.array([530.0, 1720.0, 780.0, 1530.0])
#
# C = np.array([[1000.0, 1100.0, 1200.0, 1500.0],
# [800.0, 900.0, 1000.0, 1000.0],
# [600.0, 800.0, 800.0, 900.0]])
# P = np.array([42.0, 39.0, 45.0, 69.0])
Z = np.array([Q * T[:, i] * P[i] for i in range(4)]).T
objective = list(np.array([Z, -T * C]).flatten())
lower_bounds = [0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0]
upper_bounds = [cplex.infinity, cplex.infinity, cplex.infinity, cplex.infinity,
cplex.infinity, cplex.infinity, cplex.infinity, cplex.infinity,
cplex.infinity, cplex.infinity, cplex.infinity, cplex.infinity,
cplex.infinity, cplex.infinity, cplex.infinity, cplex.infinity,
cplex.infinity, cplex.infinity, cplex.infinity, cplex.infinity,
cplex.infinity, cplex.infinity, cplex.infinity, cplex.infinity]
constraint_names = ["n1", "n2", "n3",
"d1", "d2", "d3", "d4",
"xy11", "xy12", "xy13", "xy14",
"xy21", "xy22", "xy23", "xy24",
"xy31", "xy32", "xy33", "xy34"]
constraint_n1 = [["y11", "y12", "y13", "y14"], [1.0, 1.0, 1.0, 1.0]]
constraint_n2 = [["y21", "y22", "y23", "y24"], [1.0, 1.0, 1.0, 1.0]]
constraint_n3 = [["y31", "y32", "y33", "y34"], [1.0, 1.0, 1.0, 1.0]]
constraint_d1 = [["x11", "x21", "x31"], [Q[0] * T[0][0], Q[1] * T[1][0], Q[2] * T[2][0]]]
constraint_d2 = [["x12", "x22", "x32"], [Q[0] * T[0][1], Q[1] * T[1][1], Q[2] * T[2][1]]]
constraint_d3 = [["x13", "x23", "x33"], [Q[0] * T[0][2], Q[1] * T[1][2], Q[2] * T[2][2]]]
constraint_d4 = [["x14", "x24", "x34"], [Q[0] * T[0][3], Q[1] * T[1][3], Q[2] * T[2][3]]]
constraint_xy11 = [["x11", "y11"], [1.0, -1.0]]
constraint_xy12 = [["x12", "y12"], [1.0, -1.0]]
constraint_xy13 = [["x13", "y13"], [1.0, -1.0]]
constraint_xy14 = [["x14", "y14"], [1.0, -1.0]]
constraint_xy21 = [["x21", "y21"], [1.0, -1.0]]
constraint_xy22 = [["x22", "y22"], [1.0, -1.0]]
constraint_xy23 = [["x23", "y23"], [1.0, -1.0]]
constraint_xy24 = [["x24", "y24"], [1.0, -1.0]]
constraint_xy31 = [["x31", "y31"], [1.0, -1.0]]
constraint_xy32 = [["x32", "y32"], [1.0, -1.0]]
constraint_xy33 = [["x33", "y33"], [1.0, -1.0]]
constraint_xy34 = [["x34", "y34"], [1.0, -1.0]]
constraints = [constraint_n1, constraint_n2, constraint_n3,
constraint_d1, constraint_d2, constraint_d3, constraint_d4,
constraint_xy11, constraint_xy12, constraint_xy13, constraint_xy14,
constraint_xy21, constraint_xy22, constraint_xy23, constraint_xy24,
constraint_xy31, constraint_xy32, constraint_xy33, constraint_xy34]
rhs = [N[0], N[1], N[2], # for constraint_n#
D[0], D[1], D[2], D[3], # for constraint_d#
0.0, 0.0, 0.0, 0.0, # for constraint_xy##
0.0, 0.0, 0.0, 0.0, # for constraint_xy##
0.0, 0.0, 0.0, 0.0 # for constraint_xy##
]
constraint_senses = ["L", "L", "L",
"L", "L", "L", "L",
"L", "L", "L", "L",
"L", "L", "L", "L",
"L", "L", "L", "L"]
|
python
|
"""Main entry point for the pixelation tool."""
import sys
from .constants import SUCCESS
from .core import PixelArt
from .parser import build_parser, parse_args
def main() -> int:
"""Parses the command line arguments and runs the tool."""
arg_parser = build_parser()
args = parse_args(arg_parser)
pixel_art = PixelArt(args['filename'],
args['granularity'],
args['ncolors'],
args['nbits'],
args['color_space'],
args['verbose'])
pixelated_image = pixel_art.pixelate()
pixelated_image.show()
if args['save']:
if args['verbose']:
print("Saving to " + args['filename'].split(".")[0] + "_pixelated.png ...")
pixelated_image.save(args['filename'].split(".")[0] + "_pixelated.png")
if args['verbose']:
print('Done')
return SUCCESS
if __name__ == "__main__":
sys.exit(main())
|
python
|
"""Classes for defining instructions."""
from __future__ import absolute_import
from . import camel_case
from .types import ValueType
from .operands import Operand
from .formats import InstructionFormat
try:
from typing import Union, Sequence, List, Tuple, Any, TYPE_CHECKING # noqa
from typing import Dict # noqa
if TYPE_CHECKING:
from .ast import Expr, Apply, Var, Def, VarAtomMap # noqa
from .typevar import TypeVar # noqa
from .ti import TypeConstraint # noqa
from .xform import XForm, Rtl
# List of operands for ins/outs:
OpList = Union[Sequence[Operand], Operand]
ConstrList = Union[Sequence[TypeConstraint], TypeConstraint]
MaybeBoundInst = Union['Instruction', 'BoundInstruction']
InstructionSemantics = Sequence[XForm]
SemDefCase = Union[Rtl, Tuple[Rtl, Sequence[TypeConstraint]], XForm]
except ImportError:
pass
class InstructionGroup(object):
"""
Every instruction must belong to exactly one instruction group. A given
target architecture can support instructions from multiple groups, and it
does not necessarily support all instructions in a group.
New instructions are automatically added to the currently open instruction
group.
"""
# The currently open instruction group.
_current = None # type: InstructionGroup
def open(self):
# type: () -> None
"""
Open this instruction group such that future new instructions are
added to this group.
"""
assert InstructionGroup._current is None, (
"Can't open {} since {} is already open"
.format(self, InstructionGroup._current))
InstructionGroup._current = self
def close(self):
# type: () -> None
"""
Close this instruction group. This function should be called before
opening another instruction group.
"""
assert InstructionGroup._current is self, (
"Can't close {}, the open instuction group is {}"
.format(self, InstructionGroup._current))
InstructionGroup._current = None
def __init__(self, name, doc):
# type: (str, str) -> None
self.name = name
self.__doc__ = doc
self.instructions = [] # type: List[Instruction]
self.open()
@staticmethod
def append(inst):
# type: (Instruction) -> None
assert InstructionGroup._current, \
"Open an instruction group before defining instructions."
InstructionGroup._current.instructions.append(inst)
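
# Usage sketch (hypothetical group name): instructions defined while a group
# is open are appended to it through InstructionGroup.append.
#
#     GROUP = InstructionGroup('demo', """Demonstration instructions.""")
#     ... define Instruction(...) instances here ...
#     GROUP.close()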
class Instruction(object):
"""
The operands to the instruction are specified as two tuples: ``ins`` and
``outs``. Since the Python singleton tuple syntax is a bit awkward, it is
allowed to specify a singleton as just the operand itself, i.e., `ins=x`
and `ins=(x,)` are both allowed and mean the same thing.
:param name: Instruction mnemonic, also becomes opcode name.
:param doc: Documentation string.
:param ins: Tuple of input operands. This can be a mix of SSA value
operands and other operand kinds.
:param outs: Tuple of output operands. The output operands must be SSA
values or `variable_args`.
:param constraints: Tuple of instruction-specific TypeConstraints.
:param is_terminator: This is a terminator instruction.
:param is_branch: This is a branch instruction.
:param is_indirect_branch: This is an indirect branch instruction.
:param is_call: This is a call instruction.
:param is_return: This is a return instruction.
:param is_ghost: This is a ghost instruction, which has no encoding and no
other register allocation constraints.
:param can_trap: This instruction can trap.
:param can_load: This instruction can load from memory.
:param can_store: This instruction can store to memory.
:param other_side_effects: Instruction has other side effects.
"""
# Boolean instruction attributes that can be passed as keyword arguments to
# the constructor. Map attribute name to doc comment for generated Rust
# code.
ATTRIBS = {
'is_terminator': 'True for instructions that terminate the EBB.',
'is_branch': 'True for all branch or jump instructions.',
'is_indirect_branch':
'True for all indirect branch or jump instructions.',
'is_call': 'Is this a call instruction?',
'is_return': 'Is this a return instruction?',
'is_ghost': 'Is this a ghost instruction?',
'can_load': 'Can this instruction read from memory?',
'can_store': 'Can this instruction write to memory?',
'can_trap': 'Can this instruction cause a trap?',
'other_side_effects':
'Does this instruction have other side effects besides can_*',
'writes_cpu_flags': 'Does this instruction write to CPU flags?',
}
def __init__(self, name, doc, ins=(), outs=(), constraints=(), **kwargs):
# type: (str, str, OpList, OpList, ConstrList, **Any) -> None
self.name = name
self.camel_name = camel_case(name)
self.__doc__ = doc
self.ins = self._to_operand_tuple(ins)
self.outs = self._to_operand_tuple(outs)
self.constraints = self._to_constraint_tuple(constraints)
self.format = InstructionFormat.lookup(self.ins, self.outs)
self.semantics = None # type: InstructionSemantics
# Opcode number, assigned by gen_instr.py.
self.number = None # type: int
# Indexes into `self.outs` for value results.
# Other results are `variable_args`.
self.value_results = tuple(
i for i, o in enumerate(self.outs) if o.is_value())
# Indexes into `self.ins` for value operands.
self.value_opnums = tuple(
i for i, o in enumerate(self.ins) if o.is_value())
# Indexes into `self.ins` for non-value operands.
self.imm_opnums = tuple(
i for i, o in enumerate(self.ins) if o.is_immediate())
self._verify_polymorphic()
for attr in kwargs:
if attr not in Instruction.ATTRIBS:
raise AssertionError(
"unknown instruction attribute '" + attr + "'")
for attr in Instruction.ATTRIBS:
            setattr(self, attr, bool(kwargs.get(attr, False)))
# Infer the 'writes_cpu_flags' field value.
if 'writes_cpu_flags' not in kwargs:
self.writes_cpu_flags = any(
out.is_cpu_flags() for out in self.outs)
InstructionGroup.append(self)
def __str__(self):
# type: () -> str
prefix = ', '.join(o.name for o in self.outs)
if prefix:
prefix = prefix + ' = '
suffix = ', '.join(o.name for o in self.ins)
return '{}{} {}'.format(prefix, self.name, suffix)
def snake_name(self):
# type: () -> str
"""
Get the snake_case name of this instruction.
Keywords in Rust and Python are altered by appending a '_'
"""
if self.name == 'return':
return 'return_'
else:
return self.name
def blurb(self):
# type: () -> str
"""Get the first line of the doc comment"""
for line in self.__doc__.split('\n'):
line = line.strip()
if line:
return line
return ""
def _verify_polymorphic(self):
# type: () -> None
"""
Check if this instruction is polymorphic, and verify its use of type
variables.
"""
poly_ins = [
i for i in self.value_opnums
if self.ins[i].typevar.free_typevar()]
poly_outs = [
i for i, o in enumerate(self.outs)
if o.is_value() and o.typevar.free_typevar()]
self.is_polymorphic = len(poly_ins) > 0 or len(poly_outs) > 0
if not self.is_polymorphic:
return
# Prefer to use the typevar_operand to infer the controlling typevar.
self.use_typevar_operand = False
typevar_error = None
tv_op = self.format.typevar_operand
if tv_op is not None and tv_op < len(self.value_opnums):
try:
opnum = self.value_opnums[tv_op]
tv = self.ins[opnum].typevar
if tv is tv.free_typevar() or tv.singleton_type() is not None:
self.other_typevars = self._verify_ctrl_typevar(tv)
self.ctrl_typevar = tv
self.use_typevar_operand = True
except RuntimeError as e:
typevar_error = e
if not self.use_typevar_operand:
# The typevar_operand argument doesn't work. Can we infer from the
# first result instead?
if len(self.outs) == 0:
if typevar_error:
raise typevar_error
else:
raise RuntimeError(
"typevar_operand must be a free type variable")
tv = self.outs[0].typevar
if tv is not tv.free_typevar():
raise RuntimeError("first result must be a free type variable")
self.other_typevars = self._verify_ctrl_typevar(tv)
self.ctrl_typevar = tv
def _verify_ctrl_typevar(self, ctrl_typevar):
# type: (TypeVar) -> List[TypeVar]
"""
Verify that the use of TypeVars is consistent with `ctrl_typevar` as
the controlling type variable.
        All polymorphic inputs must either be derived from `ctrl_typevar` or be
independent free type variables only used once.
All polymorphic results must be derived from `ctrl_typevar`.
Return list of other type variables used, or raise an error.
"""
other_tvs = [] # type: List[TypeVar]
# Check value inputs.
for opnum in self.value_opnums:
typ = self.ins[opnum].typevar
tv = typ.free_typevar()
            # Non-polymorphic or derived from ctrl_typevar is OK.
if tv is None or tv is ctrl_typevar:
continue
# No other derived typevars allowed.
if typ is not tv:
raise RuntimeError(
"{}: type variable {} must be derived from {}"
.format(self.ins[opnum], typ.name, ctrl_typevar))
# Other free type variables can only be used once each.
if tv in other_tvs:
raise RuntimeError(
"type variable {} can't be used more than once"
.format(tv.name))
other_tvs.append(tv)
# Check outputs.
for result in self.outs:
if not result.is_value():
continue
typ = result.typevar
tv = typ.free_typevar()
# Non-polymorphic or derived from ctrl_typevar is OK.
if tv is None or tv is ctrl_typevar:
continue
raise RuntimeError(
"type variable in output not derived from ctrl_typevar")
return other_tvs
def all_typevars(self):
# type: () -> List[TypeVar]
"""
Get a list of all type variables in the instruction.
"""
if self.is_polymorphic:
return [self.ctrl_typevar] + self.other_typevars
else:
return []
@staticmethod
def _to_operand_tuple(x):
# type: (Union[Sequence[Operand], Operand]) -> Tuple[Operand, ...]
# Allow a single Operand instance instead of the awkward singleton
# tuple syntax.
if isinstance(x, Operand):
y = (x,) # type: Tuple[Operand, ...]
else:
y = tuple(x)
for op in y:
assert isinstance(op, Operand)
return y
@staticmethod
def _to_constraint_tuple(x):
# type: (ConstrList) -> Tuple[TypeConstraint, ...]
"""
Allow a single TypeConstraint instance instead of the awkward singleton
tuple syntax.
"""
# import placed here to avoid circular dependency
from .ti import TypeConstraint # noqa
if isinstance(x, TypeConstraint):
y = (x,) # type: Tuple[TypeConstraint, ...]
else:
y = tuple(x)
for op in y:
assert isinstance(op, TypeConstraint)
return y
def bind(self, *args):
# type: (*ValueType) -> BoundInstruction
"""
Bind a polymorphic instruction to a concrete list of type variable
values.
"""
assert self.is_polymorphic
return BoundInstruction(self, args)
def __getattr__(self, name):
# type: (str) -> BoundInstruction
"""
Bind a polymorphic instruction to a single type variable with dot
syntax:
>>> iadd.i32
"""
assert name != 'any', 'Wildcard not allowed for ctrl_typevar'
return self.bind(ValueType.by_name(name))
def fully_bound(self):
# type: () -> Tuple[Instruction, Tuple[ValueType, ...]]
"""
Verify that all typevars have been bound, and return a
`(inst, typevars)` pair.
This version in `Instruction` itself allows non-polymorphic
instructions to duck-type as `BoundInstruction`\\s.
"""
assert not self.is_polymorphic, self
return (self, ())
def __call__(self, *args):
# type: (*Expr) -> Apply
"""
Create an `ast.Apply` AST node representing the application of this
instruction to the arguments.
"""
from .ast import Apply # noqa
return Apply(self, args)
def set_semantics(self, src, *dsts):
# type: (Union[Def, Apply], *SemDefCase) -> None
"""Set our semantics."""
from semantics import verify_semantics
from .xform import XForm, Rtl
sem = [] # type: List[XForm]
for dst in dsts:
if isinstance(dst, Rtl):
sem.append(XForm(Rtl(src).copy({}), dst))
elif isinstance(dst, XForm):
sem.append(XForm(
dst.src.copy({}),
dst.dst.copy({}),
dst.constraints))
else:
assert isinstance(dst, tuple)
sem.append(XForm(Rtl(src).copy({}), dst[0],
constraints=dst[1]))
verify_semantics(self, Rtl(src), sem)
self.semantics = sem
class BoundInstruction(object):
"""
A polymorphic `Instruction` bound to concrete type variables.
"""
def __init__(self, inst, typevars):
# type: (Instruction, Tuple[ValueType, ...]) -> None
self.inst = inst
self.typevars = typevars
assert len(typevars) <= 1 + len(inst.other_typevars)
def __str__(self):
# type: () -> str
return '.'.join([self.inst.name, ] + list(map(str, self.typevars)))
def bind(self, *args):
# type: (*ValueType) -> BoundInstruction
"""
Bind additional typevars.
"""
return BoundInstruction(self.inst, self.typevars + args)
def __getattr__(self, name):
# type: (str) -> BoundInstruction
"""
        Bind an additional typevar with dot syntax:
>>> uext.i32.i8
"""
if name == 'any':
# This is a wild card bind represented as a None type variable.
return self.bind(None)
return self.bind(ValueType.by_name(name))
def fully_bound(self):
# type: () -> Tuple[Instruction, Tuple[ValueType, ...]]
"""
Verify that all typevars have been bound, and return a
`(inst, typevars)` pair.
"""
if len(self.typevars) < 1 + len(self.inst.other_typevars):
unb = ', '.join(
str(tv) for tv in
self.inst.other_typevars[len(self.typevars) - 1:])
raise AssertionError("Unbound typevar {} in {}".format(unb, self))
assert len(self.typevars) == 1 + len(self.inst.other_typevars)
return (self.inst, self.typevars)
def __call__(self, *args):
# type: (*Expr) -> Apply
"""
Create an `ast.Apply` AST node representing the application of this
instruction to the arguments.
"""
from .ast import Apply # noqa
return Apply(self, args)
|
python
|
#!/usr/bin/env python3
"""
Abstract base class for data Readers.
"""
import sys
sys.path.append('.')
from logger.utils import formats
################################################################################
class Reader:
"""
Base class Reader about which we know nothing else. By default the
output format is Unknown unless overridden.
"""
def __init__(self, output_format=formats.Unknown):
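        # Validate and store the format through the output_format() accessor.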
self.output_format(output_format)
############################
def output_format(self, new_format=None):
"""
Return our output format or set a new output format
"""
if new_format is not None:
if not formats.is_format(new_format):
raise TypeError('Argument "%s" is not a known format type' % new_format)
self.out_format = new_format
return self.out_format
############################
def read(self):
"""
read() should return None when there are no more records.
"""
raise NotImplementedError('Class %s (subclass of Reader) is missing '
'implementation of read() method.'
% self.__class__.__name__)
################################################################################
class StorageReader(Reader):
"""
A StorageReader is something like a file, where we can, in theory,
seek and rewind, or retrieve a range of records.
"""
def __init__(self, output_format=formats.Unknown):
super().__init__(output_format=output_format)
# Behavior is intended to mimic file seek() behavior but with
# respect to records: 'offset' means number of records, and origin
# is either 'start', 'current' or 'end'.
def seek(self, offset=0, origin='current'):
raise NotImplementedError('Class %s (subclass of StorageReader) is missing '
'implementation of seek() method.'
% self.__class__.__name__)
############################
def read_range(self, start=None, stop=None):
"""
Read a range of records beginning with record number start, and ending
*before* record number stop.
"""
raise NotImplementedError('Class %s (subclass of StorageReader) is missing '
'implementation of read_range() method.'
% self.__class__.__name__)
################################################################################
class TimestampedReader(StorageReader):
"""
A TimestampedReader is a special case of a StorageReader where we
can seek and retrieve a range specified by timestamps.
"""
def __init__(self, output_format=formats.Unknown):
super().__init__(output_format=output_format)
# Behavior is intended to mimic file seek() behavior but with
# respect to timestamps: 'offset' means number of milliseconds, and
# origin is either 'start', 'current' or 'end'.
def seek_time(self, offset=0, origin='current'):
raise NotImplementedError('Class %s (subclass of TimestampedReader) is missing '
'implementation of seek_time() method.'
% self.__class__.__name__)
# Read a range of records beginning with timestamp start
# milliseconds, and ending *before* timestamp stop milliseconds.
def read_time_range(self, start=None, stop=None):
raise NotImplementedError('Class %s (subclass of TimestampedReader) is missing '
                                  'implementation of read_time_range() method.'
% self.__class__.__name__)
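
################################################################################
# A minimal concrete Reader sketch (hypothetical class, not part of the
# original module): it returns each element of a list as one record and
# None when the list is exhausted, as read() requires.
class ListReader(Reader):
    def __init__(self, records, output_format=formats.Unknown):
        super().__init__(output_format=output_format)
        self.records = list(records)

    def read(self):
        # Pop records in order; None signals end-of-stream.
        return self.records.pop(0) if self.records else None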
|
python
|
import os
import shutil
from codecs import open
from os import path
from setuptools import setup, Command
here = path.abspath(path.dirname(__file__))
name = 'sqe'
version = '0.1.0'
class CleanCommand(Command):
description = "custom clean command that forcefully removes dist and build directories"
user_options = []
def initialize_options(self):
self.cwd = None
def finalize_options(self):
self.cwd = os.getcwd()
def run(self):
if path.exists(path.join(here, 'build')):
shutil.rmtree(path.join(here, 'build'))
if path.exists(path.join(here, 'dist')):
shutil.rmtree(path.join(here, 'dist'))
if path.exists(path.join(here, name.replace('-', '_') + '.egg-info')):
shutil.rmtree(path.join(here, name.replace('-', '_') + '.egg-info'))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
with open(path.join(here, 'requirements.txt'), encoding='utf-8') as f:
data = f.readlines()
requires = [line.strip() for line in data if line.strip()]
setup(
name=name,
version=version, # Required
    description='SonarQube Exporter',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/nthienan/sonarqube-exporter',
author='An Nguyen',
author_email='[email protected]',
license='MIT',
classifiers=[ # Optional
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: DevOps',
"License :: OSI Approved :: MIT License",
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
],
keywords='sonarqube-exporter, sonarqube, exporter, sqe, devops',
# You can just specify package directories manually here if your project is
# simple. Or you can use find_packages().
#
# Alternatively, if you just want to distribute a single Python file, use
# the `py_modules` argument instead as follows, which will expect a file
# called `my_module.py` to exist:
#
# py_modules=["my_module"],
#
packages=['sqe'],
package_dir={'sqe': 'src'},
# scripts=['src/jae'],
# This field lists other packages that your project depends on to run.
# Any package you put here will be installed by pip when your project is
# installed, so they must be valid existing projects.
#
# For an analysis of "install_requires" vs pip's requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=requires,
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# `pip` to create the appropriate form of executable for the target
# platform.
#
# For example, the following would provide a command called `sample` which
# executes the function `main` from this package when invoked:
# entry_points={ # Optional
# 'console_scripts': [
# 'sample=sample:main',
# ],
# },
entry_points={
'console_scripts': [
'sqe=sqe.app:main'
],
},
project_urls={
'Bug Reports': 'https://github.com/nthienan/sonarqube-exporter/issues',
'Source': 'https://github.com/nthienan/sonarqube-exporter',
},
cmdclass={
'clean': CleanCommand
},
)
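
# With CleanCommand registered in cmdclass above, `python setup.py clean`
# forcefully removes the build/, dist/, and egg-info directories left over
# from a previous build.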
|
python
|
from datetime import date, datetime, timedelta
from django.test import TestCase
from projects.models import Project, Invoice
from inspectors.models import Inspector
class ProjectModelTest(TestCase):
def setUp(self):
date_1 = date.today()
date_2 = date_1 + timedelta(days=10)
p1 = Project.objects.create(
prudent_number = '103.219',
penndot_number = 'E01993',
name = 'CP2',
inspector = [Inspector.objects.create(
id=2,
first_name = "bard",
last_name = "Test",
office = "King of Prussia",
classification = "TCI-2",
address ="123 kitty lane",
location = "Pittsburgh, PA",
work_radius = 95,
email = "[email protected]",
phone_number = "3022993322"
            )],
office = 'King of Prussia',
start_date = date_1,
end_date = date_2,
st_hours = 300,
ot_hours = 25,
payroll_budget = 132000,
other_cost_budget = 10000,
)
p2 = Project.objects.create(
prudent_number = '103.111',
penndot_number = 'E01994',
name = 'Septa bridge over mars',
inspector = [Inspector.objects.create(
id=1,
first_name = "Mark",
last_name = "Test",
office = "King of Prussia",
classification = "TCI-2",
address ="123 kitty lane",
location = "Pittsburgh, PA",
work_radius = 95,
email = "[email protected]",
phone_number = "3022993322",
            )],
office = 'King of Prussia',
start_date = date_1,
end_date = date_2,
st_hours = 300,
ot_hours = 25,
payroll_budget = 132000,
other_cost_budget = 10000,
)
i1 = Invoice.objects.create(
project = p1,
estimate_num = 1,
start_date = date_1,
end_date = date_1 + timedelta(days=3),
payroll= 11260.90,
other_cost = 505.11,
st_hours = 13,
ot_hours = 16,
invoice_num = '19961',
)
i2 = Invoice.objects.create(
project = p1,
estimate_num = 2,
start_date = date_1,
end_date = date_1 + timedelta(days=-3),
payroll = 32134.90,
other_cost = 505.11,
st_hours = 13,
ot_hours = 16,
invoice_num = '19962',
)
i3 = Invoice.objects.create(
project = p2,
estimate_num = 2,
start_date = date_1,
end_date = date_1 + timedelta(days=-3),
payroll = 11260.90,
other_cost = 505.11,
st_hours = 13,
ot_hours = 16,
invoice_num = '19963',
)
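        # Fixture summary: p1 owns invoices i1 and i2; p2 owns i3.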
def test_project_creation(self):
        ''' Project manager accounts for both projects created'''
project_count = Project.objects.count()
self.assertEqual(project_count, 2)
def test_invoice_creation(self):
''' Invoice manager accounts for all 3 invoices created'''
invoice_count = Invoice.objects.count()
self.assertEqual(invoice_count, 3)
def test_budget_totals(self):
        '''sums payroll and other cost across associated invoices'''
p1 = Project.objects.last()
self.assertEqual(p1.payroll_to_date, (11260.90 + 32134.90))
self.assertEqual(p1.other_cost_to_date, 1010.22)
# def test_total_other_cost(self):
# '''sum of other cost in all associated invoices'''
# p1 = Project.objects.last()
# self.assertEqual(p1)
def test_last_invoiced(self):
''' Returns end date of most recent invoice in mm/dd/yyyy format'''
p1 = Project.objects.last()
self.assertEqual(p1.last_invoiced, datetime.strftime(date.today() + timedelta(days=3),"%m/%d/%Y"))
def test_burn_rate_calc(self):
'''Calculates months remaining at current rate'''
p1 = Project.objects.last()
self.assertEqual(p1.is_almost_finished(), 4)
# Make sure its recalculated after a new invoice
Invoice.objects.create(
project = p1,
estimate_num = 3,
start_date = date.today(),
end_date = date.today() + timedelta(days=-15),
payroll = 25000.90,
other_cost = 505.11,
st_hours = 13,
ot_hours = 16,
invoice_num = '19964',
)
self.assertEqual(p1.is_almost_finished(), 3)
|
python
|
# coding: utf-8
"""
vloadbalancer
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ncloud_vloadbalancer.api_client import ApiClient
class V2Api(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
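    # Usage sketch (hypothetical request object; ApiClient is imported above):
    #
    #     api = V2Api(ApiClient())
    #     response = api.add_target(add_target_request)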
def add_target(self, add_target_request, **kwargs): # noqa: E501
"""add_target # noqa: E501
        Add a target  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_target(add_target_request, async=True)
>>> result = thread.get()
:param async bool
:param AddTargetRequest add_target_request: addTargetRequest (required)
:return: AddTargetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.add_target_with_http_info(add_target_request, **kwargs) # noqa: E501
else:
(data) = self.add_target_with_http_info(add_target_request, **kwargs) # noqa: E501
return data
def add_target_with_http_info(self, add_target_request, **kwargs): # noqa: E501
"""add_target # noqa: E501
        Add a target  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_target_with_http_info(add_target_request, async=True)
>>> result = thread.get()
:param async bool
:param AddTargetRequest add_target_request: addTargetRequest (required)
:return: AddTargetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['add_target_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_target" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'add_target_request' is set
if ('add_target_request' not in params or
params['add_target_request'] is None):
raise ValueError("Missing the required parameter `add_target_request` when calling `add_target`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'add_target_request' in params:
body_params = params['add_target_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/addTarget', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AddTargetResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def change_load_balancer_instance_configuration(self, change_load_balancer_instance_configuration_request, **kwargs): # noqa: E501
"""change_load_balancer_instance_configuration # noqa: E501
        Change the load balancer instance configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_load_balancer_instance_configuration(change_load_balancer_instance_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeLoadBalancerInstanceConfigurationRequest change_load_balancer_instance_configuration_request: changeLoadBalancerInstanceConfigurationRequest (required)
:return: ChangeLoadBalancerInstanceConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.change_load_balancer_instance_configuration_with_http_info(change_load_balancer_instance_configuration_request, **kwargs) # noqa: E501
else:
(data) = self.change_load_balancer_instance_configuration_with_http_info(change_load_balancer_instance_configuration_request, **kwargs) # noqa: E501
return data
def change_load_balancer_instance_configuration_with_http_info(self, change_load_balancer_instance_configuration_request, **kwargs): # noqa: E501
"""change_load_balancer_instance_configuration # noqa: E501
        Change the load balancer instance configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_load_balancer_instance_configuration_with_http_info(change_load_balancer_instance_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeLoadBalancerInstanceConfigurationRequest change_load_balancer_instance_configuration_request: changeLoadBalancerInstanceConfigurationRequest (required)
:return: ChangeLoadBalancerInstanceConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['change_load_balancer_instance_configuration_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_load_balancer_instance_configuration" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'change_load_balancer_instance_configuration_request' is set
if ('change_load_balancer_instance_configuration_request' not in params or
params['change_load_balancer_instance_configuration_request'] is None):
raise ValueError("Missing the required parameter `change_load_balancer_instance_configuration_request` when calling `change_load_balancer_instance_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'change_load_balancer_instance_configuration_request' in params:
body_params = params['change_load_balancer_instance_configuration_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/changeLoadBalancerInstanceConfiguration', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ChangeLoadBalancerInstanceConfigurationResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def change_load_balancer_listener_configuration(self, change_load_balancer_listener_configuration_request, **kwargs): # noqa: E501
"""change_load_balancer_listener_configuration # noqa: E501
        Change the load balancer listener configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_load_balancer_listener_configuration(change_load_balancer_listener_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeLoadBalancerListenerConfigurationRequest change_load_balancer_listener_configuration_request: changeLoadBalancerListenerConfigurationRequest (required)
:return: ChangeLoadBalancerListenerConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.change_load_balancer_listener_configuration_with_http_info(change_load_balancer_listener_configuration_request, **kwargs) # noqa: E501
else:
(data) = self.change_load_balancer_listener_configuration_with_http_info(change_load_balancer_listener_configuration_request, **kwargs) # noqa: E501
return data
def change_load_balancer_listener_configuration_with_http_info(self, change_load_balancer_listener_configuration_request, **kwargs): # noqa: E501
"""change_load_balancer_listener_configuration # noqa: E501
        Change the load balancer listener configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_load_balancer_listener_configuration_with_http_info(change_load_balancer_listener_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeLoadBalancerListenerConfigurationRequest change_load_balancer_listener_configuration_request: changeLoadBalancerListenerConfigurationRequest (required)
:return: ChangeLoadBalancerListenerConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['change_load_balancer_listener_configuration_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_load_balancer_listener_configuration" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'change_load_balancer_listener_configuration_request' is set
if ('change_load_balancer_listener_configuration_request' not in params or
params['change_load_balancer_listener_configuration_request'] is None):
raise ValueError("Missing the required parameter `change_load_balancer_listener_configuration_request` when calling `change_load_balancer_listener_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'change_load_balancer_listener_configuration_request' in params:
body_params = params['change_load_balancer_listener_configuration_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/changeLoadBalancerListenerConfiguration', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ChangeLoadBalancerListenerConfigurationResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def change_target_group_configuration(self, change_target_group_configuration_request, **kwargs): # noqa: E501
"""change_target_group_configuration # noqa: E501
        Change the target group configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_target_group_configuration(change_target_group_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeTargetGroupConfigurationRequest change_target_group_configuration_request: changeTargetGroupConfigurationRequest (required)
:return: ChangeTargetGroupConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.change_target_group_configuration_with_http_info(change_target_group_configuration_request, **kwargs) # noqa: E501
else:
(data) = self.change_target_group_configuration_with_http_info(change_target_group_configuration_request, **kwargs) # noqa: E501
return data
def change_target_group_configuration_with_http_info(self, change_target_group_configuration_request, **kwargs): # noqa: E501
"""change_target_group_configuration # noqa: E501
        Change the target group configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_target_group_configuration_with_http_info(change_target_group_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeTargetGroupConfigurationRequest change_target_group_configuration_request: changeTargetGroupConfigurationRequest (required)
:return: ChangeTargetGroupConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['change_target_group_configuration_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_target_group_configuration" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'change_target_group_configuration_request' is set
if ('change_target_group_configuration_request' not in params or
params['change_target_group_configuration_request'] is None):
raise ValueError("Missing the required parameter `change_target_group_configuration_request` when calling `change_target_group_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'change_target_group_configuration_request' in params:
body_params = params['change_target_group_configuration_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/changeTargetGroupConfiguration', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ChangeTargetGroupConfigurationResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def change_target_group_health_check_configuration(self, change_target_group_health_check_configuration_request, **kwargs): # noqa: E501
"""change_target_group_health_check_configuration # noqa: E501
        Change the target group health check configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_target_group_health_check_configuration(change_target_group_health_check_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeTargetGroupHealthCheckConfigurationRequest change_target_group_health_check_configuration_request: changeTargetGroupHealthCheckConfigurationRequest (required)
:return: ChangeTargetGroupHealthCheckConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.change_target_group_health_check_configuration_with_http_info(change_target_group_health_check_configuration_request, **kwargs) # noqa: E501
else:
(data) = self.change_target_group_health_check_configuration_with_http_info(change_target_group_health_check_configuration_request, **kwargs) # noqa: E501
return data
def change_target_group_health_check_configuration_with_http_info(self, change_target_group_health_check_configuration_request, **kwargs): # noqa: E501
"""change_target_group_health_check_configuration # noqa: E501
        Change the target group health check configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_target_group_health_check_configuration_with_http_info(change_target_group_health_check_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeTargetGroupHealthCheckConfigurationRequest change_target_group_health_check_configuration_request: changeTargetGroupHealthCheckConfigurationRequest (required)
:return: ChangeTargetGroupHealthCheckConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['change_target_group_health_check_configuration_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_target_group_health_check_configuration" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'change_target_group_health_check_configuration_request' is set
if ('change_target_group_health_check_configuration_request' not in params or
params['change_target_group_health_check_configuration_request'] is None):
raise ValueError("Missing the required parameter `change_target_group_health_check_configuration_request` when calling `change_target_group_health_check_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'change_target_group_health_check_configuration_request' in params:
body_params = params['change_target_group_health_check_configuration_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/changeTargetGroupHealthCheckConfiguration', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ChangeTargetGroupHealthCheckConfigurationResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_load_balancer_instance(self, create_load_balancer_instance_request, **kwargs): # noqa: E501
"""create_load_balancer_instance # noqa: E501
        Create a load balancer instance  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_load_balancer_instance(create_load_balancer_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateLoadBalancerInstanceRequest create_load_balancer_instance_request: createLoadBalancerInstanceRequest (required)
:return: CreateLoadBalancerInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_load_balancer_instance_with_http_info(create_load_balancer_instance_request, **kwargs) # noqa: E501
else:
(data) = self.create_load_balancer_instance_with_http_info(create_load_balancer_instance_request, **kwargs) # noqa: E501
return data
def create_load_balancer_instance_with_http_info(self, create_load_balancer_instance_request, **kwargs): # noqa: E501
"""create_load_balancer_instance # noqa: E501
        Create a load balancer instance  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_load_balancer_instance_with_http_info(create_load_balancer_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateLoadBalancerInstanceRequest create_load_balancer_instance_request: createLoadBalancerInstanceRequest (required)
:return: CreateLoadBalancerInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_load_balancer_instance_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_load_balancer_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_load_balancer_instance_request' is set
if ('create_load_balancer_instance_request' not in params or
params['create_load_balancer_instance_request'] is None):
raise ValueError("Missing the required parameter `create_load_balancer_instance_request` when calling `create_load_balancer_instance`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_load_balancer_instance_request' in params:
body_params = params['create_load_balancer_instance_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createLoadBalancerInstance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateLoadBalancerInstanceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_load_balancer_listener(self, create_load_balancer_listener_request, **kwargs): # noqa: E501
"""create_load_balancer_listener # noqa: E501
        Create a load balancer listener  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_load_balancer_listener(create_load_balancer_listener_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateLoadBalancerListenerRequest create_load_balancer_listener_request: createLoadBalancerListenerRequest (required)
:return: CreateLoadBalancerListenerResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_load_balancer_listener_with_http_info(create_load_balancer_listener_request, **kwargs) # noqa: E501
else:
(data) = self.create_load_balancer_listener_with_http_info(create_load_balancer_listener_request, **kwargs) # noqa: E501
return data
def create_load_balancer_listener_with_http_info(self, create_load_balancer_listener_request, **kwargs): # noqa: E501
"""create_load_balancer_listener # noqa: E501
        Create a load balancer listener  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_load_balancer_listener_with_http_info(create_load_balancer_listener_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateLoadBalancerListenerRequest create_load_balancer_listener_request: createLoadBalancerListenerRequest (required)
:return: CreateLoadBalancerListenerResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_load_balancer_listener_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_load_balancer_listener" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_load_balancer_listener_request' is set
if ('create_load_balancer_listener_request' not in params or
params['create_load_balancer_listener_request'] is None):
raise ValueError("Missing the required parameter `create_load_balancer_listener_request` when calling `create_load_balancer_listener`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_load_balancer_listener_request' in params:
body_params = params['create_load_balancer_listener_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createLoadBalancerListener', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateLoadBalancerListenerResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_target_group(self, create_target_group_request, **kwargs): # noqa: E501
"""create_target_group # noqa: E501
        Create a target group  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_target_group(create_target_group_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateTargetGroupRequest create_target_group_request: createTargetGroupRequest (required)
:return: CreateTargetGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_target_group_with_http_info(create_target_group_request, **kwargs) # noqa: E501
else:
(data) = self.create_target_group_with_http_info(create_target_group_request, **kwargs) # noqa: E501
return data
def create_target_group_with_http_info(self, create_target_group_request, **kwargs): # noqa: E501
"""create_target_group # noqa: E501
        Create a target group  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_target_group_with_http_info(create_target_group_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateTargetGroupRequest create_target_group_request: createTargetGroupRequest (required)
:return: CreateTargetGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_target_group_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_target_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_target_group_request' is set
if ('create_target_group_request' not in params or
params['create_target_group_request'] is None):
raise ValueError("Missing the required parameter `create_target_group_request` when calling `create_target_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_target_group_request' in params:
body_params = params['create_target_group_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createTargetGroup', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateTargetGroupResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_load_balancer_instances(self, delete_load_balancer_instances_request, **kwargs): # noqa: E501
"""delete_load_balancer_instances # noqa: E501
        Delete load balancer instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_load_balancer_instances(delete_load_balancer_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteLoadBalancerInstancesRequest delete_load_balancer_instances_request: deleteLoadBalancerInstancesRequest (required)
:return: DeleteLoadBalancerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_load_balancer_instances_with_http_info(delete_load_balancer_instances_request, **kwargs) # noqa: E501
else:
(data) = self.delete_load_balancer_instances_with_http_info(delete_load_balancer_instances_request, **kwargs) # noqa: E501
return data
def delete_load_balancer_instances_with_http_info(self, delete_load_balancer_instances_request, **kwargs): # noqa: E501
"""delete_load_balancer_instances # noqa: E501
        Delete load balancer instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_load_balancer_instances_with_http_info(delete_load_balancer_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteLoadBalancerInstancesRequest delete_load_balancer_instances_request: deleteLoadBalancerInstancesRequest (required)
:return: DeleteLoadBalancerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_load_balancer_instances_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_load_balancer_instances" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_load_balancer_instances_request' is set
if ('delete_load_balancer_instances_request' not in params or
params['delete_load_balancer_instances_request'] is None):
raise ValueError("Missing the required parameter `delete_load_balancer_instances_request` when calling `delete_load_balancer_instances`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_load_balancer_instances_request' in params:
body_params = params['delete_load_balancer_instances_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deleteLoadBalancerInstances', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeleteLoadBalancerInstancesResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_load_balancer_listeners(self, delete_load_balancer_listeners_request, **kwargs): # noqa: E501
"""delete_load_balancer_listeners # noqa: E501
        Delete load balancer listeners  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_load_balancer_listeners(delete_load_balancer_listeners_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteLoadBalancerListenersRequest delete_load_balancer_listeners_request: deleteLoadBalancerListenersRequest (required)
:return: DeleteLoadBalancerListenersResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_load_balancer_listeners_with_http_info(delete_load_balancer_listeners_request, **kwargs) # noqa: E501
else:
(data) = self.delete_load_balancer_listeners_with_http_info(delete_load_balancer_listeners_request, **kwargs) # noqa: E501
return data
def delete_load_balancer_listeners_with_http_info(self, delete_load_balancer_listeners_request, **kwargs): # noqa: E501
"""delete_load_balancer_listeners # noqa: E501
Delete load balancer listeners # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_load_balancer_listeners_with_http_info(delete_load_balancer_listeners_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteLoadBalancerListenersRequest delete_load_balancer_listeners_request: deleteLoadBalancerListenersRequest (required)
:return: DeleteLoadBalancerListenersResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_load_balancer_listeners_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_load_balancer_listeners" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_load_balancer_listeners_request' is set
if ('delete_load_balancer_listeners_request' not in params or
params['delete_load_balancer_listeners_request'] is None):
raise ValueError("Missing the required parameter `delete_load_balancer_listeners_request` when calling `delete_load_balancer_listeners`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_load_balancer_listeners_request' in params:
body_params = params['delete_load_balancer_listeners_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deleteLoadBalancerListeners', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeleteLoadBalancerListenersResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_target_groups(self, delete_target_groups_request, **kwargs): # noqa: E501
"""delete_target_groups # noqa: E501
Delete target groups # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_target_groups(delete_target_groups_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteTargetGroupsRequest delete_target_groups_request: deleteTargetGroupsRequest (required)
:return: DeleteTargetGroupsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_target_groups_with_http_info(delete_target_groups_request, **kwargs) # noqa: E501
else:
(data) = self.delete_target_groups_with_http_info(delete_target_groups_request, **kwargs) # noqa: E501
return data
def delete_target_groups_with_http_info(self, delete_target_groups_request, **kwargs): # noqa: E501
"""delete_target_groups # noqa: E501
Delete target groups # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_target_groups_with_http_info(delete_target_groups_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteTargetGroupsRequest delete_target_groups_request: deleteTargetGroupsRequest (required)
:return: DeleteTargetGroupsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_target_groups_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_target_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_target_groups_request' is set
if ('delete_target_groups_request' not in params or
params['delete_target_groups_request'] is None):
raise ValueError("Missing the required parameter `delete_target_groups_request` when calling `delete_target_groups`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_target_groups_request' in params:
body_params = params['delete_target_groups_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deleteTargetGroups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeleteTargetGroupsResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_load_balancer_instance_detail(self, get_load_balancer_instance_detail_request, **kwargs): # noqa: E501
"""get_load_balancer_instance_detail # noqa: E501
Get load balancer instance detail # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_instance_detail(get_load_balancer_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerInstanceDetailRequest get_load_balancer_instance_detail_request: getLoadBalancerInstanceDetailRequest (required)
:return: GetLoadBalancerInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_load_balancer_instance_detail_with_http_info(get_load_balancer_instance_detail_request, **kwargs) # noqa: E501
else:
(data) = self.get_load_balancer_instance_detail_with_http_info(get_load_balancer_instance_detail_request, **kwargs) # noqa: E501
return data
def get_load_balancer_instance_detail_with_http_info(self, get_load_balancer_instance_detail_request, **kwargs): # noqa: E501
"""get_load_balancer_instance_detail # noqa: E501
Get load balancer instance detail # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_instance_detail_with_http_info(get_load_balancer_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerInstanceDetailRequest get_load_balancer_instance_detail_request: getLoadBalancerInstanceDetailRequest (required)
:return: GetLoadBalancerInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_load_balancer_instance_detail_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_load_balancer_instance_detail" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_load_balancer_instance_detail_request' is set
if ('get_load_balancer_instance_detail_request' not in params or
params['get_load_balancer_instance_detail_request'] is None):
raise ValueError("Missing the required parameter `get_load_balancer_instance_detail_request` when calling `get_load_balancer_instance_detail`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_load_balancer_instance_detail_request' in params:
body_params = params['get_load_balancer_instance_detail_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getLoadBalancerInstanceDetail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetLoadBalancerInstanceDetailResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_load_balancer_instance_list(self, get_load_balancer_instance_list_request, **kwargs): # noqa: E501
"""get_load_balancer_instance_list # noqa: E501
Get load balancer instance list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_instance_list(get_load_balancer_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerInstanceListRequest get_load_balancer_instance_list_request: getLoadBalancerInstanceListRequest (required)
:return: GetLoadBalancerInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_load_balancer_instance_list_with_http_info(get_load_balancer_instance_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_load_balancer_instance_list_with_http_info(get_load_balancer_instance_list_request, **kwargs) # noqa: E501
return data
def get_load_balancer_instance_list_with_http_info(self, get_load_balancer_instance_list_request, **kwargs): # noqa: E501
"""get_load_balancer_instance_list # noqa: E501
Get load balancer instance list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_instance_list_with_http_info(get_load_balancer_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerInstanceListRequest get_load_balancer_instance_list_request: getLoadBalancerInstanceListRequest (required)
:return: GetLoadBalancerInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_load_balancer_instance_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_load_balancer_instance_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_load_balancer_instance_list_request' is set
if ('get_load_balancer_instance_list_request' not in params or
params['get_load_balancer_instance_list_request'] is None):
raise ValueError("Missing the required parameter `get_load_balancer_instance_list_request` when calling `get_load_balancer_instance_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_load_balancer_instance_list_request' in params:
body_params = params['get_load_balancer_instance_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getLoadBalancerInstanceList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetLoadBalancerInstanceListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_load_balancer_listener_list(self, get_load_balancer_listener_list_request, **kwargs): # noqa: E501
"""get_load_balancer_listener_list # noqa: E501
Get load balancer listener list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_listener_list(get_load_balancer_listener_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerListenerListRequest get_load_balancer_listener_list_request: getLoadBalancerListenerListRequest (required)
:return: GetLoadBalancerListenerListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_load_balancer_listener_list_with_http_info(get_load_balancer_listener_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_load_balancer_listener_list_with_http_info(get_load_balancer_listener_list_request, **kwargs) # noqa: E501
return data
def get_load_balancer_listener_list_with_http_info(self, get_load_balancer_listener_list_request, **kwargs): # noqa: E501
"""get_load_balancer_listener_list # noqa: E501
Get load balancer listener list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_listener_list_with_http_info(get_load_balancer_listener_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerListenerListRequest get_load_balancer_listener_list_request: getLoadBalancerListenerListRequest (required)
:return: GetLoadBalancerListenerListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_load_balancer_listener_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_load_balancer_listener_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_load_balancer_listener_list_request' is set
if ('get_load_balancer_listener_list_request' not in params or
params['get_load_balancer_listener_list_request'] is None):
raise ValueError("Missing the required parameter `get_load_balancer_listener_list_request` when calling `get_load_balancer_listener_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_load_balancer_listener_list_request' in params:
body_params = params['get_load_balancer_listener_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getLoadBalancerListenerList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetLoadBalancerListenerListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_load_balancer_rule_list(self, get_load_balancer_rule_list_request, **kwargs): # noqa: E501
"""get_load_balancer_rule_list # noqa: E501
Get load balancer rule list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_rule_list(get_load_balancer_rule_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerRuleListRequest get_load_balancer_rule_list_request: getLoadBalancerRuleListRequest (required)
:return: GetLoadBalancerRuleListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_load_balancer_rule_list_with_http_info(get_load_balancer_rule_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_load_balancer_rule_list_with_http_info(get_load_balancer_rule_list_request, **kwargs) # noqa: E501
return data
def get_load_balancer_rule_list_with_http_info(self, get_load_balancer_rule_list_request, **kwargs): # noqa: E501
"""get_load_balancer_rule_list # noqa: E501
Get load balancer rule list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_rule_list_with_http_info(get_load_balancer_rule_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerRuleListRequest get_load_balancer_rule_list_request: getLoadBalancerRuleListRequest (required)
:return: GetLoadBalancerRuleListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_load_balancer_rule_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_load_balancer_rule_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_load_balancer_rule_list_request' is set
if ('get_load_balancer_rule_list_request' not in params or
params['get_load_balancer_rule_list_request'] is None):
raise ValueError("Missing the required parameter `get_load_balancer_rule_list_request` when calling `get_load_balancer_rule_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_load_balancer_rule_list_request' in params:
body_params = params['get_load_balancer_rule_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getLoadBalancerRuleList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetLoadBalancerRuleListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_target_group_detail(self, get_target_group_detail_request, **kwargs): # noqa: E501
"""get_target_group_detail # noqa: E501
Get target group detail # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_target_group_detail(get_target_group_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetTargetGroupDetailRequest get_target_group_detail_request: getTargetGroupDetailRequest (required)
:return: GetTargetGroupDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_target_group_detail_with_http_info(get_target_group_detail_request, **kwargs) # noqa: E501
else:
(data) = self.get_target_group_detail_with_http_info(get_target_group_detail_request, **kwargs) # noqa: E501
return data
def get_target_group_detail_with_http_info(self, get_target_group_detail_request, **kwargs): # noqa: E501
"""get_target_group_detail # noqa: E501
Get target group detail # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_target_group_detail_with_http_info(get_target_group_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetTargetGroupDetailRequest get_target_group_detail_request: getTargetGroupDetailRequest (required)
:return: GetTargetGroupDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_target_group_detail_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_target_group_detail" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_target_group_detail_request' is set
if ('get_target_group_detail_request' not in params or
params['get_target_group_detail_request'] is None):
raise ValueError("Missing the required parameter `get_target_group_detail_request` when calling `get_target_group_detail`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_target_group_detail_request' in params:
body_params = params['get_target_group_detail_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getTargetGroupDetail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetTargetGroupDetailResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_target_group_list(self, get_target_group_list_request, **kwargs): # noqa: E501
"""get_target_group_list # noqa: E501
Get target group list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_target_group_list(get_target_group_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetTargetGroupListRequest get_target_group_list_request: getTargetGroupListRequest (required)
:return: GetTargetGroupListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_target_group_list_with_http_info(get_target_group_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_target_group_list_with_http_info(get_target_group_list_request, **kwargs) # noqa: E501
return data
def get_target_group_list_with_http_info(self, get_target_group_list_request, **kwargs): # noqa: E501
"""get_target_group_list # noqa: E501
Get target group list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_target_group_list_with_http_info(get_target_group_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetTargetGroupListRequest get_target_group_list_request: getTargetGroupListRequest (required)
:return: GetTargetGroupListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_target_group_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_target_group_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_target_group_list_request' is set
if ('get_target_group_list_request' not in params or
params['get_target_group_list_request'] is None):
raise ValueError("Missing the required parameter `get_target_group_list_request` when calling `get_target_group_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_target_group_list_request' in params:
body_params = params['get_target_group_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getTargetGroupList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetTargetGroupListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_target_list(self, get_target_list_request, **kwargs): # noqa: E501
"""get_target_list # noqa: E501
Get target list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_target_list(get_target_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetTargetListRequest get_target_list_request: getTargetListRequest (required)
:return: GetTargetListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_target_list_with_http_info(get_target_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_target_list_with_http_info(get_target_list_request, **kwargs) # noqa: E501
return data
def get_target_list_with_http_info(self, get_target_list_request, **kwargs): # noqa: E501
"""get_target_list # noqa: E501
Get target list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_target_list_with_http_info(get_target_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetTargetListRequest get_target_list_request: getTargetListRequest (required)
:return: GetTargetListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_target_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_target_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_target_list_request' is set
if ('get_target_list_request' not in params or
params['get_target_list_request'] is None):
raise ValueError("Missing the required parameter `get_target_list_request` when calling `get_target_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_target_list_request' in params:
body_params = params['get_target_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getTargetList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetTargetListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_target(self, remove_target_request, **kwargs): # noqa: E501
"""remove_target # noqa: E501
Remove target # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_target(remove_target_request, async=True)
>>> result = thread.get()
:param async bool
:param RemoveTargetRequest remove_target_request: removeTargetRequest (required)
:return: RemoveTargetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.remove_target_with_http_info(remove_target_request, **kwargs) # noqa: E501
else:
(data) = self.remove_target_with_http_info(remove_target_request, **kwargs) # noqa: E501
return data
def remove_target_with_http_info(self, remove_target_request, **kwargs): # noqa: E501
"""remove_target # noqa: E501
Remove target # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_target_with_http_info(remove_target_request, async=True)
>>> result = thread.get()
:param async bool
:param RemoveTargetRequest remove_target_request: removeTargetRequest (required)
:return: RemoveTargetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['remove_target_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_target" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'remove_target_request' is set
if ('remove_target_request' not in params or
params['remove_target_request'] is None):
raise ValueError("Missing the required parameter `remove_target_request` when calling `remove_target`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'remove_target_request' in params:
body_params = params['remove_target_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/removeTarget', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RemoveTargetResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_load_balancer_description(self, set_load_balancer_description_request, **kwargs): # noqa: E501
"""set_load_balancer_description # noqa: E501
Set load balancer description # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_load_balancer_description(set_load_balancer_description_request, async=True)
>>> result = thread.get()
:param async bool
:param SetLoadBalancerDescriptionRequest set_load_balancer_description_request: setLoadBalancerDescriptionRequest (required)
:return: SetLoadBalancerDescriptionResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_load_balancer_description_with_http_info(set_load_balancer_description_request, **kwargs) # noqa: E501
else:
(data) = self.set_load_balancer_description_with_http_info(set_load_balancer_description_request, **kwargs) # noqa: E501
return data
def set_load_balancer_description_with_http_info(self, set_load_balancer_description_request, **kwargs): # noqa: E501
"""set_load_balancer_description # noqa: E501
Set load balancer description # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_load_balancer_description_with_http_info(set_load_balancer_description_request, async=True)
>>> result = thread.get()
:param async bool
:param SetLoadBalancerDescriptionRequest set_load_balancer_description_request: setLoadBalancerDescriptionRequest (required)
:return: SetLoadBalancerDescriptionResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['set_load_balancer_description_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_load_balancer_description" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'set_load_balancer_description_request' is set
if ('set_load_balancer_description_request' not in params or
params['set_load_balancer_description_request'] is None):
raise ValueError("Missing the required parameter `set_load_balancer_description_request` when calling `set_load_balancer_description`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'set_load_balancer_description_request' in params:
body_params = params['set_load_balancer_description_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/setLoadBalancerDescription', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SetLoadBalancerDescriptionResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_load_balancer_instance_subnet(self, set_load_balancer_instance_subnet_request, **kwargs): # noqa: E501
"""set_load_balancer_instance_subnet # noqa: E501
Set load balancer instance subnet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_load_balancer_instance_subnet(set_load_balancer_instance_subnet_request, async=True)
>>> result = thread.get()
:param async bool
:param SetLoadBalancerInstanceSubnetRequest set_load_balancer_instance_subnet_request: setLoadBalancerInstanceSubnetRequest (required)
:return: SetLoadBalancerInstanceSubnetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_load_balancer_instance_subnet_with_http_info(set_load_balancer_instance_subnet_request, **kwargs) # noqa: E501
else:
(data) = self.set_load_balancer_instance_subnet_with_http_info(set_load_balancer_instance_subnet_request, **kwargs) # noqa: E501
return data
def set_load_balancer_instance_subnet_with_http_info(self, set_load_balancer_instance_subnet_request, **kwargs): # noqa: E501
"""set_load_balancer_instance_subnet # noqa: E501
Set load balancer instance subnet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_load_balancer_instance_subnet_with_http_info(set_load_balancer_instance_subnet_request, async=True)
>>> result = thread.get()
:param async bool
:param SetLoadBalancerInstanceSubnetRequest set_load_balancer_instance_subnet_request: setLoadBalancerInstanceSubnetRequest (required)
:return: SetLoadBalancerInstanceSubnetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['set_load_balancer_instance_subnet_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_load_balancer_instance_subnet" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'set_load_balancer_instance_subnet_request' is set
if ('set_load_balancer_instance_subnet_request' not in params or
params['set_load_balancer_instance_subnet_request'] is None):
raise ValueError("Missing the required parameter `set_load_balancer_instance_subnet_request` when calling `set_load_balancer_instance_subnet`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'set_load_balancer_instance_subnet_request' in params:
body_params = params['set_load_balancer_instance_subnet_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/setLoadBalancerInstanceSubnet', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SetLoadBalancerInstanceSubnetResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_target(self, set_target_request, **kwargs): # noqa: E501
"""set_target # noqa: E501
Set target # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_target(set_target_request, async=True)
>>> result = thread.get()
:param async bool
:param SetTargetRequest set_target_request: setTargetRequest (required)
:return: SetTargetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_target_with_http_info(set_target_request, **kwargs) # noqa: E501
else:
(data) = self.set_target_with_http_info(set_target_request, **kwargs) # noqa: E501
return data
def set_target_with_http_info(self, set_target_request, **kwargs): # noqa: E501
"""set_target # noqa: E501
Set target # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_target_with_http_info(set_target_request, async=True)
>>> result = thread.get()
:param async bool
:param SetTargetRequest set_target_request: setTargetRequest (required)
:return: SetTargetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['set_target_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_target" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'set_target_request' is set
if ('set_target_request' not in params or
params['set_target_request'] is None):
raise ValueError("Missing the required parameter `set_target_request` when calling `set_target`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'set_target_request' in params:
body_params = params['set_target_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/setTarget', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SetTargetResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_target_group_description(self, set_target_group_description_request, **kwargs): # noqa: E501
"""set_target_group_description # noqa: E501
Set target group description # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_target_group_description(set_target_group_description_request, async=True)
>>> result = thread.get()
:param async bool
:param SetTargetGroupDescriptionRequest set_target_group_description_request: setTargetGroupDescriptionRequest (required)
:return: SetTargetGroupDescriptionResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_target_group_description_with_http_info(set_target_group_description_request, **kwargs) # noqa: E501
else:
(data) = self.set_target_group_description_with_http_info(set_target_group_description_request, **kwargs) # noqa: E501
return data
def set_target_group_description_with_http_info(self, set_target_group_description_request, **kwargs): # noqa: E501
"""set_target_group_description # noqa: E501
Set target group description # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_target_group_description_with_http_info(set_target_group_description_request, async=True)
>>> result = thread.get()
:param async bool
:param SetTargetGroupDescriptionRequest set_target_group_description_request: setTargetGroupDescriptionRequest (required)
:return: SetTargetGroupDescriptionResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['set_target_group_description_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_target_group_description" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'set_target_group_description_request' is set
if ('set_target_group_description_request' not in params or
params['set_target_group_description_request'] is None):
raise ValueError("Missing the required parameter `set_target_group_description_request` when calling `set_target_group_description`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'set_target_group_description_request' in params:
body_params = params['set_target_group_description_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/setTargetGroupDescription', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SetTargetGroupDescriptionResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
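# --- Hedged usage sketch (illustrative, not part of the generated client) ---
# Every method above follows the same swagger-codegen convention: calls are
# synchronous by default and return the deserialized response model; passing
# the 'async' flag instead returns a worker thread whose .get() yields the
# result. The package and class names below (ncloud_loadbalancer, V2Api,
# ApiClient, GetLoadBalancerInstanceListRequest) are assumptions modelled on
# typical NCP swagger output -- substitute the actual generated names.
if __name__ == "__main__":
    import ncloud_loadbalancer  # assumed generated package name
    from ncloud_loadbalancer.rest import ApiException  # assumed module path

    api = ncloud_loadbalancer.V2Api(ncloud_loadbalancer.ApiClient())
    request = ncloud_loadbalancer.GetLoadBalancerInstanceListRequest()
    try:
        # Synchronous call: blocks and returns the response object.
        print(api.get_load_balancer_instance_list(request))
        # Asynchronous call: the client reads kwargs['async'], but 'async' is
        # a reserved word on Python 3.7+, so pass it via ** unpacking.
        thread = api.get_load_balancer_instance_list(request, **{"async": True})
        print(thread.get())
    except ApiException as exc:
        print("API call failed: %s" % exc)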
# ===== next source file (python) =====
from selenium import webdriver
import logging
import os
logging.getLogger().setLevel(logging.INFO)
def lambda_handler(event, context):
logging.info("python-selenium-chromium-on-lambda started")
chrome_options = webdriver.ChromeOptions()
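# Route browser downloads to /tmp, the only writable directory in a Lambda container.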
prefs = {"download.default_directory": "/tmp", "safebrowsing.enabled": True}
chrome_options.add_experimental_option("prefs", prefs)
chrome_options.add_argument("--disable-extensions")
chrome_options.add_argument("--disable-gpu")
chrome_options.add_argument("--no-sandbox")
chrome_options.add_argument("--single-process")
chrome_options.add_argument("--headless")
chrome_options.add_argument("--incognito")
chrome_options.add_argument("--disable-popup-blocking")
chrome_options.add_argument("--disable-translate")
chrome_options.add_argument("--start-maximized")
chrome_options.add_argument("--window-size=1600,1024")
chrome_options.add_argument("--disable-impl-side-painting")
chrome_options.add_argument("--disable-dev-shm-usage")
driver = webdriver.Chrome(options=chrome_options)  # 'chrome_options=' is deprecated and removed in Selenium 4; use 'options='
driver.implicitly_wait(60)
open_google(driver)
# Save to /tmp: the Lambda task directory is read-only at runtime.
take_screenshot(driver, "/tmp/screenshot_google.png")
driver.quit()  # quit() (rather than close()) ends the session and kills the chromium process
return
def open_google(driver):
logging.info("opening google...")
url = "https://google.com"
driver.get(url)
def take_screenshot(driver, filename):
logging.info(f"taking screenshot {filename}...")
driver.save_screenshot(filename)
logging.info(f"screenshot location: {os.getcwd()}/{filename}")
# ===== next source file (python) =====
#
# PySNMP MIB module WHISP-BOX-MIBV2-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/WHISP-BOX-MIBV2-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:29:30 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
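# Note: pysmi-generated modules like this one are exec'd by pysnmp's MIB
# builder, which injects 'mibBuilder' into the module namespace -- that is
# why there is no explicit import for it below.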
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection", "ValueSizeConstraint")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
TimeTicks, iso, NotificationType, Gauge32, IpAddress, Unsigned32, Integer32, ObjectIdentity, Counter64, Counter32, MibIdentifier, Bits, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "TimeTicks", "iso", "NotificationType", "Gauge32", "IpAddress", "Unsigned32", "Integer32", "ObjectIdentity", "Counter64", "Counter32", "MibIdentifier", "Bits", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
MacAddress, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "MacAddress", "TextualConvention", "DisplayString")
whispBox, whispModules = mibBuilder.importSymbols("WHISP-GLOBAL-REG-MIB", "whispBox", "whispModules")
EventString, WhispMACAddress, WhispLUID = mibBuilder.importSymbols("WHISP-TCV2-MIB", "EventString", "WhispMACAddress", "WhispLUID")
whispBoxLevelMibModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 161, 19, 1, 1, 8))
if mibBuilder.loadTexts: whispBoxLevelMibModule.setLastUpdated('200304150000Z')
if mibBuilder.loadTexts: whispBoxLevelMibModule.setOrganization('Cambium Networks')
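# --- Hedged usage sketch (kept comment-only so loading this MIB module has
# no side effects): the read-only scalars defined below are normally fetched
# from a device with an SNMP GET. Assuming pysnmp's high-level API, a
# reachable agent at 192.0.2.1 (placeholder) and community 'public':
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, getCmd)
#   errInd, errStat, errIdx, varBinds = next(getCmd(
#       SnmpEngine(), CommunityData('public'),
#       UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#       ObjectType(ObjectIdentity('1.3.6.1.4.1.161.19.3.3.1.1.0'))))
#   print(varBinds[0])   # -> whispBoxSoftwareVer.0 = <version string>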
whispBoxStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1))
whispBoxConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2))
whispBoxControls = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3))
whispBoxEventLog = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 5))
whispBoxConf = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6))
whispBoxGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1))
whispBoxBridgeVar = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 7))
whispBoxCPVar = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9))
whispBoxEvent = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 12))
whispBoxDHCPClientEvent = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 12, 1))
whispBoxDNS = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13))
whispBoxRFPhysical = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15))
whispBoxRFConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 16))
whispBoxSoftwareVer = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxSoftwareVer.setStatus('current')
whispBoxFPGAVer = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxFPGAVer.setStatus('current')
whispBoxEsn = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxEsn.setStatus('current')
whispBoxBoot = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxBoot.setStatus('current')
boxTemperature = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxTemperature.setStatus('obsolete')
boxDeviceType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxDeviceType.setStatus('current')
boxDeviceTypeID = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxDeviceTypeID.setStatus('current')
boxEncryption = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxEncryption.setStatus('current')
etherLinkStatus = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherLinkStatus.setStatus('current')
boxFrequency = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxFrequency.setStatus('current')
platformVer = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: platformVer.setStatus('current')
platformType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 12), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: platformType.setStatus('current')
dhcpLanIp = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 13), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dhcpLanIp.setStatus('current')
dhcpLanSubnetMask = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 14), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dhcpLanSubnetMask.setStatus('current')
dhcpLanGateway = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 15), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dhcpLanGateway.setStatus('current')
dhcpRfPublicIp = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 16), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dhcpRfPublicIp.setStatus('current')
dhcpRfPublicSubnetMask = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 17), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dhcpRfPublicSubnetMask.setStatus('current')
dhcpRfPublicGateway = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 18), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dhcpRfPublicGateway.setStatus('current')
lanDhcpStatus = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 19), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lanDhcpStatus.setStatus('current')
rfPublicDhcpStatus = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 20), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfPublicDhcpStatus.setStatus('current')
inSyncCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 21), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: inSyncCount.setStatus('current')
outSyncCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: outSyncCount.setStatus('current')
pllOutLockCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 23), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pllOutLockCount.setStatus('current')
txCalFailure = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: txCalFailure.setStatus('current')
swVersion = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 25), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swVersion.setStatus('current')
pldVersion = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 26), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pldVersion.setStatus('current')
platformInfo = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 27), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: platformInfo.setStatus('current')
antPolarization = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 28), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: antPolarization.setStatus('current')
packetOverloadCounter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: packetOverloadCounter.setStatus('current')
whispBoxP11Personality = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 30), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxP11Personality.setStatus('current')
whispBoxP11FPGAType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 31), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxP11FPGAType.setStatus('current')
whispBoxP11BstrapFPGAVer = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 32), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxP11BstrapFPGAVer.setStatus('current')
numDFSDetections = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: numDFSDetections.setStatus('current')
rxOverrunPkts = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rxOverrunPkts.setStatus('current')
boxTemperatureC = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 35), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxTemperatureC.setStatus('current')
boxTemperatureF = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 36), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxTemperatureF.setStatus('current')
bridgeCbFecStatbin = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatbin.setStatus('current')
bridgeCbFecStatbout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatbout.setStatus('current')
bridgeCbFecStatbtoss = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatbtoss.setStatus('current')
bridgeCbFecStatbtosscap = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatbtosscap.setStatus('current')
bridgeCbFecStatuin = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatuin.setStatus('current')
bridgeCbFecStatuout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatuout.setStatus('current')
bridgeCbFecStatutoss = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatutoss.setStatus('current')
bridgeCbFecStatutosscap = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatutosscap.setStatus('current')
bridgeCbRFStatbin = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatbin.setStatus('current')
bridgeCbRFStatbout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatbout.setStatus('current')
bridgeCbRFStatbtoss = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatbtoss.setStatus('current')
bridgeCbRFStatbtosscap = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatbtosscap.setStatus('current')
bridgeCbRFStatuin = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatuin.setStatus('current')
bridgeCbRFStatuout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatuout.setStatus('current')
bridgeCbRFStatutoss = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 51), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatutoss.setStatus('current')
bridgeCbRFStatutosscap = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 52), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatutosscap.setStatus('current')
bridgeCbErrStatNI1QSend = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 53), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbErrStatNI1QSend.setStatus('current')
bridgeCbErrStatNI2QSend = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 54), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbErrStatNI2QSend.setStatus('current')
bridgeCbErrStatBridgeFull = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 55), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbErrStatBridgeFull.setStatus('current')
bridgeCbErrStatSendMsg = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 56), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbErrStatSendMsg.setStatus('current')
bridgeCbErrStatAPFecQSend = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 57), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbErrStatAPFecQSend.setStatus('current')
bridgeCbErrStatApRfQSend = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 58), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbErrStatApRfQSend.setStatus('current')
rfStatXmtUDataCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 59), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatXmtUDataCnt.setStatus('current')
rfStatXmtBDataCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 60), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatXmtBDataCnt.setStatus('current')
rfStatRcvUDataCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 61), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvUDataCnt.setStatus('current')
rfStatRcvBDataCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 62), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvBDataCnt.setStatus('current')
rfStatXmtCntlCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 63), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatXmtCntlCnt.setStatus('current')
rfStatRcvCntlCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 64), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvCntlCnt.setStatus('current')
rfStatInSyncCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 65), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatInSyncCount.setStatus('current')
rfStatOutSyncCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 66), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatOutSyncCount.setStatus('current')
rfStatOverrunCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 67), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatOverrunCount.setStatus('current')
rfStatUnderrunCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 68), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatUnderrunCount.setStatus('current')
rfStatRcvCorruptDataCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 69), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvCorruptDataCount.setStatus('current')
rfStatBadBcastCtlCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 70), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatBadBcastCtlCnt.setStatus('current')
rfStatPLLOutOfLockCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 71), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatPLLOutOfLockCnt.setStatus('current')
rfStatBeaconVerMismatchCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 72), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatBeaconVerMismatchCnt.setStatus('current')
rfStatBadFreqBcnRcvCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 73), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatBadFreqBcnRcvCnt.setStatus('current')
rfStatnonLiteBcnRcvCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 74), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatnonLiteBcnRcvCnt.setStatus('current')
rfStatUnsupFeatBcnRcvCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 75), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatUnsupFeatBcnRcvCnt.setStatus('current')
rfStatUnkwnFeatBcnRcvCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 76), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatUnkwnFeatBcnRcvCnt.setStatus('current')
rfStatTxCalFailCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 77), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatTxCalFailCnt.setStatus('current')
rfStatBadInSyncIDRcv = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 78), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatBadInSyncIDRcv.setStatus('current')
rfStatTempOutOfRange = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 79), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatTempOutOfRange.setStatus('current')
rfStatRSSIOutOfRange = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 80), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRSSIOutOfRange.setStatus('current')
rfStatRangeCapEnf = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 81), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRangeCapEnf.setStatus('current')
rfStatRcvLTStart = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 82), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvLTStart.setStatus('current')
rfStatRcvLTStartHS = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 83), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvLTStartHS.setStatus('current')
rfStatRcvLTResult = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 84), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvLTResult.setStatus('current')
rfStatXmtLTResult = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 85), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatXmtLTResult.setStatus('current')
whispFeatureKeyOrigin = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 86), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispFeatureKeyOrigin.setStatus('current')
radioMSN = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 87), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioMSN.setStatus('current')
updateStatus = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 88), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: updateStatus.setStatus('current')
syslogStatTxSuccesses = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 89), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: syslogStatTxSuccesses.setStatus('current')
syslogStatDropped = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 90), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: syslogStatDropped.setStatus('current')
fecStatLinkLost = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 91), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecStatLinkLost.setStatus('current')
fecStatLinkDetected = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 92), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecStatLinkDetected.setStatus('current')
natDhcpStatus = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 93), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: natDhcpStatus.setStatus('current')
fecInDiscardsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 94), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecInDiscardsCount.setStatus('current')
fecInErrorsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 95), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecInErrorsCount.setStatus('current')
fecOutDiscardsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 96), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecOutDiscardsCount.setStatus('current')
fecOutErrorsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 97), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecOutErrorsCount.setStatus('current')
rfInDiscardsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 98), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfInDiscardsCount.setStatus('current')
rfInErrorsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 99), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfInErrorsCount.setStatus('current')
rfOutDiscardsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 100), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfOutDiscardsCount.setStatus('current')
rfOutErrorsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 101), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfOutErrorsCount.setStatus('current')
fecInDiscardsOverloadCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 102), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecInDiscardsOverloadCount.setStatus('current')
fecOutDiscardsOverloadCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 103), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecOutDiscardsOverloadCount.setStatus('current')
rfInDiscardsOverloadCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 104), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfInDiscardsOverloadCount.setStatus('current')
rfOutDiscardsOverloadCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 105), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfOutDiscardsOverloadCount.setStatus('current')
fpgaCompileInfo = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 106), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fpgaCompileInfo.setStatus('current')
fpgaBuildDate = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 107), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fpgaBuildDate.setStatus('current')
aggregateBandwidthCap = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 108), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: aggregateBandwidthCap.setStatus('current')
calibrationStatusBool = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 109), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notCalibrated", 0), ("calibrated", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: calibrationStatusBool.setStatus('current')
calibrationStatusBox = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 110), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: calibrationStatusBox.setStatus('current')
radioEngKeyed = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 111), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioEngKeyed.setStatus('current')
bridgeCbFecStatfloods = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 112), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatfloods.setStatus('current')
bridgeCbRFStatfloods = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 113), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatfloods.setStatus('current')
agcGainRxCH1 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 114), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agcGainRxCH1.setStatus('current')
agcGainRxCH2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 115), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agcGainRxCH2.setStatus('current')
antType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 116), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("integrated", 0), ("external", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: antType.setStatus('current')
rfStatRcvCorruptControlCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 117), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvCorruptControlCount.setStatus('current')
rfStatXmtMDataCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 217), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatXmtMDataCnt.setStatus('current')
rfStatRcvMDataCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 218), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvMDataCnt.setStatus('current')
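# Illustrative sketch (not part of the compiled MIB): reading one of the
# read-only status scalars above with pysnmp's high-level API. The host and
# community string are placeholders; swVersion (1.3.6.1.4.1.161.19.3.3.1.25)
# is used as the target, with instance suffix .0 because it is a scalar.
def _example_read_sw_version(host='192.168.0.1', community='public'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)
    # Issue a single SNMP GET and take the first (and only) response tuple.
    error_indication, error_status, error_index, var_binds = next(getCmd(
        SnmpEngine(), CommunityData(community),
        UdpTransportTarget((host, 161)), ContextData(),
        ObjectType(ObjectIdentity('1.3.6.1.4.1.161.19.3.3.1.25.0'))))
    if error_indication:
        raise RuntimeError(error_indication)
    # var_binds is a list of (OID, value) pairs; the value renders as a string.
    return str(var_binds[0][1])

# --- Read-write configuration scalars (OID branch 1.3.6.1.4.1.161.19.3.3.2) ---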
linkNegoSpeed = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: linkNegoSpeed.setStatus('obsolete')
colorCode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 254))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: colorCode.setStatus('current')
displayOnlyAccess = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: displayOnlyAccess.setStatus('obsolete')
fullAccess = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fullAccess.setStatus('current')
webAutoUpdate = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 5), Integer32()).setUnits('Seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: webAutoUpdate.setStatus('current')
pass1Status = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pass1Status.setStatus('current')
pass2Status = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pass2Status.setStatus('current')
bridgeEntryTimeout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(25, 1440))).setUnits('minutes').setMaxAccess("readwrite")
if mibBuilder.loadTexts: bridgeEntryTimeout.setStatus('current')
snmpMibPerm = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("readWrite", 0), ("readOnly", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpMibPerm.setStatus('current')
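# Enumeration labels are kept verbatim from the source MIB as compiled,
# including non-standard spellings such as 'timeingSlave' below.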
bhTimingMode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("timingMaster", 1), ("timeingSlave", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bhTimingMode.setStatus('current')
bhModulation = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("tenMbitsPerSecond", 0), ("twentyMbitsPerSecond", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bhModulation.setStatus('obsolete')
powerControl = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("low", 0), ("normal", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: powerControl.setStatus('current')
extFilterDelay = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 13), Integer32()).setUnits('nanoseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: extFilterDelay.setStatus('current')
antennaGain = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 14), Integer32()).setUnits('dBi').setMaxAccess("readwrite")
if mibBuilder.loadTexts: antennaGain.setStatus('current')
eirp = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 15), Integer32()).setUnits('dBm').setMaxAccess("readwrite")
if mibBuilder.loadTexts: eirp.setStatus('obsolete')
dynamicLearning = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dynamicLearning.setStatus('current')
managementVID = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: managementVID.setStatus('current')
agingTimeout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agingTimeout.setStatus('current')
frameType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("allframes", 0), ("taggedonly", 1), ("untaggedonly", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: frameType.setStatus('current')
addVlanMember = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 20), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: addVlanMember.setStatus('current')
removeVlanMember = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 21), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: removeVlanMember.setStatus('current')
scheduling = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("hardware", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: scheduling.setStatus('current')
transmitterOP = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 23), Integer32()).setUnits('dBm').setMaxAccess("readwrite")
if mibBuilder.loadTexts: transmitterOP.setStatus('current')
bridgeEnable = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bridgeEnable.setStatus('current')
fecEnable = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enable", 1), ("disable", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fecEnable.setStatus('current')
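# trapIP1 through trapIP10: legacy trap destination slots, all marked obsolete.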
trapIP1 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 26), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP1.setStatus('obsolete')
trapIP2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 27), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP2.setStatus('obsolete')
trapIP3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 28), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP3.setStatus('obsolete')
trapIP4 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 29), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP4.setStatus('obsolete')
trapIP5 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 30), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP5.setStatus('obsolete')
trapIP6 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 31), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP6.setStatus('obsolete')
trapIP7 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 32), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP7.setStatus('obsolete')
trapIP8 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 33), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP8.setStatus('obsolete')
trapIP9 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 34), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP9.setStatus('obsolete')
trapIP10 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 35), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP10.setStatus('obsolete')
commStringRWrite = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 36), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: commStringRWrite.setStatus('current')
subnetMask = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 37), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask.setStatus('current')
mngtIP = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 38), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP.setStatus('current')
allowVIDAccess = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 39), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: allowVIDAccess.setStatus('current')
setDefaultPlug = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 40), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: setDefaultPlug.setStatus('current')
hwsCompatibility = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 41), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwsCompatibility.setStatus('obsolete')
gpsInput = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 42), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("generateSyncSignal", 0), ("syncToReceivedSignalTimingPort", 1), ("syncToReceivedSignalPowerPort", 2), ("syncToiGPS", 3), ("autoSync", 4), ("autoSyncFreeRun", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gpsInput.setStatus('current')
ism = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 43), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ism.setStatus('current')
hiPriority = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 44), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hiPriority.setStatus('obsolete')
userName = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 45), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userName.setStatus('current')
userPassword = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 46), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userPassword.setStatus('current')
userAccessLevel = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 47), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userAccessLevel.setStatus('current')
deleteUser = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 48), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deleteUser.setStatus('current')
twoXRate = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 49), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: twoXRate.setStatus('obsolete')
lanDhcpState = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 50), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lanDhcpState.setStatus('current')
sessionTimeout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 51), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sessionTimeout.setStatus('current')
vlanMemberSource = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 52), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("configured", 0), ("active", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanMemberSource.setStatus('current')
addCustomFreqList = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 53), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: addCustomFreqList.setStatus('current')
removeCustomFreqList = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 54), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: removeCustomFreqList.setStatus('current')
allowColocation = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 55), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: allowColocation.setStatus('obsolete')
changeUsrPwd = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 56), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: changeUsrPwd.setStatus('current')
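# mngtIP2..mngtIP10 with subnetMask2..subnetMask10: additional management
# IP address / subnet mask pairs.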
mngtIP2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 57), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP2.setStatus('current')
subnetMask2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 58), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask2.setStatus('current')
mngtIP3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 59), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP3.setStatus('current')
subnetMask3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 60), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask3.setStatus('current')
mngtIP4 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 61), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP4.setStatus('current')
subnetMask4 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 62), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask4.setStatus('current')
mngtIP5 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 63), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP5.setStatus('current')
subnetMask5 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 64), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask5.setStatus('current')
mngtIP6 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 65), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP6.setStatus('current')
subnetMask6 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 66), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask6.setStatus('current')
mngtIP7 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 67), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP7.setStatus('current')
subnetMask7 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 68), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask7.setStatus('current')
mngtIP8 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 69), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP8.setStatus('current')
subnetMask8 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 70), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask8.setStatus('current')
mngtIP9 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 71), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP9.setStatus('current')
subnetMask9 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 72), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask9.setStatus('current')
mngtIP10 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 73), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP10.setStatus('current')
subnetMask10 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 74), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask10.setStatus('current')
bhvlanEnable = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 75), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bhvlanEnable.setStatus('current')
lldpBroadcastEnable = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 76), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lldpBroadcastEnable.setStatus('current')
regionCode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 77), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 6, 5, 3, 8, 10, 11, 7, 9, 2, 1, 4))).clone(namedValues=NamedValues(("none", 0), ("australia", 6), ("brazil", 5), ("canada", 3), ("india", 8), ("indonesia", 10), ("ireland", 11), ("russia", 7), ("spain", 9), ("us", 2), ("other", 1), ("europe", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionCode.setStatus('deprecated')
russiaRegion = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 78), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("category1", 1), ("category2", 2), ("category3", 3), ("category4", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: russiaRegion.setStatus('deprecated')
commStringROnly = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 79), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: commStringROnly.setStatus('current')
ethernetLinkSpeed = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 80), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 8, 3, 5, 7, 12, 13, 15, 63))).clone(namedValues=NamedValues(("forced10H", 1), ("forced10F", 2), ("forced100H", 4), ("forced100F", 8), ("auto10F-10H", 3), ("auto100H-10H", 5), ("auto100H-10F-10H", 7), ("auto100F-100H", 12), ("auto100F-100H-10H", 13), ("auto100F-100H-10F-10H", 15), ("auto1000F-100F-100H-10F-10H", 63)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ethernetLinkSpeed.setStatus('current')
cyclicPrefix = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 81), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("one-quarter", 0), ("one-eighth", 1), ("one-sixteenth", 2), ("one-quarter-one-eighth", 3), ("one-quarter-one-sixteenth", 4), ("one-eighth-one-sixteenth", 5), ("one-quarter-one-eighth-one-sixteenth", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cyclicPrefix.setStatus('current')
numberCustomFreq = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 82), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: numberCustomFreq.setStatus('current')
channelBandwidth = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 83), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: channelBandwidth.setStatus('current')
setDefaults = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 84), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("noChangeOrUndoFactoryDefaults", 0), ("setToFactoryDefaults", 1), ("factoryDefaultsSet-AwaitingReboot", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: setDefaults.setStatus('current')
radioRateAdapt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 85), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 4, 1, 5, 2, 3, 7, 8, 9))).clone(namedValues=NamedValues(("onex", 0), ("onexmimo", 4), ("onextwox", 1), ("onextwoxmimo", 5), ("onextwoxthreex", 2), ("onextwoxthreexfourx", 3), ("onextwoxfourx", 7), ("onextwoxfourxsixx", 8), ("onextwoxfourxsixxeightx", 9)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: radioRateAdapt.setStatus('current')
siteInfoViewable = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 86), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enable", 1), ("disable", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: siteInfoViewable.setStatus('current')
largeVCQ = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 87), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enable", 1), ("disable", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: largeVCQ.setStatus('current')
latitude = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 88), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latitude.setStatus('current')
longitude = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 89), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: longitude.setStatus('current')
height = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 90), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-2147483647, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: height.setStatus('current')
bandwidth = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 91), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 3, 5, 8, 9, 10, 11))).clone(namedValues=NamedValues(("bandwidth5mhz", 1), ("bandwidth10mhz", 3), ("bandwidth20mhz", 5), ("bandwidth5-10mhz", 8), ("bandwidth5-20mhz", 9), ("bandwidth10-20mhz", 10), ("bandwidth5-10-20mhz", 11)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bandwidth.setStatus('current')
dataScramblingMethod = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 92), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("legacyDataScrambling", 0), ("r10DataScrambling", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataScramblingMethod.setStatus('obsolete')
portVID = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 93), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVID.setStatus('current')
radioRateAdaptUL = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 94), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 4, 1, 5, 2, 3, 7, 8, 9))).clone(namedValues=NamedValues(("onex", 0), ("onexmimo", 4), ("onextwox", 1), ("onextwoxmimo", 5), ("onextwoxthreex", 2), ("onextwoxthreexfourx", 3), ("onextwoxfourx", 7), ("onextwoxfourxsixx", 8), ("onextwoxfourxsixxeightx", 9)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: radioRateAdaptUL.setStatus('current')
providerVID = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 95), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: providerVID.setStatus('current')
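# mac1VIDMapAddr/mac1VIDMapVid .. mac10VIDMapAddr/mac10VIDMapVid:
# ten MAC-address-to-VID mapping slots, each an address/VID pair.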
mac1VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 96), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac1VIDMapAddr.setStatus('current')
mac1VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 97), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac1VIDMapVid.setStatus('current')
mac2VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 98), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac2VIDMapAddr.setStatus('current')
mac2VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 99), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac2VIDMapVid.setStatus('current')
mac3VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 100), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac3VIDMapAddr.setStatus('current')
mac3VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 101), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac3VIDMapVid.setStatus('current')
mac4VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 102), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac4VIDMapAddr.setStatus('current')
mac4VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 103), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac4VIDMapVid.setStatus('current')
mac5VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 104), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac5VIDMapAddr.setStatus('current')
mac5VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 105), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac5VIDMapVid.setStatus('current')
mac6VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 106), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac6VIDMapAddr.setStatus('current')
mac6VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 107), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac6VIDMapVid.setStatus('current')
mac7VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 108), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac7VIDMapAddr.setStatus('current')
mac7VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 109), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac7VIDMapVid.setStatus('current')
mac8VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 110), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac8VIDMapAddr.setStatus('current')
mac8VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 111), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac8VIDMapVid.setStatus('current')
mac9VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 112), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac9VIDMapAddr.setStatus('current')
mac9VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 113), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac9VIDMapVid.setStatus('current')
mac10VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 114), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac10VIDMapAddr.setStatus('current')
mac10VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 115), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac10VIDMapVid.setStatus('current')
vlanPortType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 116), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("q", 0), ("qinq", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanPortType.setStatus('current')
vlanAcceptQinQFrames = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 117), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanAcceptQinQFrames.setStatus('current')
whispWebUserAccessMode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 118), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("local", 0), ("remote", 1), ("remotethenlocal", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: whispWebUserAccessMode.setStatus('current')
usrAccountEnableAccounting = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 119), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("disable", 0), ("deviceAccess", 1), ("dataUsage", 2), ("all", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: usrAccountEnableAccounting.setStatus('current')
allowRejectThenLocal = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 120), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("doNotAllowLocalAuthifAAAReject", 0), ("allowLocalAuthIfAAAReject", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: allowRejectThenLocal.setStatus('current')
snrCalculation = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 121), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enable", 1), ("disable", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snrCalculation.setStatus('deprecated')
priorityPrecedence = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 122), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("eight021pThenDiffServ", 0), ("diffservThenEight021p", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: priorityPrecedence.setStatus('current')
installationColorCode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 123), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: installationColorCode.setStatus('current')
apSmMode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 124), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("sm", 1), ("ap", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apSmMode.setStatus('current')
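# Protocol filter toggles; each scalar uses filterOff(0)/filterOn(1).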
pppoeFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 125), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pppoeFilter.setStatus('current')
smbFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 126), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: smbFilter.setStatus('current')
snmpFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 127), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpFilter.setStatus('current')
userP1Filter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 128), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userP1Filter.setStatus('current')
userP2Filter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 129), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userP2Filter.setStatus('current')
userP3Filter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 130), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userP3Filter.setStatus('current')
allOtherIpFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 131), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: allOtherIpFilter.setStatus('current')
allIpv4Filter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 132), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: allIpv4Filter.setStatus('current')
arpFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 133), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: arpFilter.setStatus('current')
allOthersFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 134), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: allOthersFilter.setStatus('current')
userDefinedPort1 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 135), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userDefinedPort1.setStatus('current')
port1TCPFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 136), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: port1TCPFilter.setStatus('current')
port1UDPFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 137), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: port1UDPFilter.setStatus('current')
userDefinedPort2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 138), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userDefinedPort2.setStatus('current')
port2TCPFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 139), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: port2TCPFilter.setStatus('current')
port2UDPFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 140), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: port2UDPFilter.setStatus('current')
userDefinedPort3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 141), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userDefinedPort3.setStatus('current')
port3TCPFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 142), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: port3TCPFilter.setStatus('current')
port3UDPFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 143), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: port3UDPFilter.setStatus('current')
bootpcFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 144), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bootpcFilter.setStatus('current')
bootpsFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 145), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bootpsFilter.setStatus('current')
ip4MultFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 146), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ip4MultFilter.setStatus('current')
packetFilterDirection = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 147), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("upstream", 1), ("downstream", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: packetFilterDirection.setStatus('current')
encryptionConfig = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 148), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("des", 0), ("aes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: encryptionConfig.setStatus('current')
pppoeCtlPriority = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 149), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("normal", 0), ("high", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pppoeCtlPriority.setStatus('current')
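# Management protocol port numbers. Sub-identifiers 152 and 155 are not
# defined in this module (snmpPort is .153, syslogDomainNameAppend is .156).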
ftpPort = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 150), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPort.setStatus('current')
httpPort = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 151), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: httpPort.setStatus('current')
snmpPort = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 153), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpPort.setStatus('current')
snmpTrapPort = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 154), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpTrapPort.setStatus('current')
syslogDomainNameAppend = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 156), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disableDomain", 0), ("appendDomain", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: syslogDomainNameAppend.setStatus('current')
syslogServerAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 157), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: syslogServerAddr.setStatus('current')
syslogServerPort = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 158), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: syslogServerPort.setStatus('current')
syslogMinLevel = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 159), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("fatal", 0), ("alert", 1), ("critical", 2), ("error", 3), ("warning", 4), ("notice", 5), ("info", 6), ("debug", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: syslogMinLevel.setStatus('current')
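# DHCP client controls: each scalar accepts the single trigger value 1
# (releaseIP or renewIP).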
lan1DhcpRelease = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 201), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("releaseIP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lan1DhcpRelease.setStatus('current')
lan1DhcpRenew = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 202), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("renewIP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lan1DhcpRenew.setStatus('current')
lan3DhcpRelease = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 203), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("releaseIP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lan3DhcpRelease.setStatus('current')
lan3DhcpRenew = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 204), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("renewIP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lan3DhcpRenew.setStatus('current')
natDhcpRelease = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 205), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("releaseIP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: natDhcpRelease.setStatus('current')
natDhcpRenew = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 206), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("renewIP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: natDhcpRenew.setStatus('current')
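# Region/regulatory selection: 'region' itself is read-only; the per-continent
# scalars that follow (regionAsia, regionEurope, ...) are the writable selectors.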
region = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 207), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 6, 3, 2, 5, 4, 7))).clone(namedValues=NamedValues(("none", 0), ("otherRegulatory", 1), ("asia", 6), ("europe", 3), ("northAmerica", 2), ("oceania", 5), ("southAmerica", 4), ("africa", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: region.setStatus('current')
regionAsia = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 208), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("none", 0), ("india", 2), ("indonesia", 3), ("russiacategory1", 4), ("russiacategory2", 5), ("russiacategory3", 6), ("russiacategory4", 7), ("vietnam", 8)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionAsia.setStatus('current')
regionEurope = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 209), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("none", 0), ("spain", 2), ("ireland", 3), ("denmark", 4), ("finland", 5), ("germany", 6), ("greece", 7), ("iceland", 8), ("liechtenstein", 9), ("norway", 10), ("portugal", 11), ("switzerland", 12), ("serbia", 13), ("unitedkingdom", 14)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionEurope.setStatus('current')
regionNorthAmerica = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 210), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 3, 2, 4))).clone(namedValues=NamedValues(("none", 0), ("canada", 3), ("unitedStates", 2), ("mexico", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionNorthAmerica.setStatus('current')
regionOceania = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 211), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2))).clone(namedValues=NamedValues(("none", 0), ("australia", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionOceania.setStatus('current')
regionSouthAmerica = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 212), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2))).clone(namedValues=NamedValues(("none", 0), ("brazil", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionSouthAmerica.setStatus('current')
regionOtherRegulatory = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 213), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("other", 1), ("otherFCC", 2), ("otherETSI", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionOtherRegulatory.setStatus('current')
interleave = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 214), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("default", 0), ("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: interleave.setStatus('current')
receiveQualityDebug = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 215), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enable", 1), ("disable", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: receiveQualityDebug.setStatus('current')
apType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 216), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("standardAP", 0), ("remoteAP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apType.setStatus('current')
regionAfrica = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 217), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2))).clone(namedValues=NamedValues(("none", 0), ("algeria", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionAfrica.setStatus('current')
addCustomFreqMimo = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 218), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: addCustomFreqMimo.setStatus('current')
removeCustomFreqMimo = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 219), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: removeCustomFreqMimo.setStatus('current')
timedSpectrumAnalysisDurationBox = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 220), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 1000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: timedSpectrumAnalysisDurationBox.setStatus('current')
spectrumAnalysisActionBox = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 221), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("stopSpectrumAnalysis", 0), ("startTimedSpectrumAnalysis", 1), ("startContinuousSpectrumAnalysis", 2), ("idleNoSpectrumAnalysis", 3), ("idleCompleteSpectrumAnalysis", 4), ("inProgressTimedSpectrumAnalysis", 5), ("inProgressContinuousSpectrumAnalysis", 6), ("notReady", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: spectrumAnalysisActionBox.setStatus('current')
saveFlash = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("doNotSaveToFlash", 0), ("saveToFlash", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: saveFlash.setStatus('obsolete')
reboot = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("finishedReboot", 0), ("reboot", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: reboot.setStatus('current')
clearEventLog = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notClear", 0), ("clear", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: clearEventLog.setStatus('current')
rebootIfRequired = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("rebootNotRequired", 0), ("rebootRequired", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rebootIfRequired.setStatus('current')
clearBERStats = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("doNotClearBERStats", 0), ("clearBERStats", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: clearBERStats.setStatus('current')
updateDevice = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: updateDevice.setStatus('current')
whispBoxEvntLog = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 5, 1), EventString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxEvntLog.setStatus('current')
whispBoxAttributesGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 1))
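# pysnmp versions prior to 4.4.2 have no append mode for setObjects(), so the
# object list is supplied in chunks below; on those old versions each call
# replaces the previous one and only the last chunk survives (see WARNING).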
for _whispBoxAttributesGroup_obj in [[("WHISP-BOX-MIBV2-MIB", "vlanAcceptQinQFrames"), ("WHISP-BOX-MIBV2-MIB", "providerVID"), ("WHISP-BOX-MIBV2-MIB", "mac1VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac1VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac2VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac2VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac3VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac3VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac4VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac4VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac5VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac5VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac6VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac6VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac7VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac7VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac8VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac8VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac9VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac9VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac10VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac10VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "vlanPortType"), ("WHISP-BOX-MIBV2-MIB", "portVID"), ("WHISP-BOX-MIBV2-MIB", "timedSpectrumAnalysisDurationBox"), ("WHISP-BOX-MIBV2-MIB", "spectrumAnalysisActionBox"), ("WHISP-BOX-MIBV2-MIB", "calibrationStatusBox"), ("WHISP-BOX-MIBV2-MIB", "calibrationStatusBool"), ("WHISP-BOX-MIBV2-MIB", "agcGainRxCH1"), ("WHISP-BOX-MIBV2-MIB", "agcGainRxCH2"), ("WHISP-BOX-MIBV2-MIB", "whispBoxSoftwareVer"), ("WHISP-BOX-MIBV2-MIB", "whispBoxFPGAVer"), ("WHISP-BOX-MIBV2-MIB", "whispBoxEsn"), ("WHISP-BOX-MIBV2-MIB", "whispBoxBoot"), ("WHISP-BOX-MIBV2-MIB", "boxTemperature"), ("WHISP-BOX-MIBV2-MIB", "boxDeviceType"), ("WHISP-BOX-MIBV2-MIB", "boxDeviceTypeID"), ("WHISP-BOX-MIBV2-MIB", "boxEncryption"), ("WHISP-BOX-MIBV2-MIB", "etherLinkStatus"), ("WHISP-BOX-MIBV2-MIB", "boxFrequency"), ("WHISP-BOX-MIBV2-MIB", "platformVer"), ("WHISP-BOX-MIBV2-MIB", "platformType"), ("WHISP-BOX-MIBV2-MIB", "dhcpLanIp"), ("WHISP-BOX-MIBV2-MIB", "dhcpLanSubnetMask"), ("WHISP-BOX-MIBV2-MIB", "dhcpLanGateway"), ("WHISP-BOX-MIBV2-MIB", "dhcpRfPublicIp"), ("WHISP-BOX-MIBV2-MIB", "dhcpRfPublicSubnetMask"), ("WHISP-BOX-MIBV2-MIB", "dhcpRfPublicGateway"), ("WHISP-BOX-MIBV2-MIB", "lanDhcpStatus"), ("WHISP-BOX-MIBV2-MIB", "rfPublicDhcpStatus"), ("WHISP-BOX-MIBV2-MIB", "natDhcpStatus"), ("WHISP-BOX-MIBV2-MIB", "inSyncCount"), ("WHISP-BOX-MIBV2-MIB", "outSyncCount"), ("WHISP-BOX-MIBV2-MIB", "pllOutLockCount"), ("WHISP-BOX-MIBV2-MIB", "txCalFailure"), ("WHISP-BOX-MIBV2-MIB", "swVersion"), ("WHISP-BOX-MIBV2-MIB", "pldVersion"), ("WHISP-BOX-MIBV2-MIB", "platformInfo"), ("WHISP-BOX-MIBV2-MIB", "antType"), ("WHISP-BOX-MIBV2-MIB", "antPolarization"), ("WHISP-BOX-MIBV2-MIB", "packetOverloadCounter"), ("WHISP-BOX-MIBV2-MIB", "whispBoxP11Personality"), ("WHISP-BOX-MIBV2-MIB", "whispBoxP11FPGAType"), ("WHISP-BOX-MIBV2-MIB", "whispBoxP11BstrapFPGAVer"), ("WHISP-BOX-MIBV2-MIB", "numDFSDetections"), ("WHISP-BOX-MIBV2-MIB", "rxOverrunPkts"), ("WHISP-BOX-MIBV2-MIB", "boxTemperatureC"), ("WHISP-BOX-MIBV2-MIB", "boxTemperatureF"), ("WHISP-BOX-MIBV2-MIB", "linkNegoSpeed"), ("WHISP-BOX-MIBV2-MIB", "installationColorCode"), ("WHISP-BOX-MIBV2-MIB", "colorCode"), ("WHISP-BOX-MIBV2-MIB", "displayOnlyAccess"), ("WHISP-BOX-MIBV2-MIB", "fullAccess"), ("WHISP-BOX-MIBV2-MIB", "webAutoUpdate"), ("WHISP-BOX-MIBV2-MIB", "pass1Status"), ("WHISP-BOX-MIBV2-MIB", "pass2Status"), ("WHISP-BOX-MIBV2-MIB", "bridgeEntryTimeout"), ("WHISP-BOX-MIBV2-MIB", "snmpMibPerm"), ("WHISP-BOX-MIBV2-MIB", "bhTimingMode"), ("WHISP-BOX-MIBV2-MIB", "powerControl"), ("WHISP-BOX-MIBV2-MIB", "extFilterDelay"), 
("WHISP-BOX-MIBV2-MIB", "antennaGain"), ("WHISP-BOX-MIBV2-MIB", "eirp"), ("WHISP-BOX-MIBV2-MIB", "dynamicLearning"), ("WHISP-BOX-MIBV2-MIB", "managementVID"), ("WHISP-BOX-MIBV2-MIB", "agingTimeout"), ("WHISP-BOX-MIBV2-MIB", "frameType"), ("WHISP-BOX-MIBV2-MIB", "addVlanMember"), ("WHISP-BOX-MIBV2-MIB", "removeVlanMember"), ("WHISP-BOX-MIBV2-MIB", "scheduling"), ("WHISP-BOX-MIBV2-MIB", "transmitterOP"), ("WHISP-BOX-MIBV2-MIB", "bridgeEnable"), ("WHISP-BOX-MIBV2-MIB", "fecEnable"), ("WHISP-BOX-MIBV2-MIB", "trapIP1"), ("WHISP-BOX-MIBV2-MIB", "trapIP2"), ("WHISP-BOX-MIBV2-MIB", "trapIP3"), ("WHISP-BOX-MIBV2-MIB", "trapIP4"), ("WHISP-BOX-MIBV2-MIB", "trapIP5"), ("WHISP-BOX-MIBV2-MIB", "trapIP6"), ("WHISP-BOX-MIBV2-MIB", "trapIP7"), ("WHISP-BOX-MIBV2-MIB", "trapIP8"), ("WHISP-BOX-MIBV2-MIB", "trapIP9"), ("WHISP-BOX-MIBV2-MIB", "trapIP10"), ("WHISP-BOX-MIBV2-MIB", "commStringRWrite"), ("WHISP-BOX-MIBV2-MIB", "subnetMask"), ("WHISP-BOX-MIBV2-MIB", "mngtIP"), ("WHISP-BOX-MIBV2-MIB", "allowVIDAccess"), ("WHISP-BOX-MIBV2-MIB", "setDefaultPlug"), ("WHISP-BOX-MIBV2-MIB", "hwsCompatibility"), ("WHISP-BOX-MIBV2-MIB", "gpsInput"), ("WHISP-BOX-MIBV2-MIB", "ism"), ("WHISP-BOX-MIBV2-MIB", "hiPriority"), ("WHISP-BOX-MIBV2-MIB", "userName"), ("WHISP-BOX-MIBV2-MIB", "userPassword"), ("WHISP-BOX-MIBV2-MIB", "userAccessLevel"), ("WHISP-BOX-MIBV2-MIB", "deleteUser"), ("WHISP-BOX-MIBV2-MIB", "twoXRate"), ("WHISP-BOX-MIBV2-MIB", "lanDhcpState"), ("WHISP-BOX-MIBV2-MIB", "dnsIpState"), ("WHISP-BOX-MIBV2-MIB", "sessionTimeout"), ("WHISP-BOX-MIBV2-MIB", "vlanMemberSource"), ("WHISP-BOX-MIBV2-MIB", "addCustomFreqList"), ("WHISP-BOX-MIBV2-MIB", "removeCustomFreqList"), ("WHISP-BOX-MIBV2-MIB", "allowColocation"), ("WHISP-BOX-MIBV2-MIB", "changeUsrPwd"), ("WHISP-BOX-MIBV2-MIB", "mngtIP2"), ("WHISP-BOX-MIBV2-MIB", "subnetMask2"), ("WHISP-BOX-MIBV2-MIB", "mngtIP3"), ("WHISP-BOX-MIBV2-MIB", "subnetMask3"), ("WHISP-BOX-MIBV2-MIB", "mngtIP4"), ("WHISP-BOX-MIBV2-MIB", "subnetMask4"), ("WHISP-BOX-MIBV2-MIB", "mngtIP5"), ("WHISP-BOX-MIBV2-MIB", "subnetMask5"), ("WHISP-BOX-MIBV2-MIB", "mngtIP6"), ("WHISP-BOX-MIBV2-MIB", "subnetMask6"), ("WHISP-BOX-MIBV2-MIB", "mngtIP7"), ("WHISP-BOX-MIBV2-MIB", "subnetMask7"), ("WHISP-BOX-MIBV2-MIB", "mngtIP8"), ("WHISP-BOX-MIBV2-MIB", "subnetMask8"), ("WHISP-BOX-MIBV2-MIB", "mngtIP9"), ("WHISP-BOX-MIBV2-MIB", "subnetMask9"), ("WHISP-BOX-MIBV2-MIB", "mngtIP10"), ("WHISP-BOX-MIBV2-MIB", "subnetMask10"), ("WHISP-BOX-MIBV2-MIB", "bhvlanEnable"), ("WHISP-BOX-MIBV2-MIB", "lldpBroadcastEnable"), ("WHISP-BOX-MIBV2-MIB", "radioRateAdapt"), ("WHISP-BOX-MIBV2-MIB", "fpgaBuildDate"), ("WHISP-BOX-MIBV2-MIB", "fpgaCompileInfo"), ("WHISP-BOX-MIBV2-MIB", "syslogDomainNameAppend"), ("WHISP-BOX-MIBV2-MIB", "syslogServerAddr"), ("WHISP-BOX-MIBV2-MIB", "syslogServerPort"), ("WHISP-BOX-MIBV2-MIB", "syslogMinLevel"), ("WHISP-BOX-MIBV2-MIB", "syslogStatTxSuccesses"), ("WHISP-BOX-MIBV2-MIB", "syslogStatDropped"), ("WHISP-BOX-MIBV2-MIB", "apType"), ("WHISP-BOX-MIBV2-MIB", "apSmMode"), ("WHISP-BOX-MIBV2-MIB", "region"), ("WHISP-BOX-MIBV2-MIB", "regionCode"), ("WHISP-BOX-MIBV2-MIB", "regionAsia"), ("WHISP-BOX-MIBV2-MIB", "regionEurope"), ("WHISP-BOX-MIBV2-MIB", "regionNorthAmerica"), ("WHISP-BOX-MIBV2-MIB", "regionOceania"), ("WHISP-BOX-MIBV2-MIB", "regionSouthAmerica"), ("WHISP-BOX-MIBV2-MIB", "regionAfrica"), ("WHISP-BOX-MIBV2-MIB", "regionOtherRegulatory"), ("WHISP-BOX-MIBV2-MIB", "radioRateAdaptUL"), ("WHISP-BOX-MIBV2-MIB", "dnsPrimaryMgmtIP"), ("WHISP-BOX-MIBV2-MIB", "dnsAlternateMgmtIP"), ("WHISP-BOX-MIBV2-MIB", 
"dnsMgmtDomainName"), ("WHISP-BOX-MIBV2-MIB", "addCustomFreqMimo"), ("WHISP-BOX-MIBV2-MIB", "removeCustomFreqMimo"), ("WHISP-BOX-MIBV2-MIB", "ftpPort"), ("WHISP-BOX-MIBV2-MIB", "httpPort"), ("WHISP-BOX-MIBV2-MIB", "snmpPort"), ("WHISP-BOX-MIBV2-MIB", "snmpTrapPort"), ("WHISP-BOX-MIBV2-MIB", "lan1DhcpRelease"), ("WHISP-BOX-MIBV2-MIB", "lan1DhcpRenew"), ("WHISP-BOX-MIBV2-MIB", "lan3DhcpRelease"), ("WHISP-BOX-MIBV2-MIB", "lan3DhcpRenew"), ("WHISP-BOX-MIBV2-MIB", "natDhcpRelease"), ("WHISP-BOX-MIBV2-MIB", "natDhcpRenew"), ("WHISP-BOX-MIBV2-MIB", "radioEngKeyed"), ("WHISP-BOX-MIBV2-MIB", "priorityPrecedence"), ("WHISP-BOX-MIBV2-MIB", "pppoeCtlPriority"), ("WHISP-BOX-MIBV2-MIB", "rfStatXmtUDataCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatXmtBDataCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatXmtMDataCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvUDataCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvBDataCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvMDataCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatXmtCntlCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvCntlCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatInSyncCount"), ("WHISP-BOX-MIBV2-MIB", "rfStatOutSyncCount"), ("WHISP-BOX-MIBV2-MIB", "rfStatOverrunCount"), ("WHISP-BOX-MIBV2-MIB", "rfStatUnderrunCount"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvCorruptDataCount"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvCorruptControlCount"), ("WHISP-BOX-MIBV2-MIB", "rfStatBadBcastCtlCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatPLLOutOfLockCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatBeaconVerMismatchCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatBadFreqBcnRcvCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatnonLiteBcnRcvCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatUnsupFeatBcnRcvCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatUnkwnFeatBcnRcvCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatTxCalFailCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatBadInSyncIDRcv"), ("WHISP-BOX-MIBV2-MIB", "rfStatTempOutOfRange"), ("WHISP-BOX-MIBV2-MIB", "rfStatRSSIOutOfRange"), ("WHISP-BOX-MIBV2-MIB", "rfStatRangeCapEnf"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvLTStart"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvLTStartHS"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvLTResult"), ("WHISP-BOX-MIBV2-MIB", "rfStatXmtLTResult"), ("WHISP-BOX-MIBV2-MIB", "whispFeatureKeyOrigin"), ("WHISP-BOX-MIBV2-MIB", "updateStatus"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatbin"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatbout"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatbtoss"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatbtosscap"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatuin"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatuout"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatutoss"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatutosscap"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatbin"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatbout"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatfloods"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatfloods"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatbtoss"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatbtosscap"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatuin"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatuout"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatutoss"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatutosscap"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbErrStatNI1QSend"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbErrStatNI2QSend"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbErrStatBridgeFull"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbErrStatSendMsg"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbErrStatAPFecQSend"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbErrStatApRfQSend"), ("WHISP-BOX-MIBV2-MIB", "fecStatLinkDetected"), ("WHISP-BOX-MIBV2-MIB", "fecStatLinkLost"), ("WHISP-BOX-MIBV2-MIB", "fecInDiscardsCount"), ("WHISP-BOX-MIBV2-MIB", 
"fecInErrorsCount"), ("WHISP-BOX-MIBV2-MIB", "fecOutDiscardsCount"), ("WHISP-BOX-MIBV2-MIB", "fecOutErrorsCount"), ("WHISP-BOX-MIBV2-MIB", "rfInDiscardsCount"), ("WHISP-BOX-MIBV2-MIB", "rfInErrorsCount"), ("WHISP-BOX-MIBV2-MIB", "rfOutDiscardsCount"), ("WHISP-BOX-MIBV2-MIB", "rfOutErrorsCount"), ("WHISP-BOX-MIBV2-MIB", "fecInDiscardsOverloadCount"), ("WHISP-BOX-MIBV2-MIB", "fecOutDiscardsOverloadCount"), ("WHISP-BOX-MIBV2-MIB", "rfInDiscardsOverloadCount"), ("WHISP-BOX-MIBV2-MIB", "rfOutDiscardsOverloadCount"), ("WHISP-BOX-MIBV2-MIB", "interleave")], [("WHISP-BOX-MIBV2-MIB", "radioMSN"), ("WHISP-BOX-MIBV2-MIB", "latitude"), ("WHISP-BOX-MIBV2-MIB", "longitude"), ("WHISP-BOX-MIBV2-MIB", "height"), ("WHISP-BOX-MIBV2-MIB", "bandwidth"), ("WHISP-BOX-MIBV2-MIB", "dataScramblingMethod"), ("WHISP-BOX-MIBV2-MIB", "whispWebUserAccessMode"), ("WHISP-BOX-MIBV2-MIB", "usrAccountEnableAccounting"), ("WHISP-BOX-MIBV2-MIB", "allowRejectThenLocal"), ("WHISP-BOX-MIBV2-MIB", "pppoeFilter"), ("WHISP-BOX-MIBV2-MIB", "smbFilter"), ("WHISP-BOX-MIBV2-MIB", "snmpFilter"), ("WHISP-BOX-MIBV2-MIB", "userP1Filter"), ("WHISP-BOX-MIBV2-MIB", "userP2Filter"), ("WHISP-BOX-MIBV2-MIB", "userP3Filter"), ("WHISP-BOX-MIBV2-MIB", "allOtherIpFilter"), ("WHISP-BOX-MIBV2-MIB", "allIpv4Filter"), ("WHISP-BOX-MIBV2-MIB", "arpFilter"), ("WHISP-BOX-MIBV2-MIB", "allOthersFilter"), ("WHISP-BOX-MIBV2-MIB", "userDefinedPort1"), ("WHISP-BOX-MIBV2-MIB", "port1TCPFilter"), ("WHISP-BOX-MIBV2-MIB", "port1UDPFilter"), ("WHISP-BOX-MIBV2-MIB", "userDefinedPort2"), ("WHISP-BOX-MIBV2-MIB", "port2TCPFilter"), ("WHISP-BOX-MIBV2-MIB", "port2UDPFilter"), ("WHISP-BOX-MIBV2-MIB", "userDefinedPort3"), ("WHISP-BOX-MIBV2-MIB", "port3TCPFilter"), ("WHISP-BOX-MIBV2-MIB", "port3UDPFilter"), ("WHISP-BOX-MIBV2-MIB", "bootpcFilter"), ("WHISP-BOX-MIBV2-MIB", "bootpsFilter"), ("WHISP-BOX-MIBV2-MIB", "ip4MultFilter"), ("WHISP-BOX-MIBV2-MIB", "packetFilterDirection"), ("WHISP-BOX-MIBV2-MIB", "encryptionConfig")]]:
    if getattr(mibBuilder, 'version', (0, 0, 0)) < (4, 4, 2):
# WARNING: leading objects get lost here!
whispBoxAttributesGroup = whispBoxAttributesGroup.setObjects(*_whispBoxAttributesGroup_obj)
else:
whispBoxAttributesGroup = whispBoxAttributesGroup.setObjects(*_whispBoxAttributesGroup_obj, **dict(append=True))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxAttributesGroup = whispBoxAttributesGroup.setStatus('current')
whispBoxControlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 2)).setObjects(("WHISP-BOX-MIBV2-MIB", "saveFlash"), ("WHISP-BOX-MIBV2-MIB", "reboot"), ("WHISP-BOX-MIBV2-MIB", "clearEventLog"), ("WHISP-BOX-MIBV2-MIB", "rebootIfRequired"), ("WHISP-BOX-MIBV2-MIB", "clearBERStats"), ("WHISP-BOX-MIBV2-MIB", "updateDevice"), ("WHISP-BOX-MIBV2-MIB", "siteInfoViewable"), ("WHISP-BOX-MIBV2-MIB", "largeVCQ"), ("WHISP-BOX-MIBV2-MIB", "snrCalculation"), ("WHISP-BOX-MIBV2-MIB", "receiveQualityDebug"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxControlGroup = whispBoxControlGroup.setStatus('current')
whispBoxBTGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 3)).setObjects(("WHISP-BOX-MIBV2-MIB", "whispBridgeMacAddr"), ("WHISP-BOX-MIBV2-MIB", "whispBridgeDesLuid"), ("WHISP-BOX-MIBV2-MIB", "whispBridgeAge"), ("WHISP-BOX-MIBV2-MIB", "whispBridgeExt"), ("WHISP-BOX-MIBV2-MIB", "whispBridgeHash"), ("WHISP-BOX-MIBV2-MIB", "whispBridgeCAM"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxBTGroup = whispBoxBTGroup.setStatus('current')
whispBoxVLANTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 4)).setObjects(("WHISP-BOX-MIBV2-MIB", "whispVID"), ("WHISP-BOX-MIBV2-MIB", "whispVType"), ("WHISP-BOX-MIBV2-MIB", "whispVAge"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxVLANTableGroup = whispBoxVLANTableGroup.setStatus('current')
whispBoxCPTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 5)).setObjects(("WHISP-BOX-MIBV2-MIB", "codePoint0"), ("WHISP-BOX-MIBV2-MIB", "codePoint1"), ("WHISP-BOX-MIBV2-MIB", "codePoint2"), ("WHISP-BOX-MIBV2-MIB", "codePoint3"), ("WHISP-BOX-MIBV2-MIB", "codePoint4"), ("WHISP-BOX-MIBV2-MIB", "codePoint5"), ("WHISP-BOX-MIBV2-MIB", "codePoint6"), ("WHISP-BOX-MIBV2-MIB", "codePoint7"), ("WHISP-BOX-MIBV2-MIB", "codePoint8"), ("WHISP-BOX-MIBV2-MIB", "codePoint9"), ("WHISP-BOX-MIBV2-MIB", "codePoint10"), ("WHISP-BOX-MIBV2-MIB", "codePoint11"), ("WHISP-BOX-MIBV2-MIB", "codePoint12"), ("WHISP-BOX-MIBV2-MIB", "codePoint13"), ("WHISP-BOX-MIBV2-MIB", "codePoint14"), ("WHISP-BOX-MIBV2-MIB", "codePoint15"), ("WHISP-BOX-MIBV2-MIB", "codePoint16"), ("WHISP-BOX-MIBV2-MIB", "codePoint17"), ("WHISP-BOX-MIBV2-MIB", "codePoint18"), ("WHISP-BOX-MIBV2-MIB", "codePoint19"), ("WHISP-BOX-MIBV2-MIB", "codePoint20"), ("WHISP-BOX-MIBV2-MIB", "codePoint21"), ("WHISP-BOX-MIBV2-MIB", "codePoint22"), ("WHISP-BOX-MIBV2-MIB", "codePoint23"), ("WHISP-BOX-MIBV2-MIB", "codePoint24"), ("WHISP-BOX-MIBV2-MIB", "codePoint25"), ("WHISP-BOX-MIBV2-MIB", "codePoint26"), ("WHISP-BOX-MIBV2-MIB", "codePoint27"), ("WHISP-BOX-MIBV2-MIB", "codePoint28"), ("WHISP-BOX-MIBV2-MIB", "codePoint29"), ("WHISP-BOX-MIBV2-MIB", "codePoint30"), ("WHISP-BOX-MIBV2-MIB", "codePoint31"), ("WHISP-BOX-MIBV2-MIB", "codePoint32"), ("WHISP-BOX-MIBV2-MIB", "codePoint33"), ("WHISP-BOX-MIBV2-MIB", "codePoint34"), ("WHISP-BOX-MIBV2-MIB", "codePoint35"), ("WHISP-BOX-MIBV2-MIB", "codePoint36"), ("WHISP-BOX-MIBV2-MIB", "codePoint37"), ("WHISP-BOX-MIBV2-MIB", "codePoint38"), ("WHISP-BOX-MIBV2-MIB", "codePoint39"), ("WHISP-BOX-MIBV2-MIB", "codePoint40"), ("WHISP-BOX-MIBV2-MIB", "codePoint41"), ("WHISP-BOX-MIBV2-MIB", "codePoint42"), ("WHISP-BOX-MIBV2-MIB", "codePoint43"), ("WHISP-BOX-MIBV2-MIB", "codePoint44"), ("WHISP-BOX-MIBV2-MIB", "codePoint45"), ("WHISP-BOX-MIBV2-MIB", "codePoint46"), ("WHISP-BOX-MIBV2-MIB", "codePoint47"), ("WHISP-BOX-MIBV2-MIB", "codePoint48"), ("WHISP-BOX-MIBV2-MIB", "codePoint49"), ("WHISP-BOX-MIBV2-MIB", "codePoint50"), ("WHISP-BOX-MIBV2-MIB", "codePoint51"), ("WHISP-BOX-MIBV2-MIB", "codePoint52"), ("WHISP-BOX-MIBV2-MIB", "codePoint53"), ("WHISP-BOX-MIBV2-MIB", "codePoint54"), ("WHISP-BOX-MIBV2-MIB", "codePoint55"), ("WHISP-BOX-MIBV2-MIB", "codePoint56"), ("WHISP-BOX-MIBV2-MIB", "codePoint57"), ("WHISP-BOX-MIBV2-MIB", "codePoint58"), ("WHISP-BOX-MIBV2-MIB", "codePoint59"), ("WHISP-BOX-MIBV2-MIB", "codePoint60"), ("WHISP-BOX-MIBV2-MIB", "codePoint61"), ("WHISP-BOX-MIBV2-MIB", "codePoint62"), ("WHISP-BOX-MIBV2-MIB", "codePoint63"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxCPTableGroup = whispBoxCPTableGroup.setStatus('current')
whispBoxUserTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 6)).setObjects(("WHISP-BOX-MIBV2-MIB", "entryIndex"), ("WHISP-BOX-MIBV2-MIB", "userLoginName"), ("WHISP-BOX-MIBV2-MIB", "userPswd"), ("WHISP-BOX-MIBV2-MIB", "accessLevel"), ("WHISP-BOX-MIBV2-MIB", "loginStatus"), ("WHISP-BOX-MIBV2-MIB", "loginMethod"), ("WHISP-BOX-MIBV2-MIB", "sessionTime"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxUserTableGroup = whispBoxUserTableGroup.setStatus('current')
whispLayer2NeighborTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 7)).setObjects(("WHISP-BOX-MIBV2-MIB", "entryL2Index"), ("WHISP-BOX-MIBV2-MIB", "neighborMAC"), ("WHISP-BOX-MIBV2-MIB", "neighborIP"), ("WHISP-BOX-MIBV2-MIB", "neighborSiteName"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispLayer2NeighborTableGroup = whispLayer2NeighborTableGroup.setStatus('current')
whispBoxNotifGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 8)).setObjects(("WHISP-BOX-MIBV2-MIB", "boxLan1DHCPClientEvent"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxNotifGroup = whispBoxNotifGroup.setStatus('current')
whispBridgeTbUsed = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 7, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeTbUsed.setStatus('current')
whispBridgeTbFree = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 7, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeTbFree.setStatus('current')
whispBridgeTbErr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 7, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeTbErr.setStatus('current')
codePoint0 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: codePoint0.setStatus('current')
codePoint1 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint1.setStatus('current')
codePoint2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint2.setStatus('current')
codePoint3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint3.setStatus('current')
codePoint4 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint4.setStatus('current')
codePoint5 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint5.setStatus('current')
codePoint6 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint6.setStatus('current')
codePoint7 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint7.setStatus('current')
codePoint8 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint8.setStatus('current')
codePoint9 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint9.setStatus('current')
codePoint10 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint10.setStatus('current')
codePoint11 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 12), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint11.setStatus('current')
codePoint12 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 13), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint12.setStatus('current')
codePoint13 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 14), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint13.setStatus('current')
codePoint14 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 15), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint14.setStatus('current')
codePoint15 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint15.setStatus('current')
codePoint16 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint16.setStatus('current')
codePoint17 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint17.setStatus('current')
codePoint18 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 19), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint18.setStatus('current')
codePoint19 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 20), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint19.setStatus('current')
codePoint20 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 21), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint20.setStatus('current')
codePoint21 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 22), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint21.setStatus('current')
codePoint22 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 23), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint22.setStatus('current')
codePoint23 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 24), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint23.setStatus('current')
codePoint24 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 25), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint24.setStatus('current')
codePoint25 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 26), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint25.setStatus('current')
codePoint26 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 27), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint26.setStatus('current')
codePoint27 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 28), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint27.setStatus('current')
codePoint28 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 29), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint28.setStatus('current')
codePoint29 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 30), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint29.setStatus('current')
codePoint30 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 31), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint30.setStatus('current')
codePoint31 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 32), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint31.setStatus('current')
codePoint32 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 33), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint32.setStatus('current')
codePoint33 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 34), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint33.setStatus('current')
codePoint34 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 35), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint34.setStatus('current')
codePoint35 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 36), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint35.setStatus('current')
codePoint36 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 37), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint36.setStatus('current')
codePoint37 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 38), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint37.setStatus('current')
codePoint38 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 39), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint38.setStatus('current')
codePoint39 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 40), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint39.setStatus('current')
codePoint40 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 41), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint40.setStatus('current')
codePoint41 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 42), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint41.setStatus('current')
codePoint42 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 43), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint42.setStatus('current')
codePoint43 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 44), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint43.setStatus('current')
codePoint44 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 45), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint44.setStatus('current')
codePoint45 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 46), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint45.setStatus('current')
codePoint46 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 47), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint46.setStatus('current')
codePoint47 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 48), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint47.setStatus('current')
codePoint48 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 49), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: codePoint48.setStatus('current')
codePoint49 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 50), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint49.setStatus('current')
codePoint50 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 51), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint50.setStatus('current')
codePoint51 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 52), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint51.setStatus('current')
codePoint52 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 53), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint52.setStatus('current')
codePoint53 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 54), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint53.setStatus('current')
codePoint54 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 55), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint54.setStatus('current')
codePoint55 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 56), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint55.setStatus('current')
codePoint56 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 57), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: codePoint56.setStatus('current')
codePoint57 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 58), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint57.setStatus('current')
codePoint58 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 59), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint58.setStatus('current')
codePoint59 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 60), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint59.setStatus('current')
codePoint60 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 61), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint60.setStatus('current')
codePoint61 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 62), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint61.setStatus('current')
codePoint62 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 63), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint62.setStatus('current')
codePoint63 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 64), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint63.setStatus('current')
boxLan1DHCPClientEvent = NotificationType((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 12, 1, 1)).setObjects(("WHISP-BOX-MIBV2-MIB", "dhcpLanIp"), ("WHISP-BOX-MIBV2-MIB", "whispBoxEsn"))
if mibBuilder.loadTexts: boxLan1DHCPClientEvent.setStatus('current')
dnsIpState = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("manual", 0), ("automatic", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dnsIpState.setStatus('current')
dnsPrimaryMgmtIP = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dnsPrimaryMgmtIP.setStatus('current')
dnsAlternateMgmtIP = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dnsAlternateMgmtIP.setStatus('current')
dnsMgmtDomainName = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dnsMgmtDomainName.setStatus('current')
trapDomainNameAppend = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disableDomain", 0), ("appendDomain", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapDomainNameAppend.setStatus('current')
trap1 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap1.setStatus('current')
trap2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap2.setStatus('current')
trap3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 8), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap3.setStatus('current')
trap4 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 9), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap4.setStatus('current')
trap5 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 10), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap5.setStatus('current')
trap6 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 11), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap6.setStatus('current')
trap7 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 12), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap7.setStatus('current')
trap8 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 13), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap8.setStatus('current')
trap9 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 14), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap9.setStatus('current')
trap10 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 15), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap10.setStatus('current')
whispBoxRFPhysicalRadios = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 1), )
if mibBuilder.loadTexts: whispBoxRFPhysicalRadios.setStatus('current')
whispBoxRFPhysicalRadioEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 1, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "radioIndex"))
if mibBuilder.loadTexts: whispBoxRFPhysicalRadioEntry.setStatus('current')
radioIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioIndex.setStatus('current')
radioType = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("fsk", 0), ("ofdm", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioType.setStatus('current')
radioPaths = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioPaths.setStatus('current')
whispBoxRFPhysicalRadioPaths = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 2), )
if mibBuilder.loadTexts: whispBoxRFPhysicalRadioPaths.setStatus('current')
whispBoxRFPhysicalRadioPathEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 2, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "radioIndex"), (0, "WHISP-BOX-MIBV2-MIB", "pathIndex"))
if mibBuilder.loadTexts: whispBoxRFPhysicalRadioPathEntry.setStatus('current')
pathIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pathIndex.setStatus('current')
whispBoxRFPhysicalRadioFrequencies = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 3), )
if mibBuilder.loadTexts: whispBoxRFPhysicalRadioFrequencies.setStatus('current')
whispBoxRFPhysicalRadioFrequencyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 3, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "radioIndex"), (0, "WHISP-BOX-MIBV2-MIB", "frequency"))
if mibBuilder.loadTexts: whispBoxRFPhysicalRadioFrequencyEntry.setStatus('current')
frequency = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 900000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: frequency.setStatus('current')
whispBoxRFConfigRadios = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 16, 1), )
if mibBuilder.loadTexts: whispBoxRFConfigRadios.setStatus('current')
whispBoxRFConfigRadioEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 16, 1, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "radioConfigIndex"))
if mibBuilder.loadTexts: whispBoxRFConfigRadioEntry.setStatus('current')
radioConfigIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 16, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioConfigIndex.setStatus('current')
radioFrequencyBand = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 16, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))).clone(namedValues=NamedValues(("band700", 0), ("band900", 1), ("band2400", 2), ("band3500", 3), ("band3700", 4), ("band4900", 5), ("band5100", 6), ("band5200", 7), ("band5400", 8), ("band5700", 9), ("band5800", 10), ("band5900", 11), ("band6050", 12)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioFrequencyBand.setStatus('current')
whispBoxBridgeTable = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4), )
if mibBuilder.loadTexts: whispBoxBridgeTable.setStatus('current')
whispBoxBridgeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "whispBridgeMacAddr"))
if mibBuilder.loadTexts: whispBoxBridgeEntry.setStatus('current')
whispBridgeMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeMacAddr.setStatus('current')
whispBridgeDesLuid = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1, 2), WhispLUID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeDesLuid.setStatus('current')
whispBridgeAge = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeAge.setStatus('current')
whispBridgeExt = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeExt.setStatus('current')
whispBridgeHash = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeHash.setStatus('current')
whispBridgeCAM = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeCAM.setStatus('obsolete')
whispVLANTable = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 8), )
if mibBuilder.loadTexts: whispVLANTable.setStatus('current')
whispVLANEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 8, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "whispVID"))
if mibBuilder.loadTexts: whispVLANEntry.setStatus('current')
whispVID = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 8, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4095))).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispVID.setStatus('current')
whispVType = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 8, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispVType.setStatus('current')
whispVAge = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 8, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispVAge.setStatus('current')
whispUserTable = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10), )
if mibBuilder.loadTexts: whispUserTable.setStatus('current')
whispUserEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "entryIndex"))
if mibBuilder.loadTexts: whispUserEntry.setStatus('current')
entryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entryIndex.setStatus('current')
userLoginName = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: userLoginName.setStatus('current')
userPswd = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: userPswd.setStatus('current')
accessLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accessLevel.setStatus('current')
loginStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: loginStatus.setStatus('current')
loginMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: loginMethod.setStatus('current')
sessionTime = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sessionTime.setStatus('current')
whispLayer2NeighborTable = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 11), )
if mibBuilder.loadTexts: whispLayer2NeighborTable.setStatus('current')
whispLayer2NeighborEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 11, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "entryL2Index"))
if mibBuilder.loadTexts: whispLayer2NeighborEntry.setStatus('current')
entryL2Index = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 11, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 20))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entryL2Index.setStatus('current')
neighborMAC = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 11, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: neighborMAC.setStatus('current')
neighborIP = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 11, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: neighborIP.setStatus('current')
neighborSiteName = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 11, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: neighborSiteName.setStatus('current')
mibBuilder.exportSymbols("WHISP-BOX-MIBV2-MIB", lan3DhcpRenew=lan3DhcpRenew, mngtIP=mngtIP, rfStatOutSyncCount=rfStatOutSyncCount, hiPriority=hiPriority, trap7=trap7, codePoint25=codePoint25, allowVIDAccess=allowVIDAccess, codePoint46=codePoint46, codePoint59=codePoint59, bootpcFilter=bootpcFilter, rfStatRcvLTStartHS=rfStatRcvLTStartHS, mac10VIDMapVid=mac10VIDMapVid, lldpBroadcastEnable=lldpBroadcastEnable, mac7VIDMapAddr=mac7VIDMapAddr, inSyncCount=inSyncCount, codePoint17=codePoint17, codePoint27=codePoint27, ftpPort=ftpPort, codePoint20=codePoint20, whispBoxConf=whispBoxConf, fpgaCompileInfo=fpgaCompileInfo, whispBoxDHCPClientEvent=whispBoxDHCPClientEvent, longitude=longitude, mngtIP10=mngtIP10, rfOutErrorsCount=rfOutErrorsCount, bootpsFilter=bootpsFilter, mngtIP9=mngtIP9, codePoint19=codePoint19, codePoint22=codePoint22, dnsPrimaryMgmtIP=dnsPrimaryMgmtIP, dnsMgmtDomainName=dnsMgmtDomainName, syslogStatTxSuccesses=syslogStatTxSuccesses, codePoint2=codePoint2, rfInDiscardsOverloadCount=rfInDiscardsOverloadCount, whispBridgeExt=whispBridgeExt, bridgeCbFecStatfloods=bridgeCbFecStatfloods, bhTimingMode=bhTimingMode, russiaRegion=russiaRegion, entryIndex=entryIndex, regionEurope=regionEurope, removeCustomFreqList=removeCustomFreqList, codePoint10=codePoint10, mngtIP8=mngtIP8, whispBoxEvent=whispBoxEvent, rfStatRcvMDataCnt=rfStatRcvMDataCnt, accessLevel=accessLevel, codePoint7=codePoint7, whispBoxRFPhysical=whispBoxRFPhysical, mac1VIDMapVid=mac1VIDMapVid, syslogServerPort=syslogServerPort, codePoint32=codePoint32, pass2Status=pass2Status, allOtherIpFilter=allOtherIpFilter, snmpFilter=snmpFilter, rfStatUnderrunCount=rfStatUnderrunCount, snrCalculation=snrCalculation, syslogServerAddr=syslogServerAddr, codePoint33=codePoint33, userP3Filter=userP3Filter, bridgeCbFecStatbtosscap=bridgeCbFecStatbtosscap, whispBoxP11FPGAType=whispBoxP11FPGAType, portVID=portVID, whispBridgeCAM=whispBridgeCAM, trap5=trap5, mngtIP5=mngtIP5, bridgeEnable=bridgeEnable, mac8VIDMapAddr=mac8VIDMapAddr, mac6VIDMapAddr=mac6VIDMapAddr, radioConfigIndex=radioConfigIndex, codePoint55=codePoint55, whispBoxP11BstrapFPGAVer=whispBoxP11BstrapFPGAVer, whispLayer2NeighborEntry=whispLayer2NeighborEntry, sessionTimeout=sessionTimeout, whispFeatureKeyOrigin=whispFeatureKeyOrigin, whispUserTable=whispUserTable, mac10VIDMapAddr=mac10VIDMapAddr, userDefinedPort3=userDefinedPort3, changeUsrPwd=changeUsrPwd, bhModulation=bhModulation, bridgeCbFecStatutosscap=bridgeCbFecStatutosscap, region=region, bandwidth=bandwidth, lan3DhcpRelease=lan3DhcpRelease, whispWebUserAccessMode=whispWebUserAccessMode, syslogDomainNameAppend=syslogDomainNameAppend, whispBoxEsn=whispBoxEsn, transmitterOP=transmitterOP, vlanAcceptQinQFrames=vlanAcceptQinQFrames, radioType=radioType, trapIP2=trapIP2, siteInfoViewable=siteInfoViewable, vlanPortType=vlanPortType, subnetMask9=subnetMask9, subnetMask5=subnetMask5, boxDeviceType=boxDeviceType, sessionTime=sessionTime, rfOutDiscardsCount=rfOutDiscardsCount, channelBandwidth=channelBandwidth, fecInDiscardsOverloadCount=fecInDiscardsOverloadCount, allOthersFilter=allOthersFilter, mac8VIDMapVid=mac8VIDMapVid, pppoeFilter=pppoeFilter, rfStatXmtLTResult=rfStatXmtLTResult, trapIP9=trapIP9, trap3=trap3, dhcpRfPublicSubnetMask=dhcpRfPublicSubnetMask, whispBoxRFPhysicalRadioFrequencyEntry=whispBoxRFPhysicalRadioFrequencyEntry, userAccessLevel=userAccessLevel, rfPublicDhcpStatus=rfPublicDhcpStatus, dhcpRfPublicGateway=dhcpRfPublicGateway, codePoint42=codePoint42, numberCustomFreq=numberCustomFreq, agcGainRxCH2=agcGainRxCH2, 
fullAccess=fullAccess, neighborMAC=neighborMAC, whispBoxControls=whispBoxControls, whispBoxCPTableGroup=whispBoxCPTableGroup, rfOutDiscardsOverloadCount=rfOutDiscardsOverloadCount, trapDomainNameAppend=trapDomainNameAppend, installationColorCode=installationColorCode, whispBoxRFPhysicalRadioPaths=whispBoxRFPhysicalRadioPaths, powerControl=powerControl, ip4MultFilter=ip4MultFilter, codePoint8=codePoint8, outSyncCount=outSyncCount, rfStatUnkwnFeatBcnRcvCnt=rfStatUnkwnFeatBcnRcvCnt, fecOutDiscardsOverloadCount=fecOutDiscardsOverloadCount, userP2Filter=userP2Filter, codePoint39=codePoint39, rfStatXmtUDataCnt=rfStatXmtUDataCnt, codePoint18=codePoint18, packetOverloadCounter=packetOverloadCounter, radioIndex=radioIndex, antPolarization=antPolarization, syslogStatDropped=syslogStatDropped, codePoint45=codePoint45, calibrationStatusBool=calibrationStatusBool, commStringRWrite=commStringRWrite, whispBoxFPGAVer=whispBoxFPGAVer, mac1VIDMapAddr=mac1VIDMapAddr, regionCode=regionCode, boxEncryption=boxEncryption, port3UDPFilter=port3UDPFilter, codePoint11=codePoint11, pass1Status=pass1Status, whispBoxRFPhysicalRadios=whispBoxRFPhysicalRadios, smbFilter=smbFilter, whispBoxConfig=whispBoxConfig, whispBoxUserTableGroup=whispBoxUserTableGroup, subnetMask3=subnetMask3, bridgeCbRFStatbout=bridgeCbRFStatbout, subnetMask6=subnetMask6, radioEngKeyed=radioEngKeyed, rfStatBadInSyncIDRcv=rfStatBadInSyncIDRcv, whispBoxDNS=whispBoxDNS, trapIP6=trapIP6, userP1Filter=userP1Filter, vlanMemberSource=vlanMemberSource, bridgeCbFecStatutoss=bridgeCbFecStatutoss, whispBoxSoftwareVer=whispBoxSoftwareVer, whispBoxBridgeEntry=whispBoxBridgeEntry, mngtIP4=mngtIP4, fecEnable=fecEnable, userDefinedPort2=userDefinedPort2, bridgeCbErrStatBridgeFull=bridgeCbErrStatBridgeFull, bhvlanEnable=bhvlanEnable, fecOutDiscardsCount=fecOutDiscardsCount, scheduling=scheduling, codePoint30=codePoint30, codePoint43=codePoint43, subnetMask10=subnetMask10, etherLinkStatus=etherLinkStatus, spectrumAnalysisActionBox=spectrumAnalysisActionBox, whispBoxRFPhysicalRadioPathEntry=whispBoxRFPhysicalRadioPathEntry, whispBridgeTbErr=whispBridgeTbErr, userDefinedPort1=userDefinedPort1, dhcpLanSubnetMask=dhcpLanSubnetMask, whispBridgeHash=whispBridgeHash, whispBoxCPVar=whispBoxCPVar, codePoint52=codePoint52, dataScramblingMethod=dataScramblingMethod, clearEventLog=clearEventLog, snmpPort=snmpPort, bridgeCbRFStatutosscap=bridgeCbRFStatutosscap, trap10=trap10, codePoint14=codePoint14, regionOtherRegulatory=regionOtherRegulatory, codePoint31=codePoint31, rfStatRcvLTStart=rfStatRcvLTStart, codePoint34=codePoint34, mac2VIDMapVid=mac2VIDMapVid, rfStatInSyncCount=rfStatInSyncCount, whispBridgeTbFree=whispBridgeTbFree, codePoint35=codePoint35, addCustomFreqMimo=addCustomFreqMimo, codePoint38=codePoint38, mac5VIDMapAddr=mac5VIDMapAddr, platformType=platformType, apType=apType, setDefaultPlug=setDefaultPlug, deleteUser=deleteUser, reboot=reboot, trap4=trap4, rfStatRcvBDataCnt=rfStatRcvBDataCnt, subnetMask2=subnetMask2, updateStatus=updateStatus, codePoint57=codePoint57, port1UDPFilter=port1UDPFilter, numDFSDetections=numDFSDetections, codePoint41=codePoint41, pathIndex=pathIndex, bridgeCbRFStatuout=bridgeCbRFStatuout, codePoint49=codePoint49, natDhcpStatus=natDhcpStatus, rfStatOverrunCount=rfStatOverrunCount, rebootIfRequired=rebootIfRequired, subnetMask7=subnetMask7, trap6=trap6, loginMethod=loginMethod, bridgeCbErrStatApRfQSend=bridgeCbErrStatApRfQSend, managementVID=managementVID, codePoint36=codePoint36, rfStatRcvUDataCnt=rfStatRcvUDataCnt, codePoint5=codePoint5, 
bridgeCbRFStatfloods=bridgeCbRFStatfloods, rfStatRcvCntlCnt=rfStatRcvCntlCnt, trap2=trap2, codePoint13=codePoint13, bridgeCbErrStatNI2QSend=bridgeCbErrStatNI2QSend, snmpMibPerm=snmpMibPerm, userPswd=userPswd, trap8=trap8, rfStatRcvCorruptControlCount=rfStatRcvCorruptControlCount, whispVLANEntry=whispVLANEntry, platformInfo=platformInfo, codePoint50=codePoint50, fecInDiscardsCount=fecInDiscardsCount, codePoint12=codePoint12, removeCustomFreqMimo=removeCustomFreqMimo, whispBoxEventLog=whispBoxEventLog, bridgeCbErrStatNI1QSend=bridgeCbErrStatNI1QSend, linkNegoSpeed=linkNegoSpeed, whispBridgeAge=whispBridgeAge, ism=ism, fecInErrorsCount=fecInErrorsCount, whispVType=whispVType, trap1=trap1, allowColocation=allowColocation, agingTimeout=agingTimeout, antType=antType, userLoginName=userLoginName, whispBoxBoot=whispBoxBoot, neighborSiteName=neighborSiteName, pldVersion=pldVersion)
mibBuilder.exportSymbols("WHISP-BOX-MIBV2-MIB", pllOutLockCount=pllOutLockCount, bridgeCbFecStatbin=bridgeCbFecStatbin, codePoint1=codePoint1, interleave=interleave, whispBoxVLANTableGroup=whispBoxVLANTableGroup, addCustomFreqList=addCustomFreqList, dhcpLanGateway=dhcpLanGateway, codePoint29=codePoint29, subnetMask4=subnetMask4, codePoint0=codePoint0, mac3VIDMapAddr=mac3VIDMapAddr, lan1DhcpRenew=lan1DhcpRenew, regionSouthAmerica=regionSouthAmerica, rfStatXmtMDataCnt=rfStatXmtMDataCnt, rfStatRangeCapEnf=rfStatRangeCapEnf, dynamicLearning=dynamicLearning, ethernetLinkSpeed=ethernetLinkSpeed, rfStatBeaconVerMismatchCnt=rfStatBeaconVerMismatchCnt, trapIP10=trapIP10, dnsIpState=dnsIpState, boxDeviceTypeID=boxDeviceTypeID, height=height, trapIP3=trapIP3, snmpTrapPort=snmpTrapPort, bridgeCbErrStatSendMsg=bridgeCbErrStatSendMsg, codePoint63=codePoint63, mac4VIDMapAddr=mac4VIDMapAddr, dhcpRfPublicIp=dhcpRfPublicIp, boxTemperature=boxTemperature, trapIP5=trapIP5, rfInDiscardsCount=rfInDiscardsCount, saveFlash=saveFlash, rfStatnonLiteBcnRcvCnt=rfStatnonLiteBcnRcvCnt, bridgeCbRFStatutoss=bridgeCbRFStatutoss, bridgeCbFecStatbout=bridgeCbFecStatbout, rfStatTempOutOfRange=rfStatTempOutOfRange, whispBridgeDesLuid=whispBridgeDesLuid, twoXRate=twoXRate, codePoint44=codePoint44, whispVAge=whispVAge, hwsCompatibility=hwsCompatibility, codePoint26=codePoint26, allIpv4Filter=allIpv4Filter, whispVID=whispVID, radioPaths=radioPaths, radioFrequencyBand=radioFrequencyBand, trapIP1=trapIP1, lanDhcpState=lanDhcpState, whispBoxBTGroup=whispBoxBTGroup, whispBoxBridgeVar=whispBoxBridgeVar, aggregateBandwidthCap=aggregateBandwidthCap, codePoint21=codePoint21, port1TCPFilter=port1TCPFilter, webAutoUpdate=webAutoUpdate, trapIP4=trapIP4, agcGainRxCH1=agcGainRxCH1, fecStatLinkDetected=fecStatLinkDetected, whispBoxNotifGroup=whispBoxNotifGroup, mngtIP6=mngtIP6, port3TCPFilter=port3TCPFilter, fpgaBuildDate=fpgaBuildDate, regionAfrica=regionAfrica, httpPort=httpPort, port2UDPFilter=port2UDPFilter, whispLayer2NeighborTableGroup=whispLayer2NeighborTableGroup, whispBridgeTbUsed=whispBridgeTbUsed, setDefaults=setDefaults, regionAsia=regionAsia, neighborIP=neighborIP, antennaGain=antennaGain, rfStatRcvCorruptDataCount=rfStatRcvCorruptDataCount, whispVLANTable=whispVLANTable, codePoint60=codePoint60, usrAccountEnableAccounting=usrAccountEnableAccounting, rfStatBadBcastCtlCnt=rfStatBadBcastCtlCnt, rfStatRSSIOutOfRange=rfStatRSSIOutOfRange, encryptionConfig=encryptionConfig, cyclicPrefix=cyclicPrefix, whispBoxEvntLog=whispBoxEvntLog, codePoint23=codePoint23, priorityPrecedence=priorityPrecedence, rfStatBadFreqBcnRcvCnt=rfStatBadFreqBcnRcvCnt, boxFrequency=boxFrequency, calibrationStatusBox=calibrationStatusBox, codePoint56=codePoint56, codePoint24=codePoint24, mac4VIDMapVid=mac4VIDMapVid, mac9VIDMapAddr=mac9VIDMapAddr, codePoint47=codePoint47, codePoint9=codePoint9, rfStatRcvLTResult=rfStatRcvLTResult, latitude=latitude, lanDhcpStatus=lanDhcpStatus, subnetMask8=subnetMask8, codePoint4=codePoint4, addVlanMember=addVlanMember, trapIP7=trapIP7, boxTemperatureC=boxTemperatureC, swVersion=swVersion, whispBoxRFConfig=whispBoxRFConfig, mac7VIDMapVid=mac7VIDMapVid, updateDevice=updateDevice, arpFilter=arpFilter, lan1DhcpRelease=lan1DhcpRelease, displayOnlyAccess=displayOnlyAccess, PYSNMP_MODULE_ID=whispBoxLevelMibModule, timedSpectrumAnalysisDurationBox=timedSpectrumAnalysisDurationBox, codePoint51=codePoint51, frequency=frequency, radioRateAdaptUL=radioRateAdaptUL, gpsInput=gpsInput, allowRejectThenLocal=allowRejectThenLocal, eirp=eirp, 
codePoint53=codePoint53, boxLan1DHCPClientEvent=boxLan1DHCPClientEvent, bridgeEntryTimeout=bridgeEntryTimeout, natDhcpRelease=natDhcpRelease, commStringROnly=commStringROnly, syslogMinLevel=syslogMinLevel, txCalFailure=txCalFailure, userName=userName, natDhcpRenew=natDhcpRenew, mngtIP3=mngtIP3, rfStatUnsupFeatBcnRcvCnt=rfStatUnsupFeatBcnRcvCnt, regionOceania=regionOceania, codePoint62=codePoint62, removeVlanMember=removeVlanMember, codePoint61=codePoint61, providerVID=providerVID, codePoint15=codePoint15, mngtIP2=mngtIP2, boxTemperatureF=boxTemperatureF, rfInErrorsCount=rfInErrorsCount, clearBERStats=clearBERStats, frameType=frameType, whispLayer2NeighborTable=whispLayer2NeighborTable, whispBoxStatus=whispBoxStatus, trapIP8=trapIP8, whispBoxRFConfigRadioEntry=whispBoxRFConfigRadioEntry, bridgeCbErrStatAPFecQSend=bridgeCbErrStatAPFecQSend, radioRateAdapt=radioRateAdapt, codePoint28=codePoint28, whispBoxRFPhysicalRadioEntry=whispBoxRFPhysicalRadioEntry, whispBoxLevelMibModule=whispBoxLevelMibModule, regionNorthAmerica=regionNorthAmerica, codePoint37=codePoint37, bridgeCbRFStatbin=bridgeCbRFStatbin, dnsAlternateMgmtIP=dnsAlternateMgmtIP, radioMSN=radioMSN, bridgeCbRFStatbtoss=bridgeCbRFStatbtoss, pppoeCtlPriority=pppoeCtlPriority, loginStatus=loginStatus, fecOutErrorsCount=fecOutErrorsCount, extFilterDelay=extFilterDelay, subnetMask=subnetMask, largeVCQ=largeVCQ, packetFilterDirection=packetFilterDirection, userPassword=userPassword, whispBoxAttributesGroup=whispBoxAttributesGroup, mac3VIDMapVid=mac3VIDMapVid, codePoint16=codePoint16, codePoint40=codePoint40, rxOverrunPkts=rxOverrunPkts, bridgeCbRFStatbtosscap=bridgeCbRFStatbtosscap, fecStatLinkLost=fecStatLinkLost, whispBoxP11Personality=whispBoxP11Personality, codePoint3=codePoint3, bridgeCbRFStatuin=bridgeCbRFStatuin, trap9=trap9, apSmMode=apSmMode, rfStatTxCalFailCnt=rfStatTxCalFailCnt, mac2VIDMapAddr=mac2VIDMapAddr, codePoint6=codePoint6, rfStatPLLOutOfLockCnt=rfStatPLLOutOfLockCnt, whispBoxBridgeTable=whispBoxBridgeTable, whispBoxRFConfigRadios=whispBoxRFConfigRadios, bridgeCbFecStatuin=bridgeCbFecStatuin, mac6VIDMapVid=mac6VIDMapVid, whispBridgeMacAddr=whispBridgeMacAddr, port2TCPFilter=port2TCPFilter, codePoint48=codePoint48, receiveQualityDebug=receiveQualityDebug, mac5VIDMapVid=mac5VIDMapVid, bridgeCbFecStatbtoss=bridgeCbFecStatbtoss, platformVer=platformVer, rfStatXmtCntlCnt=rfStatXmtCntlCnt, dhcpLanIp=dhcpLanIp, colorCode=colorCode, whispBoxRFPhysicalRadioFrequencies=whispBoxRFPhysicalRadioFrequencies, codePoint58=codePoint58, mac9VIDMapVid=mac9VIDMapVid, whispUserEntry=whispUserEntry, codePoint54=codePoint54, whispBoxControlGroup=whispBoxControlGroup, entryL2Index=entryL2Index, whispBoxGroups=whispBoxGroups, mngtIP7=mngtIP7, rfStatXmtBDataCnt=rfStatXmtBDataCnt, bridgeCbFecStatuout=bridgeCbFecStatuout)
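
# Usage sketch (not part of the generated MIB definitions above): reading one
# of the scalars defined in this module over SNMP with pysnmp's high-level
# API. The agent address '192.0.2.1' and community string 'public' are
# placeholder assumptions, not values taken from this MIB. The block is
# guarded under __main__ so importing the MIB module never does network I/O.
if __name__ == '__main__':
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)

    errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
        SnmpEngine(),
        CommunityData('public'),                 # assumed read community
        UdpTransportTarget(('192.0.2.1', 161)),  # assumed agent address
        ContextData(),
        ObjectType(ObjectIdentity('WHISP-BOX-MIBV2-MIB', 'boxTemperatureC', 0))))

    if errorIndication:
        print(errorIndication)   # e.g. request timed out
    elif errorStatus:
        print('%s at %s' % (errorStatus.prettyPrint(), errorIndex))
    else:
        for varBind in varBinds:
            print(' = '.join([x.prettyPrint() for x in varBind]))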
|
python
|
from setuptools import setup
setup(name='safygiphy',
version='1.1.1',
description='API Wrapper for the online Gif library, Giphy',
url='https://code.tetraetc.com/SafyGiphy/',
author="TetraEtc",
author_email="[email protected]",
install_requires=[
'requests'
],
packages=['safygiphy']
)
|
python
|
from mypy import api
from redun.tests.utils import get_test_file
def test_task_types() -> None:
"""
mypy should find type errors related to redun task calls.
"""
workflow_file = get_test_file("test_data/typing/workflow_fail.py.txt")
stdout, stderr, ret_code = api.run(
[
"--show-traceback",
workflow_file,
"redun",
]
)
print(stdout)
assert ret_code == 1
# Parse found type check errors.
stdout_lines = stdout.split("\n")[:-2]
found_errors = {line.split(":", 2)[1] for line in stdout_lines}
# Get lines with expected errors.
with open(workflow_file) as infile:
expected_errors = {str(i) for i, line in enumerate(infile, 1) if "ERROR" in line}
assert found_errors == expected_errors
|
python
|
from collections import Counter
l = [1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 4, 4, 4, 4,
4, 5, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 8, 8, 8, 8]
# print(Counter(l))
s = 'aaassssvvvveeeeedddddccccccceeelllll'
# print(Counter(s))
word = 'How many times does each word show up in this sentence word word show up'
words = word.split()
# print(Counter(words))
c = Counter(words)
# print(c.most_common(2))
# print(sum(c.values()))
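# Expected results if the prints above are uncommented (computed by hand):
# c.most_common(2) -> [('word', 3), ('show', 2)]
# sum(c.values())  -> 15 (total number of words in the sentence)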
|
python
|
"""Process ACL states"""
from __future__ import absolute_import
import logger
from utils import dict_proto
from proto.acl_counting_pb2 import RuleCounts
LOGGER = logger.get_logger('aclstate')
class AclStateCollector:
"""Processing ACL states for ACL counting"""
def __init__(self):
self._switch_configs = {}
def get_port_rule_counts(self, switch, port, rule_samples):
"""Return the ACL count for a port"""
acl_config, error_map = self._verify_port_acl_config(switch, port)
if not acl_config:
return dict_proto(error_map, RuleCounts)
rule_counts = self._get_port_rule_counts(switch, port, acl_config, rule_samples)
return dict_proto(rule_counts, RuleCounts)
# pylint: disable=protected-access
def _get_port_rule_counts(self, switch, port, acl_config, rule_samples):
rule_counts_map = {'rules': {}, 'errors': []}
rules_map = rule_counts_map['rules']
errors = rule_counts_map['errors']
for rule_config in acl_config.rules:
cookie_num = rule_config.get('cookie')
if not cookie_num:
LOGGER.error(
'Cookie is not generated for ACL rule: %s, %s',
acl_config._id, rule_config.get('description'))
continue
rule_description = rule_config.get('description')
if not rule_description:
LOGGER.error('Rule with cookie %s does not have a description', cookie_num)
continue
has_sample = False
for sample in rule_samples:
if str(sample.labels.get('cookie')) != str(cookie_num):
continue
if sample.labels.get('dp_name') != switch:
continue
if int(sample.labels.get('in_port')) != port:
continue
rule_map = rules_map.setdefault(rule_description, {})
rule_map['packet_count'] = int(sample.value)
has_sample = True
break
if not has_sample:
error = (f'No ACL metric sample available for switch, port, ACL, rule: '
f'{switch}, {port}, {acl_config._id}, {rule_description} '
f'(cookie={cookie_num})')
errors.append(error)
LOGGER.error(error)
return rule_counts_map
def _verify_port_acl_config(self, switch, port):
error_map = {'errors': []}
error_list = error_map['errors']
switch_config = self._switch_configs.get(switch)
if not switch_config:
error = f'Switch not defined in Faucet dps config: {switch}'
LOGGER.error(error)
error_list.append(error)
return None, error_map
port_config = switch_config.ports.get(port)
if not port_config:
error = f'Port not defined in Faucet dps config: {switch}, {port}'
LOGGER.error(error)
error_list.append(error)
return None, error_map
acls_config = port_config.acls_in
if not acls_config:
error = f'No ACLs applied to port: {switch}, {port}'
LOGGER.error(error)
error_list.append(error)
return None, error_map
if len(acls_config) != 1:
            error = f'More than one ACL applied to port: {switch}, {port}'
LOGGER.error(error)
error_list.append(error)
return None, error_map
return acls_config[0], None
def update_switch_configs(self, switch_configs):
"""Update cache of switch configs"""
self._switch_configs = switch_configs
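if __name__ == '__main__':
    # Hypothetical smoke test: SimpleNamespace objects stand in for the Faucet
    # config entries and Prometheus samples this collector reads; every name
    # and value below is made up for illustration.
    from types import SimpleNamespace
    acl = SimpleNamespace(_id='acl1', rules=[{'cookie': 1, 'description': 'allow-dns'}])
    switch_config = SimpleNamespace(ports={1: SimpleNamespace(acls_in=[acl])})
    sample = SimpleNamespace(value=42, labels={'cookie': '1', 'dp_name': 'sw1', 'in_port': '1'})
    collector = AclStateCollector()
    collector.update_switch_configs({'sw1': switch_config})
    # Call the dict-returning helper directly to sidestep the protobuf step.
    print(collector._get_port_rule_counts('sw1', 1, acl, [sample]))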
|
python
|
from sqlalchemy.orm import joinedload
from FlaskRTBCTF.utils.models import db, TimeMixin, ReprMixin
from FlaskRTBCTF.utils.cache import cache
# Machine Table
class Machine(TimeMixin, ReprMixin, db.Model):
__tablename__ = "machine"
__repr_fields__ = (
"name",
"os",
)
id = db.Column(db.Integer, primary_key=True, index=True)
name = db.Column(db.String(64), nullable=False, unique=True)
user_hash = db.Column(db.String(32), nullable=False)
root_hash = db.Column(db.String(32), nullable=False)
user_points = db.Column(db.Integer, default=0)
root_points = db.Column(db.Integer, default=0)
os = db.Column(db.String, nullable=False, default="linux")
ip = db.Column(db.String(64), nullable=False)
difficulty = db.Column(db.String, nullable=False, default="Easy")
@staticmethod
@cache.cached(timeout=3600 * 3, key_prefix="machines")
def get_all():
return Machine.query.all()
# UserMachine: N to N relationship
class UserMachine(TimeMixin, db.Model):
__tablename__ = "user_machine"
user_id = db.Column(
db.Integer,
db.ForeignKey("user.id"),
nullable=False,
primary_key=True,
index=True,
)
machine_id = db.Column(
db.Integer,
db.ForeignKey("machine.id"),
nullable=False,
primary_key=True,
index=True,
)
owned_user = db.Column(db.Boolean, nullable=False, default=False)
owned_root = db.Column(db.Boolean, nullable=False, default=False)
@classmethod
@cache.memoize(timeout=3600 * 3)
def completed_machines(cls, user_id):
completed = dict()
_ids1 = (
cls.query.with_entities(cls.machine_id)
.filter_by(user_id=user_id, owned_user=True)
.all()
)
_ids2 = (
cls.query.with_entities(cls.machine_id)
.filter_by(user_id=user_id, owned_root=True)
.all()
)
completed["user"] = [int(id[0]) for id in _ids1]
completed["root"] = [int(id[0]) for id in _ids2]
return completed
# Tag Model
class Tag(ReprMixin, db.Model):
__tablename__ = "tag"
__repr_fields__ = ("label",)
id = db.Column(db.Integer, primary_key=True)
label = db.Column(db.String(32), nullable=False)
color = db.Column(db.String(16), nullable=False)
# Tags table
tags = db.Table(
"tags",
db.Column("tag_id", db.Integer, db.ForeignKey("tag.id"), primary_key=True),
db.Column(
"challenge_id", db.Integer, db.ForeignKey("challenge.id"), primary_key=True
),
)
# Challenges Model
class Challenge(TimeMixin, ReprMixin, db.Model):
__tablename__ = "challenge"
__repr_fields__ = ("title", "category")
id = db.Column(db.Integer, primary_key=True, index=True)
title = db.Column(db.String(64), nullable=False, unique=True)
description = db.Column(db.TEXT, nullable=True)
flag = db.Column(db.TEXT, nullable=False)
points = db.Column(db.Integer, nullable=False, default=0)
url = db.Column(db.TEXT, nullable=True)
difficulty = db.Column(db.String, nullable=True)
category_id = db.Column(db.Integer, db.ForeignKey("category.id"), nullable=False)
category = db.relationship("Category", backref=db.backref("challenges", lazy=True))
tags = db.relationship(
"Tag",
secondary=tags,
lazy="subquery",
backref=db.backref("challenges", lazy="noload"),
)
# UserChallenge: N to N relationship
class UserChallenge(TimeMixin, db.Model):
__tablename__ = "user_challenge"
user_id = db.Column(
db.Integer,
db.ForeignKey("user.id"),
nullable=False,
primary_key=True,
index=True,
)
challenge_id = db.Column(
db.Integer,
db.ForeignKey("challenge.id"),
nullable=False,
primary_key=True,
index=True,
)
completed = db.Column(db.Boolean, nullable=False, default=False)
@classmethod
@cache.memoize(timeout=3600 * 3)
def completed_challenges(cls, user_id):
_ids = (
cls.query.with_entities(cls.challenge_id)
.filter_by(user_id=user_id, completed=True)
.all()
)
_ids = [int(id[0]) for id in _ids]
return _ids
# Category Model
class Category(ReprMixin, db.Model):
__tablename__ = "category"
__repr_fields__ = ("name",)
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(32), nullable=False)
@staticmethod
@cache.cached(timeout=3600 * 3, key_prefix="challenges")
def get_challenges():
categories = (
Category.query.options(joinedload("challenges"))
.filter(Category.challenges)
.all()
)
return categories
|
python
|
# This file is part of Peach-Py package and is licensed under the Simplified BSD license.
# See license.rst for the full text of the license.
from enum import IntEnum
class FileType(IntEnum):
# No file type
null = 0
# Relocatable file
object = 1
# Executable file
executable = 2
# Fixed VM shared library (?)
fixed_vm_library = 3
# Core dump file
core_dump = 4
# Preloaded executable file
preloaded_executable = 5
# Dynamically bound shared library
dynamic_library = 6
# Dynamic linker (dyld)
dynamic_linker = 7
# Dynamically bound bundle file
dynamic_bundle = 8
# Shared library stub for build-time linking (no section content)
dynamic_library_stub = 9
# Companion file with debug sections only
debug_symbols = 10
# Kernel-mode driver
kext_bundle = 11
class CpuType(IntEnum):
x86 = 0x00000007
x86_64 = 0x01000007
arm = 0x0000000C
arm64 = 0x0100000C
ppc = 0x00000012
ppc64 = 0x01000012
abi64 = 0x01000000
class PPCCpuSubType(IntEnum):
all = 0
# PowerPC G3
powerpc750 = 9
# PowerPC G4
powerpc7400 = 10
# PowerPC G4+
powerpc7450 = 11
# PowerPC G5
powerpc970 = 100
class X86CpuSubType(IntEnum):
all = 3
class ARMCpuSubType(IntEnum):
all = 0
# ARM 1176
v6 = 6
# ARM Cortex-A8
v7 = 9
# Cortex-A9 (ARMv7 + MP extension + NEON-HP, de-facto useless, removed from Clang)
v7f = 10
# Swift (ARMv7 + MP extension + VFPv4/NEONv2 + DIV)
v7s = 11
# Marvell Kirkwood (ARMv7 + XScale extension + WMMXv2 + Armada extension, no NEON)
v7k = 12
# Cyclone
v8 = 13
class ARM64CpuSubType(IntEnum):
all = 0
# Cyclone
v8 = 1
class MachHeader:
def __init__(self, abi):
import peachpy.x86_64
import peachpy.arm
self.abi = abi
self.size = {4: 28, 8: 32}[abi.pointer_size]
if abi == peachpy.x86_64.abi.system_v_x86_64_abi:
# 64-bit
self.magic = 0xFEEDFACF
self.cpu_type = CpuType.x86_64
self.cpu_subtype = X86CpuSubType.all
else:
raise ValueError("Unsupported ABI: %s" % str(abi))
self.file_type = FileType.object
self.commands_count = 0
self.commands_size = 0
self.flags = 0
@staticmethod
def get_size(abi):
from peachpy.abi import ABI
assert isinstance(abi, ABI)
assert abi.pointer_size in [4, 8]
        # mach_header is 7 uint32 fields (28 bytes); mach_header_64 adds a
        # 4-byte reserved field (32 bytes), matching __init__ and encode().
        return {4: 28, 8: 32}[abi.pointer_size]
def encode(self, encoder):
bytes = encoder.uint32(self.magic) + \
encoder.uint32(self.cpu_type) + \
encoder.uint32(self.cpu_subtype) + \
encoder.uint32(self.file_type) + \
encoder.uint32(self.commands_count) + \
encoder.uint32(self.commands_size) + \
encoder.uint32(self.flags)
if self.abi.pointer_size == 8:
bytes += bytearray(4)
return bytes
|
python
|
from requests.exceptions import ConnectionError, HTTPError, SSLError
from sentry.exceptions import PluginError
from django.utils.translation import ugettext_lazy as _
from sentry_youtrack.forms import VERIFY_SSL_CERTIFICATE
from sentry_youtrack.youtrack import YouTrackClient
class YouTrackConfiguration(object):
error_message = {
'client': _("Unable to connect to YouTrack."),
'project_unknown': _('Unable to fetch project'),
'project_not_found': _('Project not found: %s'),
'invalid_ssl': _("SSL certificate verification failed."),
'invalid_password': _('Invalid username or password.'),
'invalid_project': _('Invalid project: \'%s\''),
'missing_fields': _('Missing required fields.'),
'perms': _("User doesn't have Low-level Administration permissions."),
'required': _("This field is required.")}
def __init__(self, initial):
self.config = self.build_default_fields(initial)
self.client_errors = {}
if self.has_client_fields(initial):
client = self.get_youtrack_client(initial)
yt_project = initial.get('project')
if client:
choices = []
if yt_project:
choices = self.get_ignore_field_choices(client, yt_project)
self.config.append({
'name':'ignore_fields',
'label':'Ignore Fields',
'type':'select',
'choices':choices,
'required':False,
'help': 'These fields will not appear on the form.',
})
choices = self.get_project_field_choices(client, yt_project)
self.config.append({
'name':'project',
'label':'Linked Project',
'type':'select',
'choices': choices,
'required':True,})
self.__add_default_tags()
def has_client_fields(self, initial):
return initial.get('password') and initial.get('username') and initial.get('url')
def build_default_fields(self, initial):
url = {'name':'url',
'label':'YouTrack Instance URL',
'type':'text',
'required':True,
'placeholder': 'e.g. "https://yoursitename.myjetbrains.com/youtrack/"',}
username = {'name':'username',
'label':'Username',
'type':'text',
'required':True,
'help': 'User should have admin rights.',}
password = {'name':'password',
'label':'Password',
'type':'secret',
'required':False,
'help': 'Only enter a password if you want to change it.',}
if initial.get('password'):
password['has_saved_value'] = True
return [url, username, password]
def __add_default_tags(self):
self.config.append({'name':'default_tags',
'label':'Default Tags',
'type':'text',
'required':False,
'placeholder': 'e.g. sentry',
'help': 'Comma-separated list of tags.',})
def get_youtrack_client(self, data, additional_params=None):
yt_settings = {
'url': data.get('url'),
'username': data.get('username'),
'password': data.get('password'),
'verify_ssl_certificate': VERIFY_SSL_CERTIFICATE}
if additional_params:
yt_settings.update(additional_params)
client = None
try:
client = YouTrackClient(**yt_settings)
except (HTTPError, ConnectionError) as e:
if e.response is not None and e.response.status_code == 403:
self.client_errors['username'] = self.error_message[
'invalid_password']
else:
self.client_errors['url'] = self.error_message['client']
except (SSLError, TypeError) as e:
self.client_errors['url'] = self.error_message['invalid_ssl']
if client:
try:
client.get_user(yt_settings.get('username'))
except HTTPError as e:
if e.response.status_code == 403:
self.client_errors['username'] = self.error_message['perms']
client = None
return client
def get_ignore_field_choices(self, client, project):
try:
fields = list(client.get_project_fields_list(project))
except HTTPError:
self.client_errors['project'] = self.error_message[
'invalid_project'] % (project,)
else:
names = [field['name'] for field in fields]
return zip(names, names)
return []
def get_project_field_choices(self, client, project):
choices = [(' ', u"- Choose project -")]
try:
projects = list(client.get_projects())
except HTTPError:
self.client_errors['project'] = self.error_message[
'invalid_project'] % (project, )
else:
for project in projects:
display = "%s (%s)" % (project['name'], project['id'])
choices.append((project['id'], display))
return choices
def get_project_fields_list(self, client, project_id):
try:
return list(client.get_project_fields_list(project_id))
except (HTTPError, ConnectionError) as e:
if e.response is not None and e.response.status_code == 404:
self.client_errors['project'] = self.error_message['project_not_found'] % project_id
else:
self.client_errors['project'] = self.error_message['project_unknown']
def get_projects(self, client):
try:
return list(client.get_projects())
except (HTTPError, ConnectionError) as e:
if e.response is not None and e.response.status_code == 404:
self.client_errors['project'] = self.error_message['project_not_found'] % project_id
else:
self.client_errors['project'] = self.error_message['project_unknown']
|
python
|
from typing import List

class Solution:
    def searchMatrix(self, matrix: List[List[int]], target: int) -> bool:
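        # Minimal sketch assuming the usual contract (LeetCode 74): rows are
        # sorted and each row starts after the previous row ends, so the
        # matrix is one flattened sorted list that can be binary-searched.
        if not matrix or not matrix[0]:
            return False
        rows, cols = len(matrix), len(matrix[0])
        lo, hi = 0, rows * cols - 1
        while lo <= hi:
            mid = (lo + hi) // 2
            val = matrix[mid // cols][mid % cols]
            if val == target:
                return True
            if val < target:
                lo = mid + 1
            else:
                hi = mid - 1
        return False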
|
python
|
import signal
import sys
import time
from collections import deque
import traceback
from picrosolve.game.cell import CellList
from .strategies.all import ALL_STRATEGIES
class Solver(object):
def __init__(self, board, strategies=None, debug=False):
self._board = board
if not strategies:
strategies = ALL_STRATEGIES(debug)
self._strategies = strategies
self._queue = deque()
self._debug = debug
self.working = None
self.working_strat_name = None
@property
def board(self):
return self._board
@property
def strategies(self):
return self._strategies
@property
def queue(self):
return self._queue
@property
def debug(self):
return self._debug
def dump_status(self, message):
self.print_status(message)
print("=== Dumping Queue ===")
for seq in self.queue:
print(u"> Clues: {}, Cells: {}".format(seq[0], CellList(seq[1])))
if self.working:
print(u"> Working Strategy: {}, Clue: {}, Cells: {}".format(self.working_strat_name, self.working[0], CellList(self.working[1])))
def solve(self):
signal.signal(
signal.SIGUSR1,
lambda x, y: self.dump_status(signal.getsignal(x))
)
signal.signal(
signal.SIGUSR2,
lambda x, y: print(x, y)
)
try:
self._solve()
except (Exception, KeyboardInterrupt) as e:
self.dump_status("Caught Exception")
print(str(e))
traceback.print_tb(sys.exc_info()[2])
def d(self, *args, **kwargs):
if self.debug:
print(*args, **kwargs)
def _solve(self):
all_sequences = self.board.rows + self.board.cols
solved_sequences = lambda: sum([1 if seq.solved else 0 for seq in all_sequences])
solved_l = len(all_sequences)
solved = lambda: (solved_sequences() - solved_l) == 0
self.queue.extend([(s.clues, s.cells) for s in all_sequences])
while len(self.queue) > 0:
self.d("Queue depth: {}".format(len(self.queue)))
self.working = self.queue.popleft()
for strat in self.strategies:
self.working_strat_name = strat.name
new_seqs = strat.apply_strategy(*self.working)
if len(new_seqs):
self.working = new_seqs[0]
if len(new_seqs) > 1:
self.queue.extend(new_seqs[1:])
else:
self.working = None
self.working_strat_name = None
break
if self.working:
self.queue.append(self.working)
if solved():
self.d("Queue: ")
for row in self.queue:
self.d(">> Clues: {}, Cells: {}".format(row[0], CellList(row[1])))
self.print_status("Solved")
def print_status(self, message):
solved = sum([1 if s.solved else 0 for s in self.board.rows + self.board.cols])
len_seq = len(self.board.rows) + len(self.board.cols)
print(" ==== {} ====".format(message))
print(self.board)
print("There are {}/{} solved sequences".format(solved, len_seq))
|
python
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import aredis
import asyncio
import pytest
import sys
from unittest.mock import Mock
from distutils.version import StrictVersion
_REDIS_VERSIONS = {}
async def get_version(**kwargs):
params = {'host': 'localhost', 'port': 6379, 'db': 0}
params.update(kwargs)
key = '%s:%s' % (params['host'], params['port'])
if key not in _REDIS_VERSIONS:
client = aredis.StrictRedis(**params)
_REDIS_VERSIONS[key] = (await client.info())['redis_version']
client.connection_pool.disconnect()
return _REDIS_VERSIONS[key]
def skip_if_server_version_lt(min_version):
loop = asyncio.get_event_loop()
version = StrictVersion(loop.run_until_complete(get_version()))
check = version < StrictVersion(min_version)
return pytest.mark.skipif(check, reason="")
def skip_python_version_lt(min_version):
min_version = tuple(map(int, min_version.split('.')))
check = sys.version_info[:2] < min_version
return pytest.mark.skipif(check, reason="")
@pytest.fixture()
def r(event_loop):
return aredis.StrictRedis(client_name='test', loop=event_loop)
class AsyncMock(Mock):
def __init__(self, *args, **kwargs):
super(AsyncMock, self).__init__(*args, **kwargs)
def __await__(self):
future = asyncio.Future(loop=self.loop)
future.set_result(self)
result = yield from future
return result
@staticmethod
def pack_response(response, *, loop):
future = asyncio.Future(loop=loop)
future.set_result(response)
return future
def _gen_mock_resp(r, response, *, loop):
mock_connection_pool = AsyncMock(loop=loop)
connection = AsyncMock(loop=loop)
connection.read_response.return_value = AsyncMock.pack_response(response, loop=loop)
mock_connection_pool.get_connection.return_value = connection
r.connection_pool = mock_connection_pool
return r
@pytest.fixture()
def mock_resp_role(event_loop):
r = aredis.StrictRedis(loop=event_loop)
response = [b'master', 169, [[b'172.17.0.2', b'7004', b'169']]]
return _gen_mock_resp(r, response, loop=event_loop)
|
python
|
import urllib2
from bs4 import BeautifulSoup
import re
response = urllib2.urlopen("http://www.baidu.com")
# html_doc = response.read()
html_doc = '<div id="u_sp" class="s-isindex-wrap s-sp-menu"> <a href="http://www.nuomi.com/?cid=002540" target="_blank" class="mnav">糯米</a> <a href="http://news.baidu.com" target="_blank" class="mnav">新闻</a> <a href="http://www.hao123.com" target="_blank" class="mnav">hao123</a> <a href="http://map.baidu.com" target="_blank" class="mnav">地图</a> <a href="http://v.baidu.com" target="_blank" class="mnav">视频</a> <a href="http://tieba.baidu.com" target="_blank" class="mnav">贴吧</a><a id="s_username_top" class="s-user-name-top" data-tid="2004" href="http://i.baidu.com/" target="_blank"><span class="user-name">枼心</span></a><a id="s_usersetting_top" href="javascript:;" name="tj_settingicon" class="pf s-user-setting-top"><span class="setting-text">设置</span></a><a href="http://www.baidu.com/more/" name="tj_briicon" class="s_bri" target="_blank"> 更多产品</a><div id="s_user_name_menu" class="s-isindex-wrap s-user-set-menu menu-top" style="right: 128px; display: none;"><div><a href="http://vip.baidu.com/pcui/show/ucenterindex?vip_frm=super_account" target="_blank"> 我的VIP </a> <a href="http://i.baidu.com/center" target="_blank" data-tid="1000"> 个人中心 </a> <a href="http://passport.baidu.com/" data-tid="1001" target="_blank"> 帐号设置 </a> <a class="s-feedback" style="overflow:hidden" href="#" onclick="return false;">意见反馈</a> <a class="quit" style="overflow:hidden" href="#" onclick="return false;"> 退出 </a> </div> <span class="menu-arrow"> <em></em> </span> </div><div id="s_user_setting_menu" class="s-isindex-wrap s-user-set-menu menu-top" style="display:none;"><div> <a href="//www.baidu.com/gaoji/preferences.html" target="_blank"> 搜索设置 </a> <a href="//www.baidu.com/gaoji/advanced.html" target="_blank"> 高级搜索 </a> <a href="http://i.baidu.com/my/history?from=index" target="_blank"> 搜索历史 </a> <a class="s-feedback" style="overflow:hidden" href="#" onclick="return false;"> 意见反馈 </a> </div> <span class="menu-arrow"> <em></em> </span> </div></div>'
soup = BeautifulSoup(html_doc, 'html.parser', from_encoding='utf-8')
print "获取所有的链接"
links = soup.find_all('a')
for link in links:
print link.name, link['href'], link.get_text()
print "获取单一的链接"
link_node = soup.find('a', href="http://passport.baidu.com/")
print link_node.name, link_node["href"], link_node["data-tid"], link_node.get_text()
print "获取正则匹配"
link_node = soup.find('a', href=re.compile(r"history"))
print link_node.name, link_node["href"], link_node.get_text()
# print link_node["data-tid"]
print "获取class节点"
span_node = soup.find('span', class_="menu-arrow")
print span_node.name, span_node.get_text()
|
python
|
from kapteyn import maputils
from matplotlib import pylab as plt
header = {'NAXIS': 2 ,'NAXIS1':100 , 'NAXIS2': 100 ,
'CDELT1': -7.165998823000E-03, 'CRPIX1': 5.100000000000E+01 ,
'CRVAL1': -5.128208479590E+01, 'CTYPE1': 'RA---NCP', 'CUNIT1': 'DEGREE ',
'CDELT2': 7.165998823000E-03 , 'CRPIX2': 5.100000000000E+01,
'CRVAL2': 6.015388802060E+01 , 'CTYPE2': 'DEC--NCP ', 'CUNIT2': 'DEGREE',
'CROTA2': 80
}
fig = plt.figure(figsize=(7,7))
fig.suptitle("Messy plot. Rotation is 80 deg.", fontsize=14, color='r')
fig.subplots_adjust(left=0.18, bottom=0.10, right=0.90,
top=0.90, wspace=0.95, hspace=0.20)
frame = fig.add_subplot(2,2,1)
f = maputils.FITSimage(externalheader=header)
annim = f.Annotatedimage(frame)
xpos = -0.42
ypos = 1.2
grat = annim.Graticule()
grat.setp_axislabel(plotaxis=0, xpos=xpos)
frame.set_title("Default", y=ypos)
frame2 = fig.add_subplot(2,2,2)
annim2 = f.Annotatedimage(frame2)
grat2 = annim2.Graticule()
grat2.setp_axislabel(plotaxis=0, xpos=xpos)
grat2.set_tickmode(mode="sw")
frame2.set_title("Switched ticks", y=ypos)
frame3 = fig.add_subplot(2,2,3)
annim3 = f.Annotatedimage(frame3)
grat3 = annim3.Graticule()
grat3.setp_axislabel(plotaxis=0, xpos=xpos)
grat3.set_tickmode(mode="na")
frame3.set_title("Only native ticks", y=ypos)
frame4 = fig.add_subplot(2,2,4)
annim4 = f.Annotatedimage(frame4)
grat4 = annim4.Graticule()
grat4.setp_axislabel(plotaxis=0, xpos=xpos)
grat4.set_tickmode(plotaxis=['bottom','left'], mode="Switch")
grat4.setp_ticklabel(plotaxis=['top','right'], visible=False)
frame4.set_title("Switched and cleaned", y=ypos)
maputils.showall()
|
python
|
import neptune.new as neptune
import os
from GTApack.GTA_hotloader import GTA_hotloader
from GTApack.GTA_Unet import GTA_Unet
from GTApack.GTA_tester import GTA_tester
from torchvision import datasets, transforms
from torch.optim import SGD, Adam
from torch.optim.lr_scheduler import (ReduceLROnPlateau, CyclicLR,
CosineAnnealingLR)
from torch.utils.data import DataLoader, random_split
import torch
import torch.nn as nn  # nn.MSELoss is instantiated for every network below
import numpy as np
import time
from neptune.new.types import File
import matplotlib.pyplot as plt
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print(device)
# Set up the datasets
np.random.seed(42)
val_set, train_set = torch.utils.data.random_split(
np.random.randint(low = 1, high = 4962, size = 500),
[60, 440],
generator=torch.Generator().manual_seed(42))
test_set = np.random.randint(low = 1, high = 858, size = 100)
valload = GTA_hotloader(path = "C:/Users/Marc/Desktop/Billeder/train/",
width = 400, height = 300, ind = val_set,
device = device)
trainload = GTA_hotloader(path = "C:/Users/Marc/Desktop/Billeder/train/",
width = 400, height = 300, ind = train_set,
device = device)
testload = GTA_hotloader(path = "C:/Users/Marc/Desktop/Billeder/test-val/",
width = 400, height = 300, ind = test_set,
device = device)
batch_size = 1
# Set up the dataloaders:
valloader = torch.utils.data.DataLoader(valload,
batch_size=batch_size,
shuffle=True,
num_workers=0)
trainloader = torch.utils.data.DataLoader(trainload,
batch_size=batch_size,
shuffle=True,
num_workers=0)
testloader = torch.utils.data.DataLoader(testload,
batch_size=batch_size,
shuffle=True,
num_workers=0)
token = os.getenv('Neptune_api')
run = neptune.init(
project="Deep-Learning-test/Deep-Learning-Test",
api_token=token,
)
nEpoch = 50
# Network 1
params = {"optimizer":"SGD", "optimizer_learning_rate":0.01,
"optimizer_momentum": 0.9, "loss_function":"MSEloss",
"model":"GTA_Unet"}
run[f"network1/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = SGD(model.parameters(), lr=0.01, momentum=0.9)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network1/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network1/validation_loss"].log(w)
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network1.pt")
run[f"network1/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network1.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network1/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network1/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
# Network 2:
params = {"optimizer":"SGD", "optimizer_momentum": 0.9,
"optimizer_learning_rate": 0.1, "loss_function":"MSEloss",
"model":"GTA_Unet", "scheduler":"ReduceLROnPlateau",
"scheduler_patience":3, "scheduler_threshold":0.01}
run[f"network2/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = SGD(model.parameters(), lr=0.1, momentum=0.9)
scheduler = ReduceLROnPlateau(optimizer, 'min', patience=3, threshold=0.01)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
run[f"network2/learning_rate"].log(optimizer.param_groups[0]['lr'])
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network2/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network2/validation_loss"].log(w)
scheduler.step(w)
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network2.pt")
run[f"network2/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network2.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network2/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network2/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
# Network 3
params = {"optimizer":"Adam", "optimizer_learning_rate": 0.1,
"loss_function":"MSEloss", "model":"GTA_Unet",
"scheduler":"ReduceLROnPlateau", "scheduler_patience":3,
"scheduler_threshold":0.01}
run[f"network3/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = Adam(model.parameters(), lr=0.1)
scheduler = ReduceLROnPlateau(optimizer, 'min', patience=3, threshold=0.01)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
run[f"network3/learning_rate"].log(optimizer.param_groups[0]['lr'])
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network3/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network3/validation_loss"].log(w)
scheduler.step(w)
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network3.pt")
run[f"network3/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network3.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network3/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network3/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
# Network 4
params = {"optimizer":"SGD", "optimizer_momentum": 0.9,
"optimizer_learning_rate": 0.1, "loss_function":"MSEloss",
"model":"GTA_Unet", "scheduler":"CyclicLR",
"scheduler_base_lr":0.01, "scheduler_max_lr":0.1,
"scheduler_step_size_up":10}
run[f"network4/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = SGD(model.parameters(), lr=0.01, momentum=0.9)
scheduler = CyclicLR(optimizer, base_lr=0.01, max_lr=0.1, step_size_up=10)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
run[f"network4/learning_rate"].log(optimizer.param_groups[0]['lr'])
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network4/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network4/validation_loss"].log(w)
scheduler.step()
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network4.pt")
run[f"network4/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network4.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network4/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network4/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
# network 5
params = {"optimizer":"Adam", "optimizer_learning_rate": 0.1,
"loss_function":"MSEloss", "model":"GTA_Unet", "scheduler":"CyclicLR",
"scheduler_base_lr":0.01, "scheduler_max_lr":0.1,
"scheduler_step_size_up":10, "scheduler_cycle_momentum":"False"}
run[f"network5/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = Adam(model.parameters(), lr=0.1)
scheduler = CyclicLR(optimizer, base_lr=0.001, max_lr=0.1, step_size_up=10, cycle_momentum = False)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
run[f"network5/learning_rate"].log(optimizer.param_groups[0]['lr'])
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network5/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network5/validation_loss"].log(w)
scheduler.step()
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network5.pt")
run[f"network5/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network5.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network5/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network5/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
# network 6
params = {"optimizer":"SGD", "optimizer_learning_rate": 0.1,
"loss_function":"MSEloss", "model":"GTA_Unet", "scheduler":"CyclicLR",
"scheduler_base_lr":0.01, "scheduler_max_lr":0.1,
"scheduler_step_size_up":1, "scheduler_step_size_down":4}
run[f"network6/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = SGD(model.parameters(), 0.1)
scheduler = CyclicLR(optimizer, base_lr=0.01, max_lr=0.1, step_size_up=1, step_size_down=4)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
run[f"network6/learning_rate"].log(optimizer.param_groups[0]['lr'])
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network6/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network6/validation_loss"].log(w)
scheduler.step()
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network6.pt")
run[f"network6/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network6.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network6/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network6/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
# Network 7
params = {"optimizer1":"SGD", "optimizer1_learning_rate": 0.5,
"loss_function":"MSEloss", "model":"GTA_Unet",
"scheduler1":"CosineAnnealingLR",
"scheduler1_T_max":10, "scheduler1_eta_min":0.1,
"optimizer2":"SGD", "optimizer2_learning_rate": 0.1,
"scheduler2":"CosineAnnealingLR",
"scheduler2_T_max":10, "scheduler2_eta_min":0.01,}
run[f"network7/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = SGD(model.parameters(), 0.5)
scheduler = CosineAnnealingLR(optimizer, T_max = 10, eta_min = 0.1)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
run[f"network7/learning_rate"].log(optimizer.param_groups[0]['lr'])
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network7/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network7/validation_loss"].log(w)
if iEpoch == 30:
optimizer = SGD(model.parameters(), 0.1)
scheduler = CosineAnnealingLR(optimizer, T_max = 10, eta_min = 0.01)
scheduler.step()
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network7.pt")
run[f"network7/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network7.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network7/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network7/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
run.stop()
|
python
|
# -*- coding: utf-8 -*-
import json
import re
import requests
import urllib
import logging
logger = logging.getLogger('nova-playlist')
class YouTubeAPI(object):
clientID = 'CLIENTID'
clientSecret = 'CLIENTSECRET'
refreshToken = 'REFRESHTOKEN'
accessToken = None
def get_access_token(self):
payload = {'client_id': self.clientID,
'client_secret': self.clientSecret,
'refresh_token': self.refreshToken,
'grant_type': 'refresh_token'}
r = requests.post('https://accounts.google.com/o/oauth2/token', data=payload)
self.accessToken = r.json()['access_token']
def search_youtube_id(self, title):
try:
if not self.accessToken:
self.get_access_token()
headers = {'Authorization': 'Bearer ' + self.accessToken}
url = 'https://www.googleapis.com/youtube/v3/search'
r = requests.get(url, params={'part': 'snippet',
'q': title,
'type': 'video'}, headers=headers)
items = r.json()['items']
if len(items) == 0:
youtube_id = None
logger.warning("No video found for %s" % title)
else:
youtube_id = items[0]['id']['videoId']
logger.info("Found %s for song %s" % (youtube_id, title))
return youtube_id
        except Exception:
            logger.warning('YouTube API search error, falling back to scraper')
return self.scrap_youtube_id(title)
def scrap_youtube_id(self, title):
url = "http://www.youtube.com/results?search_query=%s" % urllib.quote_plus(title)
page = requests.get(url, timeout=15)
if 'Aucune vid' in page.content:
logger.warning("No video found for %s" % str(self))
return None
else:
youtube_id = re.findall('href="\/watch\?v=(.*?)[&;"]', page.content)[0]
logger.info("Found %s for song %s" % (youtube_id, str(self)))
return youtube_id
def clean_channel_playlist(self, playlist_id):
if not self.accessToken:
self.get_access_token()
headers = {'Authorization': 'Bearer ' + self.accessToken}
url = 'https://www.googleapis.com/youtube/v3/playlistItems'
r = requests.get(url, params={'part': 'snippet',
'playlistId': playlist_id,
'maxResults': 50}, headers=headers)
for video in r.json()['items']:
vd = requests.delete(url, params={'id': video['id']}, headers=headers)
if vd.status_code != 204:
logger.error("Error removing song from playlist %s" % (vd.text))
def build_channel_playlist(self, playlist_id, songs):
if not self.accessToken:
self.get_access_token()
url = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet'
headers = {'Authorization': 'Bearer ' + self.accessToken,
'Content-Type': 'application/json'}
songPosition = -1
for song in songs:
if song.youtube_id:
songPosition += 1
payload = json.dumps({'snippet':
{
'playlistId': playlist_id,
'resourceId': {
'kind': 'youtube#video',
'videoId': song.youtube_id
},
'position': songPosition
}
})
logger.debug('Sending payload %s' % (payload))
r = requests.post(url, data=payload, headers=headers)
if r.status_code != 200:
logger.error("Error publishing %s : %s" % (song.artist + ' / ' + song.title, r.text))
|
python
|
import asyncio
import datetime
def get_time():
d = datetime.datetime.now()
return d.strftime('%M:%S')
async def coro(group_id, coro_id):
print('group{}-task{} started at:{}'.format(group_id, coro_id, get_time()))
    await asyncio.sleep(coro_id)  # simulate a slow, file-read-style I/O wait
return 'group{}-task{} done at:{}'.format(group_id, coro_id, get_time())
loop = asyncio.get_event_loop()
# Create three groups of tasks.
tasks1 = [asyncio.ensure_future(coro(1, i)) for i in range(1, 5)]
tasks2 = [asyncio.ensure_future(coro(2, i)) for i in range(5, 6)]
tasks3 = [asyncio.ensure_future(coro(3, i)) for i in range(7, 10)]
group1 = asyncio.gather(*tasks1)  # bundle group 1's coroutines into group1
group2 = asyncio.gather(*tasks2)  # bundle group 2's coroutines into group2
group3 = asyncio.gather(*tasks3)  # bundle group 3's coroutines into group3
# Aggregate the three groups into one top-level group holding every coroutine.
all_groups = asyncio.gather(group1, group2, group3)
all_group_result = loop.run_until_complete(all_groups)
for index, group in enumerate(all_group_result):  # report each group's results
print('group {} result:{}'.format(index + 1, group))
loop.close()
|
python
|
import cv2
def draw_yolo_detections(image, detections, color=(0,255,0)):
img = image.copy()
with open("..//Data//model//yolov4/coco.names", 'rt') as f:
classes = f.read().rstrip('\n').split('\n')
for detect in detections:
bbox = detect[1]
category = classes[int(detect[0])]
cv2.rectangle(img, bbox, color, 2)
cv2.putText(img, str(category), (bbox[0], bbox[1] - 5),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2, cv2.LINE_AA)
return img
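if __name__ == '__main__':
    # Hypothetical usage sketch: the image path and detection values are
    # placeholders, and the function still expects the repo's coco.names file
    # at the relative path it opens. Detections follow the
    # (class_id, (x, y, w, h)) layout the loop above indexes into.
    image = cv2.imread('sample.jpg')
    detections = [(0, (50, 60, 120, 200))]
    annotated = draw_yolo_detections(image, detections)
    cv2.imwrite('annotated.jpg', annotated)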
|
python
|
import dataclasses
import vk_api
from vk_api import VkUpload
from vk_api.bot_longpoll import VkBotLongPoll
from vk_api.longpoll import VkLongPoll, VkEventType
from vk_api.utils import get_random_id
@dataclasses.dataclass
class __cfg__:
""" Bot config is struct for every bot. Easy to use because of fields """
name: str
logger: str
platform: str
logindata: str
description: str
def __post_init__(self):
self.compressed: dict = {
"name": self.name,
"logger": self.logger,
"platform": self.platform,
"logindata": self.logindata,
"description": self.description,
}
class __vk__:
def __init__(self, cfg):
try:
self.vk_session = vk_api.VkApi(token=cfg.logindata)
self.vk = self.vk_session.get_api()
self.longpool = VkBotLongPoll(self.vk_session)
self.cfg = cfg
except vk_api.exceptions.ApiError as err:
print(f"\033[1;31m║\033[0m --------------------- Error! ---------------------\n\033[1;31m║\033[0m - Error: {err}\n\033[1;31m║\033[0m - Token: {token}")
def listen(self, private: object, public: object):
print(f"\033[1;32m║\033[0m ------------------ Entered main loop ------------------\n\033[1;32m║\033[0m - Name: {self.cfg.name}\n\033[1;32m║\033[0m - Platform: {self.cfg.platform}\n\033[1;32m║\033[0m - Description: {self.cfg.description}")
for event in self.longpool.listen():
if event.type == VkEventType.MESSAGE_NEW:
if event.from_user and event.text:
text = private(event)
print("Theese scenaraio")
if text == "None": continue
self.vk.messages.send(
user_id=event.user_id,
random_id=get_random_id(),
message=text,
)
                elif event.from_chat and event.text:
                    print("Chat message scenario")
                    text = public(event)
if text == "None": continue
self.vk.messages.send(
group_id=event.chat_id,
random_id=get_random_id(),
message=text,
)
else:
print(event)
class Core:
def __init__(self, cfg: dict, prettyPrint: bool = True):
__cfg = __cfg__(name=cfg['name'],
logger=cfg['logger'],
platform=cfg['platform'],
logindata=cfg['token'],
description=cfg['description'])
self.cfg = __cfg
self.prt = prettyPrint
if self.prt:
print(f"""\033[1;34m║\033[0m ------------------ Initiated ... ------------------
\033[1;34m║\033[0m - Name: {self.cfg.name}
\033[1;34m║\033[0m - Platform: {self.cfg.platform}
\033[1;34m║\033[0m - Description: {self.cfg.description}
\033[1;34m║\033[0m - Logger Name: {self.cfg.logger}
""")
def __vk_run__(self, private: object, public: object):
if self.prt:
print(f"""\033[1;34m║\033[0m ------------------ Entered vk ... ------------------
\033[1;34m║\033[0m - Name: {self.cfg.name}
\033[1;34m║\033[0m - Description: {self.cfg.description}
""")
b = __vk__(self.cfg)
b.listen(private, public)
def run(self, commands: tuple[object]):
if self.cfg.platform == 'vk':
            self.__vk_run__(commands[0], commands[1])
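if __name__ == '__main__':
    # Hypothetical wiring sketch: the token and other config values are
    # placeholders, and the handlers just illustrate the expected signature.
    def private_handler(event):
        return 'pong' if event.text == 'ping' else 'None'

    def public_handler(event):
        return 'None'  # listen() skips replies equal to the string "None"

    bot = Core({'name': 'demo-bot', 'logger': 'demo', 'platform': 'vk',
                'token': 'VK_GROUP_TOKEN', 'description': 'demo bot'})
    bot.run((private_handler, public_handler))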
|
python
|
# F2x installation script (setup.py)
#
# Copyright 2018 German Aerospace Center (DLR)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import setuptools
from distutils.core import setup
package_data = {
'F2x.parser.plyplus.grammar': ["*.g"],
}
cmdclass = {}
command_options = {}
sys.path.append(os.path.abspath('src'))
try:
# Try to extract program information from sources.
import F2x
from F2x.template import collect_template_data
name = F2x.program_name
version = F2x.get_version_string()
release = F2x.get_version_string(full=True)
for package, data_files in collect_template_data():
package_data[package.__name__] = package_data.get(package.__name__, []) + data_files
except ImportError:
    # Fallback: set them manually :(
name = 'F2x'
version = '0.0.0'
release = '0.0'
try:
from F2x.distutils.command import build_sphinx
cmdclass['build_sphinx'] = build_sphinx.build_sphinx
command_options['build_sphinx'] = {
'project': ('setup.py', name),
'version': ('setup.py', version),
'release': ('setup.py', release),
'source_dir': ('setup.py', 'doc/src'),
'build_dir': ('setup.py', 'doc'),
}
except ImportError:
pass
setup(
name=name,
version=version,
description='Template-based Fortran wrapper.',
author='Michael Meinel',
author_email='[email protected]',
url='http://www.dlr.de/sc',
cmdclass=cmdclass,
command_options=command_options,
packages=setuptools.find_packages('src'),
package_dir={ '': 'src' },
package_data=package_data,
install_requires=[
'plyplus',
'jinja2',
'numpy',
],
extras_require={
'cython': ['Cython', ],
'docs': [
'six',
'sphinx',
'sphinx-argparse',
],
'tests': [
'pytest-runner',
'pytest',
],
},
entry_points={
'console_scripts': [
'F2x=F2x.runtime.main:main',
'F2x-d=F2x.runtime.daemon:main',
],
},
)
|
python
|
from hashlib import md5
def part_1(data):
i, p = 0, ""
while True:
if len(p) == 8:
break
hash = md5((data + str(i)).encode()).hexdigest()
if hash[:5] == "00000":
p += hash[5]
i += 1
return p
def part_2(data):
i, p = 0, "________"
while True:
if "_" not in p:
break
hash = md5((data + str(i)).encode()).hexdigest()
if hash[:5] == "00000":
if hash[5].isdigit() and int(hash[5]) < 8 and p[int(hash[5])] == "_":
p = p[:int(hash[5])] + hash[6] + p[int(hash[5])+1:]
i += 1
return p
def test():
assert(part_1("abc") == "18f47a30")
assert(part_2("abc") == "05ace8e3")
|
python
|
import logging
from typing import Iterable
from septentrion import core, db, files, migration, style, versions
logger = logging.getLogger(__name__)
def initialize(settings_kwargs):
quiet = settings_kwargs.pop("quiet", False)
stylist = style.noop_stylist if quiet else style.stylist
settings = core.initialize(**settings_kwargs)
return {"settings": settings, "stylist": stylist}
def show_migrations(**settings_kwargs):
lib_kwargs = initialize(settings_kwargs)
core.describe_migration_plan(**lib_kwargs)
def migrate(**settings_kwargs):
lib_kwargs = initialize(settings_kwargs)
migration.migrate(**lib_kwargs)
def is_schema_initialized(**settings_kwargs):
lib_kwargs = initialize(settings_kwargs)
return db.is_schema_initialized(settings=lib_kwargs["settings"])
def build_migration_plan(**settings_kwargs):
lib_kwargs = initialize(settings_kwargs)
schema_version = core.get_best_schema_version(settings=lib_kwargs["settings"])
return core.build_migration_plan(
settings=lib_kwargs["settings"], schema_version=schema_version
)
def fake(version: str, **settings_kwargs):
lib_kwargs = initialize(settings_kwargs)
fake_version = versions.Version.from_string(version)
migration.create_fake_entries(version=fake_version, **lib_kwargs)
def load_fixtures(version: str, **settings_kwargs) -> None:
lib_kwargs = initialize(settings_kwargs)
init_version = versions.Version.from_string(version)
migration.load_fixtures(init_version=init_version, **lib_kwargs)
def get_known_versions(**settings_kwargs) -> Iterable[str]:
lib_kwargs = initialize(settings_kwargs)
known_versions = files.get_known_versions(settings=lib_kwargs["settings"])
return [version.original_string for version in known_versions]
|
python
|
# Copyright (c) 2020, Huawei Technologies.All rights reserved.
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
import sys
import copy
from common_utils import TestCase, run_tests
from common_device_type import dtypes, instantiate_device_type_tests
from util_test import create_common_tensor
class TestTrueDivide(TestCase):
def generate_data(self,min_d, max_d, shape, dtype):
input1 = np.random.uniform(min_d, max_d, shape).astype(dtype)
input2 = np.random.uniform(min_d, max_d, shape).astype(dtype)
# modify from numpy.ndarray to torch.tensor
npu_input1 = torch.from_numpy(input1)
npu_input2 = torch.from_numpy(input2)
return npu_input1, npu_input2
def generate_single_data(self, min_d, max_d, shape, dtype):
input1 = np.random.uniform(min_d, max_d, shape).astype(dtype)
npu_input1 = torch.from_numpy(input1)
return npu_input1
def generate_single_bool_data(self, min_d, max_d, shape):
input1 = np.random.uniform(min_d, max_d, shape)
input1 = input1.reshape(-1)
for i in range(len(input1)):
if input1[i]<0.5:
input1[i] = 0
        input1 = input1.astype(np.bool_)
input1 = input1.reshape(shape)
npu_input1 = torch.from_numpy(input1)
return npu_input1
def cpu_op_exec(self, input1, input2):
output = torch.true_divide(input1,input2)
output = output.numpy()
return output
def npu_op_exec(self, input1, input2):
input1 = input1.to("npu")
input2 = input2.to("npu")
output = torch.true_divide(input1,input2)
output = output.to("cpu")
output = output.numpy()
return output
def npu_op_exec_scalar(self, input1, input2):
input1 = input1.to("npu")
output = torch.true_divide(input1,input2)
output = output.to("cpu")
output = output.numpy()
return output
def test_true_divide_float32_broadcast(self,device):
npu_input1 = self.generate_single_data(0, 100, (2,2), np.float32)
npu_input2 = self.generate_single_data(0, 100, (2), np.float32)
cpu_output = self.cpu_op_exec(npu_input1, npu_input2)
npu_output = self.npu_op_exec(npu_input1, npu_input2)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_float32(self,device):
npu_input1, npu_input2 = self.generate_data(0, 100, (4, 3), np.float32)
cpu_output = self.cpu_op_exec(npu_input1, npu_input2)
npu_output = self.npu_op_exec(npu_input1, npu_input2)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_int32(self,device):
npu_input1, npu_input2 = self.generate_data(0, 100, (4, 3), np.int32)
cpu_output = self.cpu_op_exec(npu_input1, npu_input2)
npu_output = self.npu_op_exec(npu_input1, npu_input2)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_bool(self, device):
npu_input1, npu_input2 = self.generate_data(0, 100, (2,2), np.float32)
npu_input3 = self.generate_single_bool_data(1, 1, (2, 2))
cpu_output = self.cpu_op_exec(npu_input1, npu_input3)
npu_output = self.npu_op_exec(npu_input1, npu_input3)
print(cpu_output, npu_output)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_bool_scalar(self, device):
npu_input1, npu_input2 = self.generate_data(0, 100, (2, 2), np.float32)
cpu_output = self.cpu_op_exec(npu_input1,True)
npu_output = self.npu_op_exec_scalar(npu_input1, True)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_scalar_int32_1(self,device):
npu_input1, npu_input2 = self.generate_data(0, 100, (2, 3), np.float32)
cpu_output = self.cpu_op_exec(npu_input1, 2)
npu_output = self.npu_op_exec_scalar(npu_input1, 2)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_scalar_int32_2(self,device):
npu_input1, npu_input2 = self.generate_data(0, 100, (2, 3), np.int32)
cpu_output = self.cpu_op_exec(npu_input1, 2)
npu_output = self.npu_op_exec_scalar(npu_input1, 2)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_scalar_float32(self,device):
npu_input1, npu_input2 = self.generate_data(0, 100, (2, 3), np.float32)
cpu_output = self.cpu_op_exec(npu_input1, 2.0)
npu_output = self.npu_op_exec_scalar(npu_input1, 2.0)
self.assertRtolEqual(cpu_output, npu_output)
instantiate_device_type_tests(TestTrueDivide, globals(), except_for='cpu')
if __name__ == "__main__":
run_tests()
|
python
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Date string field."""
from __future__ import absolute_import, print_function
import arrow
from arrow.parser import ParserError
from marshmallow import fields, missing
class DateString(fields.Date):
"""ISO8601-formatted date string."""
def _serialize(self, value, attr, obj):
"""Serialize an ISO8601-formatted date."""
try:
return super(DateString, self)._serialize(
arrow.get(value).date(), attr, obj)
except ParserError:
return missing
def _deserialize(self, value, attr, data):
"""Deserialize an ISO8601-formatted date."""
return super(DateString, self)._deserialize(value, attr,
data).isoformat()
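if __name__ == '__main__':
    # Hypothetical usage sketch, written against the marshmallow 2.x field API
    # that this module's _serialize/_deserialize signatures target.
    from marshmallow import Schema

    class RecordSchema(Schema):
        created = DateString()

    result = RecordSchema().dump({'created': '2018-03-02'})
    print(result.data)  # expected: {'created': '2018-03-02'}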
|
python
|
import unittest
import numpy as np
from modem.util.channel import Channel
def get_random(samples=2048):
"""Returns sequence of random comples samples """
return 2 * (np.random.sample((samples,)) + 1j * np.random.sample((samples,))) - (1 + 1j)
class test_channel(unittest.TestCase):
def setUp(self):
self.ch = Channel()
def test_awgn(self):
data_in = get_random(1024 * 1000)
data_out = self.ch.awgn(data_in, snr_db=0)
self.assertEqual(len(data_in), len(data_out))
self.assertAlmostEqual(
np.var(data_in), np.var(data_out) / 2.0, places=2)
def test_multipath(self):
data_in = np.zeros(10, dtype=complex)
data_in[2] = 1.0 + 0.0j
self.ch.impulse_response = np.arange(10) + 1j * np.arange(10)
data_out = self.ch.multipath(data_in)
np.testing.assert_array_almost_equal(
data_out[2:12], self.ch.last_impulse_response)
#self.assertAlmostEqual(np.linalg.norm(data_in), np.linalg.norm(data_out))
if __name__ == "__main__":
unittest.main()
|
python
|
# !/usr/bin/env python3
# -*- coding: utf-8 -*-
import urllib.request
requestUrl = 'http://www.tvapi.cn/movie/getMovieInfo'
webhead = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20100101 Firefox/23.0', 'charset':'utf-8'}
urlRequest = urllib.request.Request(url = requestUrl, headers = webhead)
with urllib.request.urlopen(urlRequest) as f:
    # Read and decode the response body (the request headers declare UTF-8).
    print(f.read().decode('utf-8'))
|
python
|
import ckan.logic as logic
import ckan.model as model
import unicodedata
import ckanext.hdx_users.model as umodel
import ckanext.hdx_user_extra.model as ue_model
import ckanext.hdx_theme.tests.hdx_test_base as hdx_test_base
class TestAboutPageController(hdx_test_base.HdxBaseTest):
#loads missing plugins
@classmethod
def _load_plugins(cls):
hdx_test_base.load_plugin('hdx_users hdx_user_extra hdx_theme')
@classmethod
def setup_class(cls):
super(TestAboutPageController, cls).setup_class()
umodel.setup()
ue_model.create_table()
def test_resulting_page(self):
testsysadmin = model.User.by_name('testsysadmin')
page = self._getAboutPage('license')
assert 'Data Licenses' in str(page.response), 'the url /about/license should redirect to the Data Licenses page when no user is logged in'
page = self._getAboutPage('license', testsysadmin.apikey)
assert 'Data Licenses' in str(page.response), 'the url /about/license should redirect to the Data Licenses page, even when the user is logged in'
page = self._getAboutPage('terms')
assert 'Terms of Service' in str(page.response), 'the url /about/terms should redirect to the Terms of Service page when no user is logged in'
page = self._getAboutPage('terms', testsysadmin.apikey)
assert 'Terms of Service' in str(page.response), 'the url /about/terms should redirect to the Terms of Service page, even when the user is logged in'
try:
page = self._getAboutPage('fake')
assert "The requested about page doesn't exist" in str(page.response), 'the url /about/fake should throw an error when no user is logged in'
except logic.ValidationError:
assert True
try:
page = self._getAboutPage('fake', testsysadmin.apikey)
assert "The requested about page doesn't exist" in str(page.response), 'the url /about/terms should throw an error, even when the user is logged in'
except logic.ValidationError:
assert True
def _getAboutPage(self, page, apikey=None):
url = '/about/' + page
if apikey:
page = self.app.get(url,headers={'Authorization':unicodedata.normalize('NFKD', apikey).encode('ascii','ignore')})
else:
page = self.app.get(url)
return page
|
python
|
# -*- coding: utf-8 -*-
"""
Utilities for analysis
Author: G.J.J. van den Burg
License: See LICENSE file.
Copyright: 2021, The Alan Turing Institute
"""
from collections import namedtuple
Line = namedtuple("Line", ["xs", "ys", "style", "label"])
def dict2tex(d):
items = []
for key, value in d.items():
if isinstance(value, dict):
value = "{\n" + dict2tex(value) + "%\n}"
if value is None:
items.append(f"{key}")
else:
items.append(f"{key}={value}")
return ",%\n".join(items)
|
python
|
"""
Dexter Legaspi - [email protected]
Class: CS 521 - Summer 2
Date: 07/22/2021
Term Project
Main view/window
"""
import tkinter as tk
from tkinter import messagebox, filedialog
from PIL import ImageTk, Image as PILImage
import appglobals
from appcontroller import AppController
from appstate import AppState
from image import Image
from imageattributes import ImageAttributes
from imagecatalog import ImageCatalog
import logging
from imagerating import ImageRating
logger = logging.getLogger(__name__)
BACKGROUND_LOGO_IMAGE_PATH = appglobals.path_data_root_door / 'bg-logo2.jpg'
DOGE_IMAGE_PATH = appglobals.path_data_root_door / 'doge.jpg'
class MainView(tk.Frame):
"""
Main View/frame
"""
def __init__(self, master, controller: AppController):
"""
Constructor
:param master:
:param controller:
"""
tk.Frame.__init__(self, master)
self.controller = controller
self.master = master
self.statusbar = None
self.img_frame = None
self.img_label = None
self.exif_label = None
self.rating_slider = None
self.exif_label_text = tk.StringVar()
self.render()
self.render_menu()
def render_menu(self):
"""
Render menu
:return:
"""
menu_bar = tk.Menu(self.master)
main_menu = tk.Menu(menu_bar, tearoff=False)
main_menu.add_command(label='Open...',
command=self.menu_open_directory)
main_menu.add_command(label='Save Ratings',
command=self.menu_save_ratings)
main_menu.add_command(label='Quit',
command=self.menu_command_quit)
menu_bar.add_cascade(label='File',
menu=main_menu)
self.master.config(menu=menu_bar)
def menu_save_ratings(self):
"""
save ratings
:return:
"""
logger.debug('save ratings...')
self.controller.save_ratings()
def menu_command_quit(self):
"""
Quit app
:return:
"""
logger.debug('quitting...')
if appglobals.app_config_confirm_on_exit:
logger.info('quit confirmation enabled')
if messagebox.askokcancel("Quit", "Do you want to quit?"):
self.master.destroy()
else:
self.master.destroy()
def menu_open_directory(self):
"""
open/select directory dialog
:return:
"""
selected_dir = filedialog.askdirectory(parent=self.master,
initialdir='~',
title='Select Images Directory')
logger.info("open directory: %s", selected_dir)
self.set_status_bar_text(f'Open directory: {selected_dir}')
catalog = ImageCatalog(directory=selected_dir)
logger.info("catalog statistics: %s", catalog.get_stats())
new_state = AppState(catalog)
self.controller.set_state(new_state)
initial_img = self.controller.get_image_at_current_index()
if initial_img is not None:
self.set_img(initial_img)
return selected_dir
def set_status_bar_text(self, status_text):
"""
sets the status bar text
:param status_text:
:return:
"""
self.statusbar.config(text=status_text)
def __init_img_widget(self):
"""
Image widget init
:return:
"""
path = BACKGROUND_LOGO_IMAGE_PATH
img = PILImage.open(path)
# this is a test for resizing
# https://stackoverflow.com/a/24745969/918858
maxsize = (appglobals.app_config_img_dimension[0],
appglobals.app_config_img_dimension[1])
logger.info('resizing image...')
img.thumbnail(maxsize, PILImage.ANTIALIAS)
self.img_frame = tk.Frame(self,
width=appglobals.app_config_win_dimension[0],
height=appglobals.app_config_win_dimension[1])
self.img_frame.pack(fill=tk.BOTH, expand=True)
pimg = ImageTk.PhotoImage(img)
img_attr = ImageAttributes(img)
self.img_label = tk.Label(self.img_frame, image=pimg)
self.img_label.image = pimg
self.img_label.pack()
self.exif_label_text.set(img_attr.get_formatted_exif())
self.exif_label = tk.Label(self.img_frame,
textvariable=self.exif_label_text,
justify=tk.LEFT)
# place the EXIF relative to the image
# https://stackoverflow.com/a/63625317/918858
self.exif_label.place(in_=self.img_label, y=10, x=10)
def set_img(self, img: Image):
"""
Set the image in the Main View
:param img:
:return:
"""
# load the raw image
raw_img = img.get_image_object()
maxsize = (appglobals.app_config_img_dimension[0],
appglobals.app_config_img_dimension[1])
logger.info('resizing image...')
raw_img.thumbnail(maxsize, PILImage.ANTIALIAS)
# create the new Tk image object
pimg = ImageTk.PhotoImage(raw_img)
# get the image attribute and update the label
img_attr = img.get_attributes()
self.exif_label_text.set(img_attr.get_formatted_exif())
# set the rating slider
self.rating_slider.set(int(img.get_rating()))
# set the status bar
self.set_status_bar_text(f'Current image: {img.get_name().upper()}')
# finally update the actual image
self.img_label.configure(image=pimg)
self.img_label.image = pimg
self.img_label.pack()
def render(self):
"""
Render UI
:return:
"""
self.master.title('Image Viewer')
self.pack(fill=tk.BOTH, expand=1)
self.__init_img_widget()
# status bar
self.statusbar = tk.Label(self, text='Ready.', bd=1,
relief=tk.SUNKEN, anchor=tk.W)
self.statusbar.pack(side=tk.BOTTOM, fill=tk.X)
# rating slider
self.rating_slider = tk.Scale(self, from_=0, to=5, orient=tk.HORIZONTAL,
command=self.slider_handle_rating)
self.rating_slider.pack(side=tk.LEFT, padx=20, pady=20)
self.rating_slider.set(0)
# navigation buttons
next_button = tk.Button(self,
text='Next Image',
command=self.button_next_image)
previous_button = tk.Button(self,
text='Previous Image',
command=self.button_previous_image)
next_button.pack(side=tk.RIGHT, padx=2, pady=10)
previous_button.pack(side=tk.RIGHT, padx=2, pady=10)
def button_next_image(self):
"""
next image button handler
:return:
"""
logger.info("next image button pressed")
self.controller.next_image()
img = self.controller.get_image_at_current_index()
if img is not None:
self.set_img(img)
def button_previous_image(self):
"""
        previous image button handler
:return:
"""
logger.info("previous image button pressed")
self.controller.previous_image()
img = self.controller.get_image_at_current_index()
if img is not None:
self.set_img(img)
def slider_handle_rating(self, value):
"""
handling of rating slider
:return:
"""
logger.info("setting rating slider...")
logger.info("current slider setting: %s", value)
img = self.controller.get_image_at_current_index()
img.set_rating(ImageRating(int(value)))
def render_main_view(controller):
"""
render the main view
:return:
"""
root = tk.Tk()
main_view = MainView(root, controller)
root.geometry('{}x{}'
.format(appglobals.app_config_win_dimension[0],
appglobals.app_config_win_dimension[1]))
# https://www.tutorialspoint.com/how-to-center-a-window-on-the-screen-in-tkinter
root.eval('tk::PlaceWindow . center')
root.mainloop()
|
python
|
import typing
import pytest
from energuide import bilingual
from energuide import element
from energuide.embedded import code
from energuide.exceptions import InvalidEmbeddedDataTypeError
@pytest.fixture
def raw_wall_code() -> element.Element:
data = """
<Code id='Code 1'>
<Label>1201101121</Label>
<Layers>
<StructureType>
<English>Wood frame</English>
<French>Ossature de bois</French>
</StructureType>
<ComponentTypeSize>
<English>38x89 mm (2x4 in)</English>
<French>38x89 (2x4)</French>
</ComponentTypeSize>
</Layers>
</Code>
"""
return element.Element.from_string(data)
BAD_WALL_CODE_XML = [
# This XML block is missing the id attribute on the <Code> tag
"""
<Code>
<Label>1201101121</Label>
<Layers>
<StructureType>
<English>Wood frame</English>
<French>Ossature de bois</French>
</StructureType>
<ComponentTypeSize>
<English>38x89 mm (2x4 in)</English>
<French>38x89 (2x4)</French>
</ComponentTypeSize>
</Layers>
</Code>
""",
]
BAD_WINDOW_CODE_XML = [
# This XML block is missing the id attribute on the <Code> tag
"""
<Code>
<Label>202002</Label>
<Layers>
<GlazingTypes>
<English>Double/double with 1 coat</English>
<French>Double/double, 1 couche</French>
</GlazingTypes>
<CoatingsTints>
<English>Clear</English>
<French>Transparent</French>
</CoatingsTints>
<FillType>
<English>6 mm Air</English>
<French>6 mm d'air</French>
</FillType>
<SpacerType>
<English>Metal</English>
<French>Métal</French>
</SpacerType>
<Type>
<English>Picture</English>
<French>Fixe</French>
</Type>
<FrameMaterial>
<English>Wood</English>
<French>Bois</French>
</FrameMaterial>
</Layers>
</Code>
"""
]
@pytest.fixture
def wall_code() -> code.WallCode:
return code.WallCode(
identifier='Code 1',
label='1201101121',
tags={
code.WallCodeTag.STRUCTURE_TYPE: bilingual.Bilingual(
english='Wood frame',
french='Ossature de bois',
),
code.WallCodeTag.COMPONENT_TYPE_SIZE: bilingual.Bilingual(
english='38x89 mm (2x4 in)',
french='38x89 (2x4)',
)
},
)
@pytest.fixture
def raw_window_code() -> element.Element:
data = """
<Code id='Code 11'>
<Label>202002</Label>
<Layers>
<GlazingTypes>
<English>Double/double with 1 coat</English>
<French>Double/double, 1 couche</French>
</GlazingTypes>
<CoatingsTints>
<English>Clear</English>
<French>Transparent</French>
</CoatingsTints>
<FillType>
<English>6 mm Air</English>
<French>6 mm d'air</French>
</FillType>
<SpacerType>
<English>Metal</English>
<French>Métal</French>
</SpacerType>
<Type>
<English>Picture</English>
<French>Fixe</French>
</Type>
<FrameMaterial>
<English>Wood</English>
<French>Bois</French>
</FrameMaterial>
</Layers>
</Code>
"""
return element.Element.from_string(data)
@pytest.fixture
def window_code() -> code.WindowCode:
return code.WindowCode(
identifier='Code 11',
label='202002',
tags={
code.WindowCodeTag.GLAZING_TYPE: bilingual.Bilingual(
english='Double/double with 1 coat',
french='Double/double, 1 couche',
),
code.WindowCodeTag.COATING_TINTS: bilingual.Bilingual(english='Clear', french='Transparent'),
code.WindowCodeTag.FILL_TYPE: bilingual.Bilingual(english='6 mm Air', french="6 mm d'air"),
code.WindowCodeTag.SPACER_TYPE: bilingual.Bilingual(english='Metal', french='Métal'),
code.WindowCodeTag.CODE_TYPE: bilingual.Bilingual(english='Picture', french='Fixe'),
code.WindowCodeTag.FRAME_MATERIAL: bilingual.Bilingual(english='Wood', french='Bois'),
}
)
@pytest.fixture
def raw_codes(raw_wall_code: element.Element,
raw_window_code: element.Element) -> typing.Dict[str, typing.List[element.Element]]:
return {
'wall': [raw_wall_code],
'window': [raw_window_code],
}
def test_wall_code_from_data(raw_wall_code: element.Element, wall_code: code.WallCode) -> None:
output = code.WallCode.from_data(raw_wall_code)
assert output == wall_code
def test_window_code_from_data(raw_window_code: element.Element, window_code: code.WindowCode) -> None:
output = code.WindowCode.from_data(raw_window_code)
assert output == window_code
@pytest.mark.parametrize("bad_xml", BAD_WALL_CODE_XML)
def test_bad_wall_code(bad_xml: str) -> None:
code_node = element.Element.from_string(bad_xml)
with pytest.raises(InvalidEmbeddedDataTypeError) as excinfo:
code.WallCode.from_data(code_node)
assert excinfo.value.data_class == code.WallCode
@pytest.mark.parametrize("bad_xml", BAD_WINDOW_CODE_XML)
def test_bad_window_code(bad_xml: str) -> None:
code_node = element.Element.from_string(bad_xml)
with pytest.raises(InvalidEmbeddedDataTypeError) as excinfo:
code.WindowCode.from_data(code_node)
assert excinfo.value.data_class == code.WindowCode
def test_code_from_data(raw_wall_code: element.Element,
raw_window_code: element.Element,
wall_code: code.WallCode,
window_code: code.WindowCode) -> None:
output = code.Codes.from_data(
{'wall': [raw_wall_code], 'window': [raw_window_code]}
)
assert output == code.Codes(
wall={wall_code.identifier: wall_code},
window={window_code.identifier: window_code}
)
|
python
|
__doc__ = """
Example: the contents passed to the topic_metadata function
args = {
'pattern' : "https://mirrors.tuna.tsinghua.edu.cn/help/%s",
'themes' :["AOSP", "AUR","CocoaPods"
, "anaconda","archlinux","archlinuxcn"
,"bananian","centos","chromiumos","cygwin"
,"docker","elpa","epel","fedora","git-repo"
,"gitlab-ce","gitlab-ci-multi-runner"
,"hackage","homebrew","homebrew-bottles"
,"linux-stable.git","linux.git","lxc-images"
,"mongodb","msys2","nodesource"
,"pybombs","pypi"
,"raspbian","repo-ck","repoforge","rpmfusion","rubygems"
,"tensorflow","termux","ubuntu","virtualbox","weave"],
'filter': {'method' : 'id', 'pattern' : 'help-content'}
}
Tasks are generated in this order: first the task list is read in; if a task is an HTML-type task, the HTML is converted.
In other words, the key question is when we learn a task's type information,
for example a task type such as HTML->Markdown.
For such a type, we should prepare the parsing expressions it needs.
More concretely, such a type is also bound to a specific website; that is, a task type alone cannot determine the handler function.
A type only serves to construct tasks.
This also shows that tasks generally appear in clusters, in the form of Python dicts.
Ideally the metadata could be read directly from a YAML file, so it would not have to be written out separately for each file.
"""
import re
## taskfunc produces the task name; savefunc produces the save name.
## The task name actually depends on the site. For a wiki, for example, we want
## saved files to end in .md, which requires per-site control of the save name.
def to_metadata(args):
"""
进一步整理mirror的元数据,从元数据当中添加构造类型数据
"""
taskfunc = lambda theme: re.sub(r'[=]', r'_', args['pattern'] % theme)
savefunc = lambda theme: re.sub(r'[=:/,\'\(\)]', r'_', theme) + ".md"
args['taskfunc'] = args.get('taskfunc', taskfunc)
args['savefunc'] = args.get('savefunc', savefunc)
    args['savename'] = args['savename'] \
        if 'savename' in args.keys() else 'download/' + savefunc(args['themes'][0])
    # add a task_list attribute to the metadata: the task names for fetching each topic
args['task_list'] = [args['taskfunc'](theme)
for theme in args['themes']]
return args
def to_separate_metadata_list(args):
"""
将数据变成是元数据的各个项目,每个项目作为单独的数据列表而出现。每次获取单独生成页面。
"""
result = []
taskfunc = lambda theme: re.sub(r'[=]', r'_', args['pattern'] % theme)
#savefunc = lambda theme: re.sub(r'[=]', r'_', theme) + '.md'
savefunc = lambda theme: re.sub(r'[=:/,\'\(\)]', r'_', theme) + ".md"
args['taskfunc'] = args.get('taskfunc', taskfunc)
args['savefunc'] = args.get('savefunc', savefunc)
for theme in args['themes'] :
result += [{'pattern' : args['pattern'],
'filter' : args['filter'] if 'filter' in args.keys() else None,
'themes' : [theme],
'taskfunc' : args['taskfunc'],
'savename' : args['savefunc'](theme)
}]
return result
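# A small usage sketch (hypothetical values, mirroring the example in the
# module docstring):
#
#     demo = {'pattern': 'https://mirrors.tuna.tsinghua.edu.cn/help/%s',
#             'themes': ['pypi', 'ubuntu'],
#             'filter': {'method': 'id', 'pattern': 'help-content'}}
#     meta = to_metadata(dict(demo))
#     # meta['task_list'] -> ['https://mirrors.tuna.tsinghua.edu.cn/help/pypi', ...]
#     # meta['savename']  -> 'download/pypi.md'
#     per_theme = to_separate_metadata_list(dict(demo))
#     # len(per_theme) == 2, one metadata dict per theme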
|
python
|
"""This module demonstrates basic Sphinx usage with Python modules.
Submodules
==========
.. autosummary::
:toctree: _autosummary
"""
VERSION = "0.0.1"
"""The version of this module."""
|
python
|
#!/usr/bin/env python3
# pyreverse -p contexts_basecontext_basecontext ../Lib/pagebot/contexts/basecontext/basecontext.py
# dot -Tpng classes_contexts_basecontext_basecontext.dot -o classes_contexts_basecontext_basecontext.png
import os
import subprocess
def getDirs(root):
return [d for d in os.listdir(root) if os.path.isdir(os.path.join(root, d))]
def getFiles(root):
return [(f, os.path.join(root, f)) for f in os.listdir(root) if os.path.isfile(os.path.join(root, f))]
def reverse(files):
for (f, p) in files:
if f.startswith('.'):
continue
else:
name = f.split('.')[0]
            subprocess.call(['pyreverse', '-p', name, p])
print(name, p)
break
if __name__ == "__main__":
root = '../Lib/pagebot'
files = getFiles(root)
reverse(files)
|
python
|
#!/usr/bin/env python3
#
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit test for downloading gzip-ed objects."""
import gzip
import json
import os
import re
import unittest
from testbench import rest_server
from tests.format_multipart_upload import format_multipart_upload_bytes
UPLOAD_QUANTUM = 256 * 1024
class TestTestbenchObjectGzip(unittest.TestCase):
def setUp(self):
rest_server.db.clear()
rest_server.server.config["PREFERRED_URL_SCHEME"] = "https"
rest_server.server.config["SERVER_NAME"] = "storage.googleapis.com"
rest_server.root.config["PREFERRED_URL_SCHEME"] = "https"
rest_server.root.config["SERVER_NAME"] = "storage.googleapis.com"
self.client = rest_server.server.test_client(allow_subdomain_redirects=True)
# Avoid magic buckets in the test
os.environ.pop("GOOGLE_CLOUD_CPP_STORAGE_TEST_BUCKET_NAME", None)
response = self.client.post(
"/storage/v1/b", data=json.dumps({"name": "bucket-name"})
)
self.assertEqual(response.status_code, 200)
def _insert_compressed_object(self, name):
media = "How vexingly quick daft zebras jump!"
compressed = gzip.compress(media.encode("utf-8"))
response = self.client.post(
"/upload/storage/v1/b/bucket-name/o",
query_string={
"name": name,
"uploadType": "media",
"contentEncoding": "gzip",
},
content_type="application/octet-stream",
data=compressed,
)
self.assertEqual(response.status_code, 200)
self.assertTrue(
response.headers.get("content-type").startswith("application/json")
)
insert_rest = json.loads(response.data)
self.assertEqual(insert_rest.get("kind"), "storage#object")
self.assertEqual(insert_rest.get("contentEncoding", ""), "gzip")
return media
def test_download_gzip_data_simple_upload(self):
media = self._insert_compressed_object("zebra")
response = self.client.get(
"/download/storage/v1/b/bucket-name/o/zebra", query_string={"alt": "media"}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data.decode("utf-8"), media)
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""),
"gunzipped",
)
def test_download_gzip_compressed(self):
media = self._insert_compressed_object("zebra")
response = self.client.get(
"/download/storage/v1/b/bucket-name/o/zebra",
query_string={"alt": "media"},
headers={"Accept-Encoding": "gzip"},
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, gzip.compress(media.encode("utf-8")))
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""), ""
)
def test_download_gzip_range_ignored(self):
media = self._insert_compressed_object("zebra")
response = self.client.get(
"/download/storage/v1/b/bucket-name/o/zebra",
query_string={"alt": "media"},
headers={"Range": "4-8"},
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, media.encode("utf-8"))
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""),
"gunzipped",
)
self.assertEqual(
response.headers.get("content-range", ""),
"bytes 0-%d/%d" % (len(media) - 1, len(media)),
)
def test_download_gzip_uncompressed_xml(self):
media = self._insert_compressed_object("zebra")
response = self.client.get(
"/zebra", base_url="https://bucket-name.storage.googleapis.com"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, media.encode("utf-8"))
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""),
"gunzipped",
)
self.assertEqual(
response.headers.get("x-goog-stored-content-encoding", ""), "gzip"
)
def test_download_gzip_compressed_xml(self):
media = self._insert_compressed_object("zebra")
response = self.client.get(
"/zebra",
base_url="https://bucket-name.storage.googleapis.com",
headers={"Accept-Encoding": "gzip"},
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, gzip.compress(media.encode("utf-8")))
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""),
"",
)
self.assertEqual(
response.headers.get("x-goog-stored-content-encoding", ""), "gzip"
)
def test_download_of_multipart_upload(self):
media = "How vexingly quick daft zebras jump!"
compressed = gzip.compress(media.encode("utf-8"))
boundary, payload = format_multipart_upload_bytes(
{"contentEncoding": "gzip"}, compressed
)
response = self.client.post(
"/upload/storage/v1/b/bucket-name/o",
query_string={"uploadType": "multipart", "name": "zebra"},
content_type="multipart/related; boundary=" + boundary,
data=payload,
)
self.assertEqual(response.status_code, 200)
self.assertTrue(
response.headers.get("content-type").startswith("application/json")
)
insert_rest = json.loads(response.data)
self.assertEqual(insert_rest.get("kind"), "storage#object")
self.assertEqual(insert_rest.get("contentEncoding", ""), "gzip")
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""), ""
)
response = self.client.get(
"/download/storage/v1/b/bucket-name/o/zebra", query_string={"alt": "media"}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data.decode("utf-8"), media)
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""),
"gunzipped",
)
def test_download_of_resumable_upload(self):
media = "How vexingly quick daft zebras jump!"
compressed = gzip.compress(media.encode("utf-8"))
response = self.client.post(
"/upload/storage/v1/b/bucket-name/o",
query_string={"uploadType": "resumable", "name": "zebra"},
content_type="application/json",
data=json.dumps({"name": "zebra", "contentEncoding": "gzip"}),
)
self.assertEqual(response.status_code, 200)
location = response.headers.get("location")
self.assertIn("upload_id=", location)
match = re.search("[&?]upload_id=([^&]+)", location)
self.assertIsNotNone(match, msg=location)
upload_id = match.group(1)
finalized = self.client.put(
"/upload/storage/v1/b/bucket-name/o",
query_string={"upload_id": upload_id},
data=compressed,
)
self.assertEqual(finalized.status_code, 200)
self.assertTrue(
finalized.headers.get("content-type").startswith("application/json")
)
insert_rest = json.loads(finalized.data)
self.assertIn("metadata", insert_rest)
self.assertEqual(insert_rest.get("kind"), "storage#object")
self.assertEqual(insert_rest.get("contentEncoding", ""), "gzip")
response = self.client.get(
"/download/storage/v1/b/bucket-name/o/zebra", query_string={"alt": "media"}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data.decode("utf-8"), media)
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""),
"gunzipped",
)
if __name__ == "__main__":
unittest.main()
|
python
|
# from django.contrib.oauth.models import User
from rest_framework import authentication
from rest_framework import exceptions
import logging
log = logging.getLogger(__name__)
import json, re
from django.core.cache import cache
from django.conf import settings
class TokenAuthentication(authentication.BaseAuthentication):
def authenticate(self, request):
path = request.path
log.debug('TokenAuthentication get path = {0}'.format(path))
#if path start with /rest/* pass it
parser = re.compile(r'^/rest/(\w+/)+.*')
match_group = parser.match(path)
if match_group:
return None
token = request.META.get('HTTP_AUTHORIZATION')
log.debug('TokenAuthentication get token = {0}'.format(token))
if not token:
raise exceptions.AuthenticationFailed('Authorization Token required')
else:
# lookup redis to see if token exists
# if not, return exception, else return user
value = cache.get(token)
            if value is None:
# user = None
raise exceptions.AuthenticationFailed('Invalid token in redis')
else:
user = json.loads(value)
cache.set(token, json.dumps(user), settings.REDIS_TOKEN_TIMEOUT_SEC)
return (user, None)
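# For reference, a login view would be expected to seed the cache in a shape
# this class can consume (a sketch with hypothetical values):
#
#     user = {'username': 'alice', 'id': 1}
#     cache.set(token, json.dumps(user), settings.REDIS_TOKEN_TIMEOUT_SEC)
#
# so that cache.get(token) above yields a JSON-encoded user dict.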
|
python
|
# Input: a list of "documents" at least containing: "sentences"
# Output: a list of "documents" at least containing: "text"
from .simplifier import Simplifier
class SimplifierByKGen:
def __init__(self, parameters):
        # some preparation
        # no parameters are needed
print("Info: Simplifier By KGen has been initialized")
def execute(self, input):
        if 'documents' not in input:
print("ERROR: documents is missing in the input for SimplifierByKGen")
return input
for document in input['documents']:
            if 'sentences' not in document:
print("ERROR: sentences is missing in a document in documents for SimplifierByKGen")
return input
            if 'originalSentences' not in document:
document['originalSentences'] = document['sentences']
document['sentences'] = Simplifier(document['sentences']).simplify()
return input
#def refineDocuments(self):
|
python
|
from django.conf import settings
from django.core.files.storage import get_storage_class
from storages.backends.s3boto3 import S3Boto3Storage
# if settings.DEBUG:
# PublicMediaStorage = get_storage_class()
# PrivateMediaStorage = get_storage_class()
# else:
from config.settings import dev
class PublicMediaStorage(S3Boto3Storage):
location = dev.AWS_PUBLIC_MEDIA_LOCATION
file_overwrite = False
class PrivateMediaStorage(S3Boto3Storage):
location = dev.AWS_PRIVATE_MEDIA_LOCATION
file_overwrite = False
default_acl = "private"
custom_domain = False
|
python
|
#########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tempfile
import time
import threading
import ssl
import sys
import os
import copy
from contextlib import contextmanager
import celery
import cloudify.manager
from cloudify import ctx
from cloudify.exceptions import NonRecoverableError
from cloudify.utils import ManagerVersion
from cloudify.utils import get_manager_file_server_url
from cloudify.decorators import operation
from cloudify_agent.api.plugins.installer import PluginInstaller
from cloudify_agent.api.factory import DaemonFactory
from cloudify_agent.api import defaults
from cloudify_agent.api import exceptions
from cloudify_agent.api import utils
from cloudify_agent.app import app
from cloudify_agent.installer.config import configuration
@operation
def install_plugins(plugins, **_):
installer = PluginInstaller(logger=ctx.logger)
for plugin in plugins:
ctx.logger.info('Installing plugin: {0}'.format(plugin['name']))
try:
installer.install(plugin=plugin,
deployment_id=ctx.deployment.id,
blueprint_id=ctx.blueprint.id)
except exceptions.PluginInstallationError as e:
# preserve traceback
tpe, value, tb = sys.exc_info()
raise NonRecoverableError, NonRecoverableError(str(e)), tb
@operation
def uninstall_plugins(plugins, **_):
installer = PluginInstaller(logger=ctx.logger)
for plugin in plugins:
ctx.logger.info('Uninstalling plugin: {0}'.format(plugin['name']))
if plugin.get('wagon'):
installer.uninstall_wagon(
package_name=plugin['package_name'],
package_version=plugin['package_version'])
else:
installer.uninstall(plugin=plugin,
deployment_id=ctx.deployment.id)
@operation
def restart(new_name=None, delay_period=5, **_):
cloudify_agent = ctx.instance.runtime_properties['cloudify_agent']
if new_name is None:
new_name = utils.internal.generate_new_agent_name(
cloudify_agent.get('name', 'agent'))
    # update agent name in runtime properties so that the workflow will
    # know the name of the worker handling tasks for this instance.
# the update cannot be done by setting a nested property directly
# because they are not recognized as 'dirty'
cloudify_agent['name'] = new_name
ctx.instance.runtime_properties['cloudify_agent'] = cloudify_agent
# must update instance here because the process may shutdown before
# the decorator has a chance to do it.
ctx.instance.update()
daemon = _load_daemon(logger=ctx.logger)
# make the current master stop listening to the current queue
# to avoid a situation where we have two masters listening on the
# same queue.
app.control.cancel_consumer(
queue=daemon.queue,
destination=['celery@{0}'.format(daemon.name)]
)
# clone the current daemon to preserve all the attributes
attributes = utils.internal.daemon_to_dict(daemon)
# give the new daemon the new name
attributes['name'] = new_name
# remove the log file and pid file so that new ones will be created
# for the new agent
del attributes['log_file']
del attributes['pid_file']
# Get the broker credentials for the daemon
attributes.update(ctx.bootstrap_context.broker_config())
new_daemon = DaemonFactory().new(logger=ctx.logger, **attributes)
# create the new daemon
new_daemon.create()
_save_daemon(new_daemon)
# configure the new daemon
new_daemon.configure()
new_daemon.start()
# start a thread that will kill the current master.
# this is done in a thread so that the current task will not result in
# a failure
thread = threading.Thread(target=shutdown_current_master,
args=[delay_period, ctx.logger])
thread.daemon = True
thread.start()
@operation
def stop(delay_period=5, **_):
thread = threading.Thread(target=shutdown_current_master,
args=[delay_period, ctx.logger])
thread.daemon = True
thread.start()
def shutdown_current_master(delay_period, logger):
if delay_period > 0:
time.sleep(delay_period)
daemon = _load_daemon(logger=logger)
daemon.before_self_stop()
daemon.stop()
def _load_daemon(logger):
factory = DaemonFactory(
username=utils.internal.get_daemon_user(),
storage=utils.internal.get_daemon_storage_dir())
return factory.load(utils.internal.get_daemon_name(), logger=logger)
def _save_daemon(daemon):
factory = DaemonFactory(
username=utils.internal.get_daemon_user(),
storage=utils.internal.get_daemon_storage_dir())
factory.save(daemon)
def create_new_agent_dict(old_agent):
new_agent = {}
new_agent['name'] = utils.internal.generate_new_agent_name(
old_agent['name'])
new_agent['remote_execution'] = True
# TODO: broker_ip should be handled as part of fixing agent migration
fields_to_copy = ['windows', 'ip', 'basedir', 'user', 'broker_ip']
for field in fields_to_copy:
if field in old_agent:
new_agent[field] = old_agent[field]
configuration.reinstallation_attributes(new_agent)
new_agent['manager_file_server_url'] = get_manager_file_server_url()
new_agent['old_agent_version'] = old_agent['version']
return new_agent
@contextmanager
def _celery_client(ctx, agent):
# We retrieve broker url from old agent in order to support
# cases when old agent is not connected to current rabbit server.
if 'broker_config' in agent:
broker_config = agent['broker_config']
else:
broker_config = ctx.bootstrap_context.broker_config()
broker_url = utils.internal.get_broker_url(broker_config)
ctx.logger.info('Connecting to {0}'.format(broker_url))
celery_client = celery.Celery()
    # We can't pass broker_url to the Celery constructor because it would
    # be overridden by the value from broker_config.py.
config = {
'BROKER_URL': broker_url,
'CELERY_RESULT_BACKEND': broker_url
}
if not ManagerVersion(agent['version']).equals(ManagerVersion('3.2')):
config['CELERY_TASK_RESULT_EXPIRES'] = \
defaults.CELERY_TASK_RESULT_EXPIRES
fd, cert_path = tempfile.mkstemp()
os.close(fd)
try:
if broker_config.get('broker_ssl_enabled'):
with open(cert_path, 'w') as cert_file:
cert_file.write(broker_config.get('broker_ssl_cert', ''))
broker_ssl = {
'ca_certs': cert_path,
'cert_reqs': ssl.CERT_REQUIRED
}
else:
broker_ssl = False
config['BROKER_USE_SSL'] = broker_ssl
celery_client.conf.update(**config)
yield celery_client
finally:
os.remove(cert_path)
def _celery_task_name(version):
if not version or ManagerVersion(version).greater_than(
ManagerVersion('3.3.1')):
return 'cloudify.dispatch.dispatch'
else:
return 'script_runner.tasks.run'
def _assert_agent_alive(name, celery_client, version=None):
tasks = utils.get_agent_registered(name, celery_client)
if not tasks:
raise NonRecoverableError(
'Could not access tasks list for agent {0}'.format(name))
task_name = _celery_task_name(version)
if task_name not in tasks:
raise NonRecoverableError('Task {0} is not available in agent {1}'.
format(task_name, name))
def _get_manager_version():
version_json = cloudify.manager.get_rest_client().manager.get_version()
return ManagerVersion(version_json['version'])
def _run_install_script(old_agent, timeout, validate_only=False,
install_script=None):
# Assuming that if there is no version info in the agent then
# this agent was installed by current manager.
old_agent = copy.deepcopy(old_agent)
if 'version' not in old_agent:
old_agent['version'] = str(_get_manager_version())
new_agent = create_new_agent_dict(old_agent)
old_agent_version = new_agent['old_agent_version']
with _celery_client(ctx, old_agent) as celery_client:
old_agent_name = old_agent['name']
_assert_agent_alive(old_agent_name, celery_client, old_agent_version)
if install_script is None:
script_format = '{0}/cloudify/install_agent.py'
install_script = script_format.format(
get_manager_file_server_url())
script_runner_task = 'script_runner.tasks.run'
cloudify_context = {
'type': 'operation',
'task_name': script_runner_task,
'task_target': old_agent['queue']
}
kwargs = {'script_path': install_script,
'cloudify_agent': new_agent,
'validate_only': validate_only,
'__cloudify_context': cloudify_context}
task = _celery_task_name(old_agent_version)
result = celery_client.send_task(
task,
kwargs=kwargs,
queue=old_agent['queue']
)
returned_agent = result.get(timeout=timeout)
if returned_agent['name'] != new_agent['name']:
raise NonRecoverableError(
'Expected agent name {0}, received {1}'.format(
new_agent['name'], returned_agent['name'])
)
returned_agent.pop('old_agent_version', None)
return {
'old': old_agent,
'new': returned_agent
}
def create_agent_from_old_agent(operation_timeout=300, install_script=None):
if 'cloudify_agent' not in ctx.instance.runtime_properties:
raise NonRecoverableError(
'cloudify_agent key not available in runtime_properties')
if 'agent_status' not in ctx.instance.runtime_properties:
raise NonRecoverableError(
('agent_status key not available in runtime_properties, '
'validation needs to be performed before new agent installation'))
status = ctx.instance.runtime_properties['agent_status']
if not status['agent_alive_crossbroker']:
raise NonRecoverableError(
('Last validation attempt has shown that agent is dead. '
'Rerun validation.'))
old_agent = ctx.instance.runtime_properties['cloudify_agent']
agents = _run_install_script(old_agent,
operation_timeout,
validate_only=False,
install_script=install_script)
# Make sure that new celery agent was started:
returned_agent = agents['new']
ctx.logger.info('Installed agent {0}'.format(returned_agent['name']))
_assert_agent_alive(returned_agent['name'], app)
# Setting old_cloudify_agent in order to uninstall it later.
ctx.instance.runtime_properties['old_cloudify_agent'] = agents['old']
ctx.instance.runtime_properties['cloudify_agent'] = returned_agent
@operation
def create_agent_amqp(install_agent_timeout, install_script=None, **_):
create_agent_from_old_agent(install_agent_timeout,
install_script=install_script)
@operation
def validate_agent_amqp(validate_agent_timeout, fail_on_agent_dead=False,
fail_on_agent_not_installable=False,
install_script=None, **_):
if 'cloudify_agent' not in ctx.instance.runtime_properties:
raise NonRecoverableError(
'cloudify_agent key not available in runtime_properties')
agent = ctx.instance.runtime_properties['cloudify_agent']
agent_name = agent['name']
result = {}
ctx.logger.info(('Checking if agent can be accessed through '
'current rabbitmq'))
try:
_assert_agent_alive(agent_name, app)
except Exception as e:
result['agent_alive'] = False
result['agent_alive_error'] = str(e)
ctx.logger.info('Agent unavailable, reason {0}'.format(str(e)))
else:
result['agent_alive'] = True
ctx.logger.info(('Checking if agent can be accessed through '
'different rabbitmq'))
try:
_run_install_script(agent, validate_agent_timeout, validate_only=True,
install_script=install_script)
except Exception as e:
result['agent_alive_crossbroker'] = False
result['agent_alive_crossbroker_error'] = str(e)
ctx.logger.info('Agent unavailable, reason {0}'.format(str(e)))
else:
result['agent_alive_crossbroker'] = True
result['timestamp'] = time.time()
ctx.instance.runtime_properties['agent_status'] = result
if fail_on_agent_dead and not result['agent_alive']:
raise NonRecoverableError(result['agent_alive_error'])
if fail_on_agent_not_installable and not result[
'agent_alive_crossbroker']:
raise NonRecoverableError(result['agent_alive_crossbroker_error'])
|
python
|
"""
Represents a square stop.
"""
from BeamlineComponents.Stop.StopRectangle import StopRectangle
class StopSquare(StopRectangle):
def __init__(self, side_length):
StopRectangle.__init__(self, side_length, side_length)
def sideLength(self):
return self.lengthVertical()
|
python
|
import urllib, json
from jwcrypto import jwt, jwk
class OpenIDTokenValidator:
def __init__(self, config_url, audience):
"""
Retrieve auth server config and set up the validator
:param config_url: the discovery URI
:param audience: client ID to verify against
"""
# Fetch configuration
self.config = json.loads(OpenIDTokenValidator.__fetch_content__(config_url))
self.config['audience'] = audience
# Fetch signing key/certificate
jwk_response = OpenIDTokenValidator.__fetch_content__(self.config['jwks_uri'])
self.jwk_keyset = jwk.JWKSet.from_json(jwk_response)
@staticmethod
def __fetch_content__(url):
response = urllib.urlopen(url)
return response.read()
def __verify_claim__(self, decoded_token_json):
if decoded_token_json['iss'] != self.config['issuer']:
raise Exception('Invalid Issuer')
if decoded_token_json['aud'] != self.config['audience']:
raise Exception('Invalid Audience')
def verify_and_decode_token(self, token):
"""
Verify the token with the provided JWK certificate and claims
:param token: the token to verify
:return: the decoded ID token body
"""
decoded_token = jwt.JWT(key=self.jwk_keyset, jwt=token)
decoded_json = json.loads(decoded_token.claims)
self.__verify_claim__(decoded_json)
return decoded_json
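# Usage sketch (the discovery URL, audience, and token are placeholders):
#
#     validator = OpenIDTokenValidator(
#         'https://accounts.example.com/.well-known/openid-configuration',
#         audience='my-client-id')
#     claims = validator.verify_and_decode_token(id_token)
#     print(claims['sub'])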
|
python
|
"""
Tests for attention module
"""
import numpy as np
import theano
import theano.tensor as T
import agentnet
from agentnet.memory import GRUCell
from agentnet.memory.attention import AttentionLayer
from lasagne.layers import *
def test_attention():
"""
    minimalistic test that showcases an attentive RNN that reads some chunk
    of the input sequence on each tick and outputs nothing
"""
# step inner graph
class step:
enc_activations = InputLayer((None, None, 12), name='placeholder for encoder activations (to be attended)')
prev_gru = InputLayer((None, 15),name='gru prev state (15 units)')
attention = AttentionLayer(enc_activations,prev_gru,num_units=16)
gru = GRUCell(prev_gru, attention['attn'] , name='rnn that reads enc_sequence with attention')
attn_probs = attention['probs'] #weights from inside attention
# outer graph
encoder_activations = InputLayer((None,None,12),name='encoder sequence (will be sent to enc_sequence)')
rec = agentnet.Recurrence(input_nonsequences={step.enc_activations: encoder_activations},
state_variables={step.gru: step.prev_gru},
tracked_outputs=[step.attn_probs],
unroll_scan=False,
n_steps = 10)
weights = get_all_params(rec)
gru_states,attention_probs_seq = rec[step.gru,step.attn_probs]
run = theano.function([encoder_activations.input_var], get_output([gru_states,attention_probs_seq]),
updates=rec.get_automatic_updates(),allow_input_downcast=True)
#run on surrogate data
gru_seq,probs_seq = run(np.random.randn(5, 25, 12))
    assert gru_seq.shape == (5, 10, 15) # hidden GRU states, 5 samples / 10 ticks / 15 units
    assert probs_seq.shape == (5, 10, 25) # attention sequences, 5 samples / 10 ticks / 25-step input sequence
#hard attention
hard_outputs = get_output([gru_states,attention_probs_seq],recurrence_flags={'hard_attention':True})
hard_run = theano.function([encoder_activations.input_var], hard_outputs,
updates=rec.get_automatic_updates(),allow_input_downcast=True)
#run on surrogate data
_,hard_probs_seq = hard_run(np.random.randn(5, 25, 12))
#check if probs are one-hot
assert hard_probs_seq.shape == (5, 10, 25) #attention sequences, 5 samples/10ticks/25 input seq length
assert len(np.unique(hard_probs_seq.ravel()))==2 #only 0's and 1's
def test_attention_2d():
"""
Almost a copy-paste of previous test, but this time attention is applied to an image instead
of a 1d sequence.
"""
# step inner graph
class step:
image = InputLayer((None,3,24,24), name='placeholder for 24x24 image (to be attended)')
prev_gru = InputLayer((None, 15),name='gru prev state (15 units)')
#get image dimensions
n_channels,width,height = image.output_shape[1:]
#flatten all image spots to look like 1d sequence
image_chunks = reshape(dimshuffle(image,[0,2,3,1]),(-1,width*height,n_channels))
attention = AttentionLayer(image_chunks,prev_gru,num_units=16)
gru = GRUCell(prev_gru, attention['attn'] , name='rnn that reads enc_sequence with attention')
#weights from inside attention - reshape back into image
attn_probs = reshape(attention['probs'],(-1,width,height))
# outer graph
input_image = InputLayer((None,3,24,24),name='24x24-pixel RGB image to be sent into step.image')
rec = agentnet.Recurrence(input_nonsequences={step.image: input_image},
state_variables={step.gru: step.prev_gru},
tracked_outputs=[step.attn_probs],
unroll_scan=False,
n_steps = 10)
weights = get_all_params(rec)
gru_states,attention_probs_seq = rec[step.gru,step.attn_probs]
run = theano.function([input_image.input_var], get_output([gru_states,attention_probs_seq]),
updates=rec.get_automatic_updates(),allow_input_downcast=True)
#run on surrogate data
gru_seq,probs_seq = run(np.random.randn(5, 3, 24,24))
    assert gru_seq.shape == (5, 10, 15) # hidden GRU states, 5 samples / 10 ticks / 15 units
    assert probs_seq.shape == (5, 10, 24,24) # attention maps, 5 samples / 10 ticks / 24x24 image
|
python
|
# coding: utf-8
# $Id: $
from celery import Celery
from celery.utils.log import get_task_logger, get_logger
CELERY_CONFIG = {
'BROKER_URL': 'amqp://guest@localhost/',
'CELERY_RESULT_BACKEND': "redis://localhost/0",
'CELERY_TASK_SERIALIZER': "pickle",
'CELERY_RESULT_SERIALIZER': "pickle",
'CELERYD_LOG_FORMAT': '[%(asctime)s] %(levelname)s: %(message)s',
'CELERYD_TASK_LOG_FORMAT': '[%(asctime)s] %(levelname)s <%(sid)s> %(task_name)s: %(message)s',
}
from logcollect.boot import celery_config
celery_config('amqp://guest:[email protected]/',
collect_root_logs=True,
activity_identity={'project': 'logcollect',
'subsystem': 'celery_test'})
# the first positional Celery() argument is the app name, not a config dict
celery = Celery()
celery.conf.update(CELERY_CONFIG)
@celery.task
def sample_task(msg='CELERY'):
get_task_logger("sample_task").info("get task logger message")
get_logger("celery_sample_logger").info("get logger message")
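# Enqueueing the task from another process would look like this (assuming a
# running worker and broker):
#
#     sample_task.delay('hello from producer')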
|
python
|
from pyparsing import *
import act
topnum = Forward().setParseAction(act.topnum)
attacking = Forward().setParseAction(act.attacking)
blocking = Forward().setParseAction(act.blocking)
tapped = Forward().setParseAction(act.tapped)
untapped = Forward().setParseAction(act.untapped)
enchanted = Forward().setParseAction(act.enchanted)
equipped = Forward().setParseAction(act.equipped)
exiled = Forward().setParseAction(act.exiled)
sacrificed = Forward().setParseAction(act.sacrificed)
haunted = Forward().setParseAction(act.haunted)
adjective = Forward()
andadjectives = Forward()
oradjectives = Forward().setParseAction(act.oradjectives)
adjectives = Forward().setParseAction(act.adjectives)
|
python
|
import functools
import numpy as np
import pytest
from ansys import dpf
from ansys.dpf.core import examples
from ansys.dpf.core import misc
NO_PLOTTING = True
if misc.module_exists("pyvista"):
from pyvista.plotting import system_supports_plotting
NO_PLOTTING = not system_supports_plotting()
@pytest.fixture()
def static_model():
return dpf.core.Model(dpf.core.upload_file_in_tmp_folder(examples.static_rst))
def test_model_from_data_source(simple_bar):
data_source = dpf.core.DataSources(simple_bar)
model = dpf.core.Model(data_source)
assert "displacement" in model.metadata.result_info
def test_model_metadata_from_data_source(simple_bar):
data_source = dpf.core.DataSources(simple_bar)
model = dpf.core.Model(data_source)
assert model.metadata.result_info is not None
assert model.metadata.time_freq_support is not None
assert model.metadata.meshed_region is not None
assert model.metadata.data_sources is not None
def test_displacements_eval(static_model):
disp = static_model.results.displacement()
fc = disp.outputs.fields_container()
disp_field_from_eval = fc[0]
fc_from_outputs = disp.outputs.fields_container()[0]
assert np.allclose(disp_field_from_eval.data, fc_from_outputs.data)
def test_extract_component(static_model):
disp = static_model.results.displacement()
disp = disp.X()
disp_field = disp.outputs.fields_container()[0]
assert isinstance(disp_field.data, np.ndarray)
def test_kinetic(static_model):
e = static_model.results.kinetic_energy()
energy = e.outputs.fields_container()[0]
assert isinstance(energy.data, np.ndarray)
def test_str_model(static_model):
assert "Static" in str(static_model)
assert "81" in str(static_model)
assert "Unit: m" in str(static_model)
def test_connect_inputs_in_constructor_model(plate_msup):
model = dpf.core.Model(plate_msup)
u = model.results.displacement(0.015)
fc = u.outputs.fields_container()
assert len(fc) == 1
assert np.allclose(fc[0].data[0], [5.12304110e-14, 3.64308310e-04, 5.79805917e-06])
scop = dpf.core.Scoping()
scop.ids = list(range(1, 21))
u = model.results.displacement(0.015, scop)
fc = u.outputs.fields_container()
assert len(fc) == 1
assert np.allclose(fc[0].data[0], [9.66814331e-16, 6.82591973e-06, 1.35911110e-06])
assert fc[0].shape == (20, 3)
def test_named_selection_model(allkindofcomplexity):
model = dpf.core.Model(allkindofcomplexity)
ns = model.metadata.available_named_selections
assert ns == [
"_CM82",
"_CM86UX_XP",
"_DISPNONZEROUX",
"_DISPZEROUZ",
"_ELMISC",
"_FIXEDSU",
]
scop = model.metadata.named_selection("_CM86UX_XP")
assert len(scop) == 481
assert scop.location == dpf.core.locations().nodal
def test_all_result_operators_exist(allkindofcomplexity):
model = dpf.core.Model(allkindofcomplexity)
res = model.results
for key in res.__dict__:
if isinstance(res.__dict__[key], functools.partial):
res.__dict__[key]()
def test_iterate_results_model(allkindofcomplexity):
model = dpf.core.Model(allkindofcomplexity)
res = model.results
for key in res:
key()
def test_result_not_overrided(plate_msup):
model1 = dpf.core.Model(examples.electric_therm)
size = len(model1.results)
model2 = dpf.core.Model(plate_msup)
assert len(model1.results) == size
assert len(model2.results) > len(model1.results)
def test_result_displacement_model():
model = dpf.core.Model(examples.download_all_kinds_of_complexity_modal())
results = model.results
assert isinstance(results.displacement(), dpf.core.Operator)
assert len(results.displacement.on_all_time_freqs.eval()) == 45
assert results.displacement.on_first_time_freq.eval().get_label_scoping().ids == [1]
assert results.displacement.on_last_time_freq.eval().get_label_scoping().ids == [45]
assert len(results.displacement.split_by_body.eval()) == 32
assert len(results.displacement.split_by_shape.eval()) == 4
assert (
len(results.displacement.on_named_selection("_FIXEDSU").eval()[0].scoping)
== 222
)
all_time_ns = results.displacement.on_named_selection(
"_FIXEDSU"
).on_all_time_freqs.eval()
assert len(all_time_ns) == 45
assert len(all_time_ns[0].scoping) == 222
assert len(all_time_ns[19].scoping) == 222
def test_result_stress_model():
model = dpf.core.Model(examples.download_all_kinds_of_complexity_modal())
results = model.results
assert isinstance(results.stress(), dpf.core.Operator)
assert len(results.stress.on_all_time_freqs.eval()) == 45
assert results.stress.on_first_time_freq.eval().get_label_scoping().ids == [1]
assert results.stress.on_last_time_freq.eval().get_label_scoping().ids == [45]
assert len(results.stress.split_by_body.eval()) == 32
assert len(results.stress.split_by_shape.eval()) == 4
assert len(results.stress.on_named_selection("_FIXEDSU").eval()[0].scoping) == 222
all_time_ns = results.stress.on_named_selection("_FIXEDSU").on_all_time_freqs.eval()
assert len(all_time_ns) == 45
assert len(all_time_ns[0].scoping) == 222
assert len(all_time_ns[19].scoping) == 222
def test_result_no_memory(plate_msup):
model = dpf.core.Model(plate_msup)
assert len(model.results.elastic_strain.on_all_time_freqs.eval()) == 20
assert len(model.results.elastic_strain.eval()) == 1
def test_result_stress_location_model(plate_msup):
model = dpf.core.Model(plate_msup)
stress = model.results.stress
fc = (
stress.on_mesh_scoping(
dpf.core.Scoping(ids=[1, 2], location=dpf.core.locations.elemental)
)
.on_location(dpf.core.locations.nodal)
.eval()
)
assert fc[0].location == "Nodal"
def test_result_time_scoping(plate_msup):
model = dpf.core.Model(plate_msup)
stress = model.results.stress
fc = stress.on_time_scoping([1, 2, 3, 19]).eval()
assert len(fc) == 4
fc = stress.on_time_scoping([0.115, 0.125]).eval()
assert len(fc) == 2
assert np.allclose(
fc.time_freq_support.time_frequencies.data, np.array([0.115, 0.125])
)
def test_result_splitted_subset(allkindofcomplexity):
model = dpf.core.Model(allkindofcomplexity)
vol = model.results.elemental_volume
assert len(vol.split_by_body.eval()) == 11
assert len(vol.split_by_body.eval()[0].scoping) == 105
assert len(vol.on_mesh_scoping([1, 2, 3, 10992]).split_by_body.eval()) == 2
assert len(vol.eval()[0].scoping) == 3
assert len(vol.eval()[1].scoping) == 1
def test_result_not_dynamic(plate_msup):
dpf.core.settings.set_dynamic_available_results_capability(False)
model = dpf.core.Model(plate_msup)
assert isinstance(model.results, dpf.core.results.CommonResults)
stress = model.results.stress
fc = stress.on_time_scoping([1, 2, 3, 19]).eval()
assert len(fc) == 4
fc = stress.on_time_scoping([0.115, 0.125]).eval()
assert len(fc) == 2
assert np.allclose(
fc.time_freq_support.time_frequencies.data, np.array([0.115, 0.125])
)
assert fc[0].unit == "Pa"
dis = model.results.displacement().eval()
dpf.core.settings.set_dynamic_available_results_capability(True)
# @pytest.mark.skipif(NO_PLOTTING, reason="Requires system to support plotting")
# def test_displacements_plot(static_model):
# from pyvista import CameraPosition
# disp = static_model.results.displacement()
# cpos = disp.outputs.fields_container()[0].plot('x')
# assert isinstance(cpos, CameraPosition)
|
python
|
import os.path as osp
from .builder import DATASETS
from .custom import CustomDataset
@DATASETS.register_module()
class Combine(CustomDataset):
"""PascalContext dataset.
In segmentation map annotation for PascalContext, 0 stands for background,
which is included in 60 categories. ``reduce_zero_label`` is fixed to
False. The ``img_suffix`` is fixed to '.jpg' and ``seg_map_suffix`` is
fixed to '.png'.
Args:
split (str): Split txt file for PascalContext.
"""
CLASSES = ('other','water')
PALETTE = [[0, 0, 0], [128, 0, 0]]
def __init__(self, split, **kwargs):
super(Combine, self).__init__(
split=split,
reduce_zero_label=False,
#att_metrics = ['PRE','REC','F-measure','F-max','FPR','FNR'],
            #att_metrics=['Grmse','Gmax'], ## att_metrics cannot be passed during training because of pre_eval_to_metrics(results, metric)
**kwargs)
assert osp.exists(self.img_dir) and self.split is not None
|
python
|
<CustButton@Button>:
font_size: 32
<SudGridLayout>:
    id: sudoku
    cols: 9
    rows: 9
    spacing: 10
    BoxLayout:
        spacing: 10
        CustButton:
            text: "7"
        CustButton:
            text: "8"
|
python
|
#!/usr/bin/env python
# Returns a list of datetimes ranging from yesterday's
# date back to 2014-03-30 or if passed a first argument
# back to the first argument
import sys
import datetime
yesterday = (datetime.datetime.today() - datetime.timedelta(days=1))
opening_date = datetime.datetime(2014, 3, 30)
if len(sys.argv) > 1:
opening_date = datetime.datetime.strptime(sys.argv[1], "%Y-%m-%d")
days = (yesterday - opening_date).days
for x in range(0, days+1):
print (yesterday - datetime.timedelta(days=x)).strftime("%Y-%m-%d")
|
python
|
from patchify import patchify, unpatchify
from matplotlib import image as mpimg
from matplotlib import pyplot as plt
import cv2 as cv
from PIL import Image
import numpy as np
from patchfly import patchfly, unpatchfly
import os
# ----------------------
# get an image from the internet
# ----------------------
# url = "https://gimg2.baidu.com/image_search/src=http%3A%2F%2Fwww.petsid.us%2Fwp-content%2Fuploads%2F2018%2F07%2FCats-Health-The-Dos-And-Donts-For-Cat-owners.jpg&refer=http%3A%2F%2Fwww.petsid.us&app=2002&size=f9999,10000&q=a80&n=0&g=0n&fmt=jpeg?sec=1642598122&t=976acf48cb6e5dc77b17048b24efdaa8"
# def request_download(IMAGE_URL):
# import requests
# r = requests.get(IMAGE_URL)
# with open('./data/img.png', 'wb') as f:
# f.write(r.content)
# request_download(url)
# ----------------------
# My patchfly
# ----------------------
img = Image.open(r"/mnt/4t/ljt/project/patchfly/data/img.png")
img_copy = img.copy()
img_array = np.array(img_copy)
img_patches = patchfly(img_array, (256, 256, 3))
print(img_patches.shape)
recon = unpatchfly(img_patches, img_array.shape)
# def main():
# os.makedirs("/mnt/4t/ljt/project/patchfly/data/patch", exist_ok=True)
# img = Image.open(r"/mnt/4t/ljt/project/patchfly/data/img.png")
# img_copy = img.copy()
# img_array = np.array(img_copy)
# img_patches = patchfly(img_array, (555, 555, 3))
# for i in range(img_patches.shape[0]):
# for j in range(img_patches.shape[1]):
# print(i, j)
# print(img_patches[i][j][0].shape)
# plt.imsave("/mnt/4t/ljt/project/patchfly/data/patch/{}_{}.png".format(i, j), img_patches[i][j][0])
# recon = unpatchfly(img_patches=img_patches, img_size=img_array.shape)
# plt.imsave("recon.jpg", recon)
# print(recon.shape)
# if __name__ == '__main__':
# main()
python
|
from unittest import TestCase
from moff.parser import Parser
from moff.node import VideoNode, SourceNode, ParagraphNode, LinkNode, TextNode
class TestReadVideo (TestCase):
def test_parse1(self):
parser = Parser()
node1 = parser.parse_string("@video example.mp4")
node2 = VideoNode(
src="example.mp4",
preload="none",
controls=True,
nodes=[
ParagraphNode(nodes=[
TextNode(
"Your browser has not supported playing video with HTML5."),
TextNode("You can download video from "),
LinkNode(
href="example.mp4",
target="_blank",
nodes=[
TextNode("here")
]),
TextNode(".")
])
])
self.assertEqual(str(node1), str(node2))
def test_parse2(self):
parser = Parser()
node1 = parser.parse_string(
"@video example.mp4\n@video @thumbnail thumbnail.jpg")
node2 = VideoNode(
src="example.mp4",
poster="thumbnail.jpg",
preload="none",
controls=True,
nodes=[
ParagraphNode(nodes=[
TextNode(
"Your browser has not supported playing video with HTML5."),
TextNode("You can download video from "),
LinkNode(
href="example.mp4",
target="_blank",
nodes=[
TextNode("here")
]),
TextNode(".")
])
])
self.assertEqual(str(node1), str(node2))
def test_parse3(self):
parser = Parser()
node1 = parser.parse_string(
"@video example.mp4\n@video @src example.mp4\n@video @src example.webm video/webm")
node2 = VideoNode(
preload="none",
controls=True,
nodes=[
SourceNode(
src="example.mp4",
type="video/mp4"),
SourceNode(
src="example.mp4",
type="video/mp4"),
SourceNode(
src="example.webm",
type="video/webm"),
ParagraphNode(nodes=[
TextNode(
"Your browser has not supported playing video with HTML5."),
TextNode("You can download video from "),
LinkNode(
href="example.mp4",
target="_blank",
nodes=[
TextNode("here")
]),
TextNode(".")
])
])
self.assertEqual(str(node1), str(node2))
|
python
|
#!/usr/bin/python
import json
import sys
from datetime import datetime
from pprint import pprint
def dateconv(d):
return datetime.strptime(d, "%Y-%m-%dT%H:%M:%S.%fZ").strftime("%Y-%m-%d %a %H:%M")
def printtask(task, lev):
print("%s %s %s" % (
lev,
("DONE" if task["completed"] else "TODO"),
task["title"]
))
print("CREATED: [%s]" % dateconv(task["created_at"]))
if (task["completed"]):
print("CLOSED: [%s]" % dateconv(task["completed_at"]))
with open(sys.argv[1]) as data_f:
data = json.load(data_f)
print("* Wunderlist")
print("EXPORTED: [%s]" % datetime.strptime(data["exported"], "%a %b %d %Y %H:%M:%S GMT%z (%Z)").strftime("%Y-%m-%d %a %H:%M"))
data = data["data"]
for wlist in data["lists"]:
print("** %s" % wlist["title"])
print("CREATED: [%s]" % dateconv(wlist["created_at"]))
for task in (task for task in data["tasks"] if task["list_id"] == wlist["id"]):
printtask(task, "***")
for note in (note for note in data["notes"] if note["task_id"] == task["id"] and note["content"]):
print(note["content"])
for subtask in (subtask for subtask in data["subtasks"] if subtask["task_id"] == task["id"]):
printtask(subtask, "****")
|
python
|
import unittest
from mitama._extra import _classproperty
class TestClassProperty(unittest.TestCase):
def test_getter(self):
class ClassA:
@_classproperty
def value(cls):
return "hello, world!"
self.assertEqual(ClassA.value, "hello, world!")
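# For context, a minimal descriptor of this kind (an illustrative sketch, not
# mitama's actual implementation) can be written as:
#
#     class _classproperty:
#         def __init__(self, fget):
#             self.fget = fget
#         def __get__(self, obj, owner):
#             return self.fget(owner)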
|
python
|