# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'nxtWin7_h900.ui'
#
# Created: Sat Jul 5 16:58:40 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# NOTE (assumption): KLed, used for kled_poll7 below, is not a PyQt4 class.
# The import is added here so the module resolves on its own; the widget is
# assumed to come from PyKDE4's kdeui module, where KLed normally lives.
from PyKDE4.kdeui import KLed
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s

try:
    _encoding = QtGui.QApplication.UnicodeUTF8

    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
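# Ui_MainWindow is the pyuic4-generated layout class for the "NxtPowerTools Api
# Access" window: setupUi() builds the left-hand tab widgets ("nxt requests",
# "sessMan data", "logFile"), the grids of per-request push buttons, the
# "nxt server reply" text pane and the full-query line edit; retranslateUi()
# assigns all user-visible strings.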
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(1280, 880)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setMaximumSize(QtCore.QSize(1280, 900))
MainWindow.setAutoFillBackground(True)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.tabWidget_Req = QtGui.QTabWidget(self.centralwidget)
self.tabWidget_Req.setGeometry(QtCore.QRect(380, 0, 366, 821))
self.tabWidget_Req.setObjectName(_fromUtf8("tabWidget_Req"))
self.tab = QtGui.QWidget()
self.tab.setObjectName(_fromUtf8("tab"))
self.gridLayoutWidget_2 = QtGui.QWidget(self.tab)
self.gridLayoutWidget_2.setGeometry(QtCore.QRect(0, 130, 361, 616))
self.gridLayoutWidget_2.setObjectName(_fromUtf8("gridLayoutWidget_2"))
self.gridLayout_2 = QtGui.QGridLayout(self.gridLayoutWidget_2)
self.gridLayout_2.setSizeConstraint(QtGui.QLayout.SetNoConstraint)
self.gridLayout_2.setContentsMargins(6, 0, 0, 8)
self.gridLayout_2.setHorizontalSpacing(10)
self.gridLayout_2.setVerticalSpacing(15)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.lineEdit1_nxtQ12 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ12.setObjectName(_fromUtf8("lineEdit1_nxtQ12"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ12, 11, 0, 1, 1)
self.lineEdit1_nxtQ3 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ3.setObjectName(_fromUtf8("lineEdit1_nxtQ3"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ3, 2, 0, 1, 1)
self.lineEdit1_nxtQ6_Val6 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ6_Val6.setObjectName(_fromUtf8("lineEdit1_nxtQ6_Val6"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ6_Val6, 5, 1, 1, 1)
self.lineEdit1_nxtQ2 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ2.setObjectName(_fromUtf8("lineEdit1_nxtQ2"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ2, 1, 0, 1, 1)
self.lineEdit1_nxtQ8_Val8 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ8_Val8.setObjectName(_fromUtf8("lineEdit1_nxtQ8_Val8"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ8_Val8, 7, 1, 1, 1)
self.lineEdit1_nxtQ5 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ5.setObjectName(_fromUtf8("lineEdit1_nxtQ5"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ5, 4, 0, 1, 1)
self.lineEdit1_nxtQ7_Val7 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ7_Val7.setObjectName(_fromUtf8("lineEdit1_nxtQ7_Val7"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ7_Val7, 6, 1, 1, 1)
self.lineEdit1_nxtQ6 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ6.setObjectName(_fromUtf8("lineEdit1_nxtQ6"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ6, 5, 0, 1, 1)
self.lineEdit1_nxtQ2_Val2 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ2_Val2.setObjectName(_fromUtf8("lineEdit1_nxtQ2_Val2"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ2_Val2, 1, 1, 1, 1)
self.lineEdit1_nxtQ7 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ7.setObjectName(_fromUtf8("lineEdit1_nxtQ7"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ7, 6, 0, 1, 1)
self.lineEdit1_nxtQ9_Val9 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ9_Val9.setObjectName(_fromUtf8("lineEdit1_nxtQ9_Val9"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ9_Val9, 8, 1, 1, 1)
self.lineEdit1_nxtQ3_Val3 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ3_Val3.setObjectName(_fromUtf8("lineEdit1_nxtQ3_Val3"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ3_Val3, 2, 1, 1, 1)
self.lineEdit1_nxtQ5_Val5 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ5_Val5.setObjectName(_fromUtf8("lineEdit1_nxtQ5_Val5"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ5_Val5, 4, 1, 1, 1)
self.lineEdit1_nxtQ8 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ8.setObjectName(_fromUtf8("lineEdit1_nxtQ8"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ8, 7, 0, 1, 1)
self.lineEdit1_nxtQ1_Val1 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ1_Val1.setObjectName(_fromUtf8("lineEdit1_nxtQ1_Val1"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ1_Val1, 0, 1, 1, 1)
self.lineEdit1_nxtQ1 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ1.setObjectName(_fromUtf8("lineEdit1_nxtQ1"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ1, 0, 0, 1, 1)
self.lineEdit1_nxtQ9 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ9.setObjectName(_fromUtf8("lineEdit1_nxtQ9"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ9, 8, 0, 1, 1)
self.lineEdit1_nxtQ4 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ4.setObjectName(_fromUtf8("lineEdit1_nxtQ4"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ4, 3, 0, 1, 1)
self.lineEdit1_nxtQ4_Val4 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ4_Val4.setObjectName(_fromUtf8("lineEdit1_nxtQ4_Val4"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ4_Val4, 3, 1, 1, 1)
self.lineEdit1_nxtQ10_Val10 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ10_Val10.setObjectName(_fromUtf8("lineEdit1_nxtQ10_Val10"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ10_Val10, 9, 1, 1, 1)
self.lineEdit1_nxtQ10 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ10.setObjectName(_fromUtf8("lineEdit1_nxtQ10"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ10, 9, 0, 1, 1)
self.lineEdit1_nxtQ11 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ11.setObjectName(_fromUtf8("lineEdit1_nxtQ11"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ11, 10, 0, 1, 1)
self.lineEdit1_nxtQ13 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ13.setObjectName(_fromUtf8("lineEdit1_nxtQ13"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ13, 12, 0, 1, 1)
self.lineEdit1_nxtQ14 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ14.setObjectName(_fromUtf8("lineEdit1_nxtQ14"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ14, 13, 0, 1, 1)
self.lineEdit1_nxtQ11_Val11 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ11_Val11.setObjectName(_fromUtf8("lineEdit1_nxtQ11_Val11"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ11_Val11, 10, 1, 1, 1)
self.lineEdit1_nxtQ12_Val12 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ12_Val12.setObjectName(_fromUtf8("lineEdit1_nxtQ12_Val12"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ12_Val12, 11, 1, 1, 1)
self.lineEdit1_nxtQ13_Val13 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ13_Val13.setObjectName(_fromUtf8("lineEdit1_nxtQ13_Val13"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ13_Val13, 12, 1, 1, 1)
self.lineEdit1_nxtQ14_Val14 = QtGui.QLineEdit(self.gridLayoutWidget_2)
self.lineEdit1_nxtQ14_Val14.setObjectName(_fromUtf8("lineEdit1_nxtQ14_Val14"))
self.gridLayout_2.addWidget(self.lineEdit1_nxtQ14_Val14, 13, 1, 1, 1)
self.gridLayout_2.setColumnStretch(0, 3)
self.gridLayout_2.setColumnStretch(1, 4)
self.label_serverAddress = QtGui.QLabel(self.tab)
self.label_serverAddress.setGeometry(QtCore.QRect(0, 5, 61, 41))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label_serverAddress.setFont(font)
self.label_serverAddress.setAlignment(QtCore.Qt.AlignCenter)
self.label_serverAddress.setObjectName(_fromUtf8("label_serverAddress"))
self.label_account = QtGui.QLabel(self.tab)
self.label_account.setGeometry(QtCore.QRect(0, 35, 71, 41))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label_account.setFont(font)
self.label_account.setAlignment(QtCore.Qt.AlignCenter)
self.label_account.setObjectName(_fromUtf8("label_account"))
self.lineEdit_account7 = QtGui.QLineEdit(self.tab)
self.lineEdit_account7.setGeometry(QtCore.QRect(70, 40, 261, 31))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.lineEdit_account7.setFont(font)
self.lineEdit_account7.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_account7.setObjectName(_fromUtf8("lineEdit_account7"))
self.lineEdit_nxtServer = QtGui.QLineEdit(self.tab)
self.lineEdit_nxtServer.setGeometry(QtCore.QRect(70, 5, 261, 31))
font = QtGui.QFont()
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.lineEdit_nxtServer.setFont(font)
self.lineEdit_nxtServer.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_nxtServer.setObjectName(_fromUtf8("lineEdit_nxtServer"))
self.tabWidget_Req.addTab(self.tab, _fromUtf8(""))
self.tab_2 = QtGui.QWidget()
self.tab_2.setObjectName(_fromUtf8("tab_2"))
self.gridLayoutWidget_6 = QtGui.QWidget(self.tab_2)
self.gridLayoutWidget_6.setGeometry(QtCore.QRect(0, 80, 331, 581))
self.gridLayoutWidget_6.setObjectName(_fromUtf8("gridLayoutWidget_6"))
self.gridLayout_6 = QtGui.QGridLayout(self.gridLayoutWidget_6)
self.gridLayout_6.setMargin(0)
self.gridLayout_6.setObjectName(_fromUtf8("gridLayout_6"))
self.lineEdit_sessDat1_10 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_10.setFont(font)
self.lineEdit_sessDat1_10.setMaxLength(2000)
self.lineEdit_sessDat1_10.setObjectName(_fromUtf8("lineEdit_sessDat1_10"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_10, 13, 1, 1, 1)
self.lineEdit_sessDat1_14 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_14.setFont(font)
self.lineEdit_sessDat1_14.setMaxLength(2000)
self.lineEdit_sessDat1_14.setObjectName(_fromUtf8("lineEdit_sessDat1_14"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_14, 17, 1, 1, 1)
self.lineEdit_sessDat1 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1.setFont(font)
self.lineEdit_sessDat1.setMaxLength(2000)
self.lineEdit_sessDat1.setObjectName(_fromUtf8("lineEdit_sessDat1"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1, 4, 1, 1, 1)
self.lineEdit_sessDat1_6 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_6.setFont(font)
self.lineEdit_sessDat1_6.setMaxLength(2000)
self.lineEdit_sessDat1_6.setObjectName(_fromUtf8("lineEdit_sessDat1_6"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_6, 9, 1, 1, 1)
self.label_NXTServer = QtGui.QLabel(self.gridLayoutWidget_6)
self.label_NXTServer.setAlignment(QtCore.Qt.AlignCenter)
self.label_NXTServer.setObjectName(_fromUtf8("label_NXTServer"))
self.gridLayout_6.addWidget(self.label_NXTServer, 0, 0, 1, 1)
self.lineEdit_passPhr7 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_passPhr7.setFont(font)
self.lineEdit_passPhr7.setMaxLength(200)
self.lineEdit_passPhr7.setEchoMode(QtGui.QLineEdit.Password)
self.lineEdit_passPhr7.setObjectName(_fromUtf8("lineEdit_passPhr7"))
self.gridLayout_6.addWidget(self.lineEdit_passPhr7, 2, 1, 1, 1)
self.lineEdit_account1 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_account1.setFont(font)
self.lineEdit_account1.setMaxLength(200)
self.lineEdit_account1.setObjectName(_fromUtf8("lineEdit_account1"))
self.gridLayout_6.addWidget(self.lineEdit_account1, 1, 1, 1, 1)
self.label_NXTServer_2 = QtGui.QLabel(self.gridLayoutWidget_6)
self.label_NXTServer_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_NXTServer_2.setObjectName(_fromUtf8("label_NXTServer_2"))
self.gridLayout_6.addWidget(self.label_NXTServer_2, 1, 0, 1, 1)
self.label_passPhrase = QtGui.QLabel(self.gridLayoutWidget_6)
self.label_passPhrase.setAlignment(QtCore.Qt.AlignCenter)
self.label_passPhrase.setObjectName(_fromUtf8("label_passPhrase"))
self.gridLayout_6.addWidget(self.label_passPhrase, 2, 0, 1, 1)
self.label_pubKey = QtGui.QLabel(self.gridLayoutWidget_6)
self.label_pubKey.setAlignment(QtCore.Qt.AlignCenter)
self.label_pubKey.setObjectName(_fromUtf8("label_pubKey"))
self.gridLayout_6.addWidget(self.label_pubKey, 3, 0, 1, 1)
self.lineEdit_publicKey1 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_publicKey1.setFont(font)
self.lineEdit_publicKey1.setMaxLength(2000)
self.lineEdit_publicKey1.setObjectName(_fromUtf8("lineEdit_publicKey1"))
self.gridLayout_6.addWidget(self.lineEdit_publicKey1, 3, 1, 1, 1)
self.lineEdit_nxtServer1 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.lineEdit_nxtServer1.setFont(font)
self.lineEdit_nxtServer1.setMaxLength(200)
self.lineEdit_nxtServer1.setObjectName(_fromUtf8("lineEdit_nxtServer1"))
self.gridLayout_6.addWidget(self.lineEdit_nxtServer1, 0, 1, 1, 1)
self.lineEdit_sessDat1_4 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_4.setFont(font)
self.lineEdit_sessDat1_4.setMaxLength(2000)
self.lineEdit_sessDat1_4.setObjectName(_fromUtf8("lineEdit_sessDat1_4"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_4, 7, 1, 1, 1)
self.lineEdit_sessDat1_8 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_8.setFont(font)
self.lineEdit_sessDat1_8.setMaxLength(2000)
self.lineEdit_sessDat1_8.setObjectName(_fromUtf8("lineEdit_sessDat1_8"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_8, 11, 1, 1, 1)
self.lineEdit_sessDat1_12 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_12.setFont(font)
self.lineEdit_sessDat1_12.setMaxLength(2000)
self.lineEdit_sessDat1_12.setObjectName(_fromUtf8("lineEdit_sessDat1_12"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_12, 15, 1, 1, 1)
self.lineEdit_sessDat1_2 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_2.setFont(font)
self.lineEdit_sessDat1_2.setMaxLength(2000)
self.lineEdit_sessDat1_2.setObjectName(_fromUtf8("lineEdit_sessDat1_2"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_2, 5, 1, 1, 1)
self.lineEdit_sessDat1_13 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_13.setFont(font)
self.lineEdit_sessDat1_13.setMaxLength(2000)
self.lineEdit_sessDat1_13.setObjectName(_fromUtf8("lineEdit_sessDat1_13"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_13, 16, 1, 1, 1)
self.lineEdit_sessDat1_7 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_7.setFont(font)
self.lineEdit_sessDat1_7.setMaxLength(2000)
self.lineEdit_sessDat1_7.setObjectName(_fromUtf8("lineEdit_sessDat1_7"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_7, 10, 1, 1, 1)
self.lineEdit_sessDat1_9 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_9.setFont(font)
self.lineEdit_sessDat1_9.setMaxLength(2000)
self.lineEdit_sessDat1_9.setObjectName(_fromUtf8("lineEdit_sessDat1_9"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_9, 12, 1, 1, 1)
self.lineEdit_sessDat1_11 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_11.setFont(font)
self.lineEdit_sessDat1_11.setMaxLength(2000)
self.lineEdit_sessDat1_11.setObjectName(_fromUtf8("lineEdit_sessDat1_11"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_11, 14, 1, 1, 1)
self.lineEdit_sessDat1_3 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_3.setFont(font)
self.lineEdit_sessDat1_3.setMaxLength(2000)
self.lineEdit_sessDat1_3.setObjectName(_fromUtf8("lineEdit_sessDat1_3"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_3, 6, 1, 1, 1)
self.lineEdit_sessDat1_5 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_5.setFont(font)
self.lineEdit_sessDat1_5.setMaxLength(2000)
self.lineEdit_sessDat1_5.setObjectName(_fromUtf8("lineEdit_sessDat1_5"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_5, 8, 1, 1, 1)
self.lineEdit_sessDat1_15 = QtGui.QLineEdit(self.gridLayoutWidget_6)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.lineEdit_sessDat1_15.setFont(font)
self.lineEdit_sessDat1_15.setMaxLength(2000)
self.lineEdit_sessDat1_15.setObjectName(_fromUtf8("lineEdit_sessDat1_15"))
self.gridLayout_6.addWidget(self.lineEdit_sessDat1_15, 18, 1, 1, 1)
self.pb_pullData1 = QtGui.QPushButton(self.tab_2)
self.pb_pullData1.setGeometry(QtCore.QRect(150, 40, 181, 31))
self.pb_pullData1.setObjectName(_fromUtf8("pb_pullData1"))
self.tabWidget_Req.addTab(self.tab_2, _fromUtf8(""))
self.tab_3 = QtGui.QWidget()
self.tab_3.setObjectName(_fromUtf8("tab_3"))
self.tabWidget_Req.addTab(self.tab_3, _fromUtf8(""))
self.groupBox = QtGui.QGroupBox(self.centralwidget)
self.groupBox.setGeometry(QtCore.QRect(750, 65, 521, 756))
self.groupBox.setAutoFillBackground(True)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.textEdit_NRSRaw1 = QtGui.QTextEdit(self.groupBox)
self.textEdit_NRSRaw1.setGeometry(QtCore.QRect(5, 20, 516, 731))
self.textEdit_NRSRaw1.setReadOnly(True)
self.textEdit_NRSRaw1.setObjectName(_fromUtf8("textEdit_NRSRaw1"))
self.tabWidget_querySel = QtGui.QTabWidget(self.centralwidget)
self.tabWidget_querySel.setGeometry(QtCore.QRect(0, -5, 381, 826))
self.tabWidget_querySel.setObjectName(_fromUtf8("tabWidget_querySel"))
self.tab_4 = QtGui.QWidget()
self.tab_4.setObjectName(_fromUtf8("tab_4"))
self.gridLayoutWidget = QtGui.QWidget(self.tab_4)
self.gridLayoutWidget.setGeometry(QtCore.QRect(10, 50, 356, 506))
self.gridLayoutWidget.setObjectName(_fromUtf8("gridLayoutWidget"))
self.gridLayout = QtGui.QGridLayout(self.gridLayoutWidget)
self.gridLayout.setMargin(0)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.pb_getConstants = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getConstants.setFont(font)
self.pb_getConstants.setObjectName(_fromUtf8("pb_getConstants"))
self.gridLayout.addWidget(self.pb_getConstants, 2, 1, 1, 1)
self.pb_decodeToken = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_decodeToken.setFont(font)
self.pb_decodeToken.setObjectName(_fromUtf8("pb_decodeToken"))
self.gridLayout.addWidget(self.pb_decodeToken, 4, 1, 1, 1)
self.pb_getBlockId = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getBlockId.setFont(font)
self.pb_getBlockId.setObjectName(_fromUtf8("pb_getBlockId"))
self.gridLayout.addWidget(self.pb_getBlockId, 1, 1, 1, 1)
self.pb_getBlock = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getBlock.setFont(font)
self.pb_getBlock.setObjectName(_fromUtf8("pb_getBlock"))
self.gridLayout.addWidget(self.pb_getBlock, 0, 2, 1, 1)
self.pb_broadcastTransaction = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_broadcastTransaction.setFont(font)
self.pb_broadcastTransaction.setObjectName(_fromUtf8("pb_broadcastTransaction"))
self.gridLayout.addWidget(self.pb_broadcastTransaction, 0, 1, 1, 1)
self.pb_getPeer = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getPeer.setFont(font)
self.pb_getPeer.setObjectName(_fromUtf8("pb_getPeer"))
self.gridLayout.addWidget(self.pb_getPeer, 3, 2, 1, 1)
self.pb_getPeers = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getPeers.setFont(font)
self.pb_getPeers.setObjectName(_fromUtf8("pb_getPeers"))
self.gridLayout.addWidget(self.pb_getPeers, 4, 2, 1, 1)
self.pb_getUnconfirmedTransactionIds = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getUnconfirmedTransactionIds.setFont(font)
self.pb_getUnconfirmedTransactionIds.setObjectName(_fromUtf8("pb_getUnconfirmedTransactionIds"))
self.gridLayout.addWidget(self.pb_getUnconfirmedTransactionIds, 14, 2, 1, 1)
self.pb_decodeHallmark = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_decodeHallmark.setFont(font)
self.pb_decodeHallmark.setObjectName(_fromUtf8("pb_decodeHallmark"))
self.gridLayout.addWidget(self.pb_decodeHallmark, 3, 1, 1, 1)
self.pb_calculateFullHash = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_calculateFullHash.setFont(font)
self.pb_calculateFullHash.setObjectName(_fromUtf8("pb_calculateFullHash"))
self.gridLayout.addWidget(self.pb_calculateFullHash, 5, 1, 1, 1)
self.pb_markHost = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_markHost.setFont(font)
self.pb_markHost.setObjectName(_fromUtf8("pb_markHost"))
self.gridLayout.addWidget(self.pb_markHost, 5, 2, 1, 1)
self.pb_getBlockchainStatus = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getBlockchainStatus.setFont(font)
self.pb_getBlockchainStatus.setObjectName(_fromUtf8("pb_getBlockchainStatus"))
self.gridLayout.addWidget(self.pb_getBlockchainStatus, 1, 2, 1, 1)
self.pb_getState = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getState.setFont(font)
self.pb_getState.setObjectName(_fromUtf8("pb_getState"))
self.gridLayout.addWidget(self.pb_getState, 6, 1, 1, 1)
self.pb_getTime = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getTime.setFont(font)
self.pb_getTime.setObjectName(_fromUtf8("pb_getTime"))
self.gridLayout.addWidget(self.pb_getTime, 6, 2, 1, 1)
self.pb_getTransactionBytes = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getTransactionBytes.setFont(font)
self.pb_getTransactionBytes.setObjectName(_fromUtf8("pb_getTransactionBytes"))
self.gridLayout.addWidget(self.pb_getTransactionBytes, 7, 2, 1, 1)
self.pb_getTransaction = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getTransaction.setFont(font)
self.pb_getTransaction.setObjectName(_fromUtf8("pb_getTransaction"))
self.gridLayout.addWidget(self.pb_getTransaction, 7, 1, 1, 1)
self.pb_getUnconfirmedTransactions = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getUnconfirmedTransactions.setFont(font)
self.pb_getUnconfirmedTransactions.setObjectName(_fromUtf8("pb_getUnconfirmedTransactions"))
self.gridLayout.addWidget(self.pb_getUnconfirmedTransactions, 14, 1, 1, 1)
self.pb_parseTransaction = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_parseTransaction.setFont(font)
self.pb_parseTransaction.setObjectName(_fromUtf8("pb_parseTransaction"))
self.gridLayout.addWidget(self.pb_parseTransaction, 13, 2, 1, 1)
self.pb_getNextBlockGenerators = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getNextBlockGenerators.setFont(font)
self.pb_getNextBlockGenerators.setObjectName(_fromUtf8("pb_getNextBlockGenerators"))
self.gridLayout.addWidget(self.pb_getNextBlockGenerators, 2, 2, 1, 1)
self.pb_getMyInfo = QtGui.QPushButton(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getMyInfo.setFont(font)
self.pb_getMyInfo.setObjectName(_fromUtf8("pb_getMyInfo"))
self.gridLayout.addWidget(self.pb_getMyInfo, 13, 1, 1, 1)
self.tabWidget_querySel.addTab(self.tab_4, _fromUtf8(""))
self.tab_5 = QtGui.QWidget()
self.tab_5.setObjectName(_fromUtf8("tab_5"))
self.gridLayoutWidget_7 = QtGui.QWidget(self.tab_5)
self.gridLayoutWidget_7.setGeometry(QtCore.QRect(25, 525, 311, 121))
self.gridLayoutWidget_7.setObjectName(_fromUtf8("gridLayoutWidget_7"))
self.gridLayout_7 = QtGui.QGridLayout(self.gridLayoutWidget_7)
self.gridLayout_7.setMargin(0)
self.gridLayout_7.setObjectName(_fromUtf8("gridLayout_7"))
self.pb_sendMessage = QtGui.QPushButton(self.gridLayoutWidget_7)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_sendMessage.setFont(font)
self.pb_sendMessage.setObjectName(_fromUtf8("pb_sendMessage"))
self.gridLayout_7.addWidget(self.pb_sendMessage, 1, 0, 1, 1)
self.pb_encryptTo = QtGui.QPushButton(self.gridLayoutWidget_7)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_encryptTo.setFont(font)
self.pb_encryptTo.setObjectName(_fromUtf8("pb_encryptTo"))
self.gridLayout_7.addWidget(self.pb_encryptTo, 2, 1, 1, 1)
self.pb_decryptFrom = QtGui.QPushButton(self.gridLayoutWidget_7)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_decryptFrom.setFont(font)
self.pb_decryptFrom.setObjectName(_fromUtf8("pb_decryptFrom"))
self.gridLayout_7.addWidget(self.pb_decryptFrom, 2, 0, 1, 1)
self.pb_sendEncryptedNote = QtGui.QPushButton(self.gridLayoutWidget_7)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_sendEncryptedNote.setFont(font)
self.pb_sendEncryptedNote.setObjectName(_fromUtf8("pb_sendEncryptedNote"))
self.gridLayout_7.addWidget(self.pb_sendEncryptedNote, 1, 1, 1, 1)
self.pb_readEncryptedNote = QtGui.QPushButton(self.gridLayoutWidget_7)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_readEncryptedNote.setFont(font)
self.pb_readEncryptedNote.setObjectName(_fromUtf8("pb_readEncryptedNote"))
self.gridLayout_7.addWidget(self.pb_readEncryptedNote, 0, 0, 1, 1)
self.gridLayoutWidget_5 = QtGui.QWidget(self.tab_5)
self.gridLayoutWidget_5.setGeometry(QtCore.QRect(40, 670, 281, 101))
self.gridLayoutWidget_5.setObjectName(_fromUtf8("gridLayoutWidget_5"))
self.gridLayout_5 = QtGui.QGridLayout(self.gridLayoutWidget_5)
self.gridLayout_5.setMargin(0)
self.gridLayout_5.setObjectName(_fromUtf8("gridLayout_5"))
self.pb_getPoll = QtGui.QPushButton(self.gridLayoutWidget_5)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getPoll.setFont(font)
self.pb_getPoll.setObjectName(_fromUtf8("pb_getPoll"))
self.gridLayout_5.addWidget(self.pb_getPoll, 2, 0, 1, 1)
self.pb_castVote = QtGui.QPushButton(self.gridLayoutWidget_5)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_castVote.setFont(font)
self.pb_castVote.setObjectName(_fromUtf8("pb_castVote"))
self.gridLayout_5.addWidget(self.pb_castVote, 1, 0, 1, 1)
self.pb_getPollIds = QtGui.QPushButton(self.gridLayoutWidget_5)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getPollIds.setFont(font)
self.pb_getPollIds.setObjectName(_fromUtf8("pb_getPollIds"))
self.gridLayout_5.addWidget(self.pb_getPollIds, 1, 1, 1, 1)
self.pb_createPoll = QtGui.QPushButton(self.gridLayoutWidget_5)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_createPoll.setFont(font)
self.pb_createPoll.setObjectName(_fromUtf8("pb_createPoll"))
self.gridLayout_5.addWidget(self.pb_createPoll, 2, 1, 1, 1)
self.gridLayoutWidget_8 = QtGui.QWidget(self.tab_5)
self.gridLayoutWidget_8.setGeometry(QtCore.QRect(5, 10, 356, 356))
self.gridLayoutWidget_8.setObjectName(_fromUtf8("gridLayoutWidget_8"))
self.gridLayout_8 = QtGui.QGridLayout(self.gridLayoutWidget_8)
self.gridLayout_8.setMargin(0)
self.gridLayout_8.setObjectName(_fromUtf8("gridLayout_8"))
self.pb_stopForging = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Ubuntu"))
font.setPointSize(8)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.pb_stopForging.setFont(font)
self.pb_stopForging.setObjectName(_fromUtf8("pb_stopForging"))
self.gridLayout_8.addWidget(self.pb_stopForging, 7, 0, 1, 1)
self.pb_leaseBalance = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_leaseBalance.setFont(font)
self.pb_leaseBalance.setObjectName(_fromUtf8("pb_leaseBalance"))
self.gridLayout_8.addWidget(self.pb_leaseBalance, 4, 0, 1, 1)
self.pb_getForging = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Ubuntu"))
font.setPointSize(8)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.pb_getForging.setFont(font)
self.pb_getForging.setObjectName(_fromUtf8("pb_getForging"))
self.gridLayout_8.addWidget(self.pb_getForging, 5, 0, 1, 1)
self.pb_signTransaction = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Ubuntu"))
font.setPointSize(8)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.pb_signTransaction.setFont(font)
self.pb_signTransaction.setObjectName(_fromUtf8("pb_signTransaction"))
self.gridLayout_8.addWidget(self.pb_signTransaction, 8, 0, 1, 1)
self.pb_getAccountBlockIds = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Ubuntu"))
font.setPointSize(8)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.pb_getAccountBlockIds.setFont(font)
self.pb_getAccountBlockIds.setObjectName(_fromUtf8("pb_getAccountBlockIds"))
self.gridLayout_8.addWidget(self.pb_getAccountBlockIds, 0, 1, 1, 1)
self.pb_getAccountId = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Ubuntu"))
font.setPointSize(8)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.pb_getAccountId.setFont(font)
self.pb_getAccountId.setObjectName(_fromUtf8("pb_getAccountId"))
self.gridLayout_8.addWidget(self.pb_getAccountId, 1, 0, 1, 1)
self.pb_getAccount = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Ubuntu"))
font.setPointSize(8)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.pb_getAccount.setFont(font)
self.pb_getAccount.setObjectName(_fromUtf8("pb_getAccount"))
self.gridLayout_8.addWidget(self.pb_getAccount, 0, 0, 1, 1)
self.pb_getAccountTransactionIds = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAccountTransactionIds.setFont(font)
self.pb_getAccountTransactionIds.setObjectName(_fromUtf8("pb_getAccountTransactionIds"))
self.gridLayout_8.addWidget(self.pb_getAccountTransactionIds, 2, 0, 1, 1)
self.pb_getAccountPublicKey = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAccountPublicKey.setFont(font)
self.pb_getAccountPublicKey.setObjectName(_fromUtf8("pb_getAccountPublicKey"))
self.gridLayout_8.addWidget(self.pb_getAccountPublicKey, 1, 1, 1, 1)
self.pb_sendMoney = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_sendMoney.setFont(font)
self.pb_sendMoney.setObjectName(_fromUtf8("pb_sendMoney"))
self.gridLayout_8.addWidget(self.pb_sendMoney, 9, 0, 1, 1)
self.pb_getBalance = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getBalance.setFont(font)
self.pb_getBalance.setObjectName(_fromUtf8("pb_getBalance"))
self.gridLayout_8.addWidget(self.pb_getBalance, 4, 1, 1, 1)
self.pb_rsConvert = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_rsConvert.setFont(font)
self.pb_rsConvert.setObjectName(_fromUtf8("pb_rsConvert"))
self.gridLayout_8.addWidget(self.pb_rsConvert, 9, 1, 1, 1)
self.pb_setAccountInfo = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Ubuntu"))
font.setPointSize(8)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.pb_setAccountInfo.setFont(font)
self.pb_setAccountInfo.setObjectName(_fromUtf8("pb_setAccountInfo"))
self.gridLayout_8.addWidget(self.pb_setAccountInfo, 2, 1, 1, 1)
self.pb_startForging = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Ubuntu"))
font.setPointSize(8)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.pb_startForging.setFont(font)
self.pb_startForging.setObjectName(_fromUtf8("pb_startForging"))
self.gridLayout_8.addWidget(self.pb_startForging, 5, 1, 1, 1)
self.pb_getGuaranteedBalance = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getGuaranteedBalance.setFont(font)
self.pb_getGuaranteedBalance.setObjectName(_fromUtf8("pb_getGuaranteedBalance"))
self.gridLayout_8.addWidget(self.pb_getGuaranteedBalance, 7, 1, 1, 1)
self.pb_generateToken = QtGui.QPushButton(self.gridLayoutWidget_8)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Ubuntu"))
font.setPointSize(8)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.pb_generateToken.setFont(font)
self.pb_generateToken.setObjectName(_fromUtf8("pb_generateToken"))
self.gridLayout_8.addWidget(self.pb_generateToken, 8, 1, 1, 1)
self.gridLayoutWidget_4 = QtGui.QWidget(self.tab_5)
self.gridLayoutWidget_4.setGeometry(QtCore.QRect(50, 380, 251, 111))
self.gridLayoutWidget_4.setObjectName(_fromUtf8("gridLayoutWidget_4"))
self.gridLayout_4 = QtGui.QGridLayout(self.gridLayoutWidget_4)
self.gridLayout_4.setMargin(0)
self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
self.pb_getAliases = QtGui.QPushButton(self.gridLayoutWidget_4)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAliases.setFont(font)
self.pb_getAliases.setObjectName(_fromUtf8("pb_getAliases"))
self.gridLayout_4.addWidget(self.pb_getAliases, 1, 1, 1, 1)
self.pb_getAlias = QtGui.QPushButton(self.gridLayoutWidget_4)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAlias.setFont(font)
self.pb_getAlias.setObjectName(_fromUtf8("pb_getAlias"))
self.gridLayout_4.addWidget(self.pb_getAlias, 0, 1, 1, 1)
self.pb_buyAlias = QtGui.QPushButton(self.gridLayoutWidget_4)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_buyAlias.setFont(font)
self.pb_buyAlias.setObjectName(_fromUtf8("pb_buyAlias"))
self.gridLayout_4.addWidget(self.pb_buyAlias, 0, 0, 1, 1)
self.pb_sellAlias = QtGui.QPushButton(self.gridLayoutWidget_4)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_sellAlias.setFont(font)
self.pb_sellAlias.setObjectName(_fromUtf8("pb_sellAlias"))
self.gridLayout_4.addWidget(self.pb_sellAlias, 1, 0, 1, 1)
self.pb_setAlias = QtGui.QPushButton(self.gridLayoutWidget_4)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_setAlias.setFont(font)
self.pb_setAlias.setObjectName(_fromUtf8("pb_setAlias"))
self.gridLayout_4.addWidget(self.pb_setAlias, 2, 0, 1, 1)
self.tabWidget_querySel.addTab(self.tab_5, _fromUtf8(""))
self.tab_6 = QtGui.QWidget()
self.tab_6.setObjectName(_fromUtf8("tab_6"))
self.gridLayoutWidget_3 = QtGui.QWidget(self.tab_6)
self.gridLayoutWidget_3.setGeometry(QtCore.QRect(10, 350, 351, 425))
self.gridLayoutWidget_3.setObjectName(_fromUtf8("gridLayoutWidget_3"))
self.gridLayout_3 = QtGui.QGridLayout(self.gridLayoutWidget_3)
self.gridLayout_3.setMargin(0)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.pb_getBidOrderIds = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getBidOrderIds.setFont(font)
self.pb_getBidOrderIds.setObjectName(_fromUtf8("pb_getBidOrderIds"))
self.gridLayout_3.addWidget(self.pb_getBidOrderIds, 13, 1, 1, 1)
self.pb_cancelAskOrder = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_cancelAskOrder.setFont(font)
self.pb_cancelAskOrder.setObjectName(_fromUtf8("pb_cancelAskOrder"))
self.gridLayout_3.addWidget(self.pb_cancelAskOrder, 12, 0, 1, 1)
self.pb_cancelBidOrder = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_cancelBidOrder.setFont(font)
self.pb_cancelBidOrder.setObjectName(_fromUtf8("pb_cancelBidOrder"))
self.gridLayout_3.addWidget(self.pb_cancelBidOrder, 12, 1, 1, 1)
self.pb_placeBidOrder = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_placeBidOrder.setFont(font)
self.pb_placeBidOrder.setObjectName(_fromUtf8("pb_placeBidOrder"))
self.gridLayout_3.addWidget(self.pb_placeBidOrder, 11, 1, 1, 1)
self.pb_getAskOrder = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAskOrder.setFont(font)
self.pb_getAskOrder.setObjectName(_fromUtf8("pb_getAskOrder"))
self.gridLayout_3.addWidget(self.pb_getAskOrder, 14, 0, 1, 1)
self.pb_getAskOrderIds = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAskOrderIds.setFont(font)
self.pb_getAskOrderIds.setObjectName(_fromUtf8("pb_getAskOrderIds"))
self.gridLayout_3.addWidget(self.pb_getAskOrderIds, 13, 0, 1, 1)
self.pb_getBidOrder = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getBidOrder.setFont(font)
self.pb_getBidOrder.setObjectName(_fromUtf8("pb_getBidOrder"))
self.gridLayout_3.addWidget(self.pb_getBidOrder, 14, 1, 1, 1)
self.pb_placeAskOrder = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_placeAskOrder.setFont(font)
self.pb_placeAskOrder.setObjectName(_fromUtf8("pb_placeAskOrder"))
self.gridLayout_3.addWidget(self.pb_placeAskOrder, 11, 0, 1, 1)
self.pb_getAccountCurrentBidOrderIds = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAccountCurrentBidOrderIds.setFont(font)
self.pb_getAccountCurrentBidOrderIds.setObjectName(_fromUtf8("pb_getAccountCurrentBidOrderIds"))
self.gridLayout_3.addWidget(self.pb_getAccountCurrentBidOrderIds, 10, 1, 1, 1)
self.pb_getAccountCurrentAskOrderIds = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAccountCurrentAskOrderIds.setFont(font)
self.pb_getAccountCurrentAskOrderIds.setObjectName(_fromUtf8("pb_getAccountCurrentAskOrderIds"))
self.gridLayout_3.addWidget(self.pb_getAccountCurrentAskOrderIds, 10, 0, 1, 1)
self.pb_transferAsset = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_transferAsset.setFont(font)
self.pb_transferAsset.setObjectName(_fromUtf8("pb_transferAsset"))
self.gridLayout_3.addWidget(self.pb_transferAsset, 2, 1, 1, 1)
self.pb_getTrades = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Ubuntu"))
font.setPointSize(8)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.pb_getTrades.setFont(font)
self.pb_getTrades.setObjectName(_fromUtf8("pb_getTrades"))
self.gridLayout_3.addWidget(self.pb_getTrades, 7, 0, 1, 1)
self.pb_getAssets = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAssets.setFont(font)
self.pb_getAssets.setObjectName(_fromUtf8("pb_getAssets"))
self.gridLayout_3.addWidget(self.pb_getAssets, 5, 1, 1, 1)
self.pb_getAllOpenOrders = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAllOpenOrders.setFont(font)
self.pb_getAllOpenOrders.setObjectName(_fromUtf8("pb_getAllOpenOrders"))
self.gridLayout_3.addWidget(self.pb_getAllOpenOrders, 0, 1, 1, 1)
self.pb_getAllAssets = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAllAssets.setFont(font)
self.pb_getAllAssets.setObjectName(_fromUtf8("pb_getAllAssets"))
self.gridLayout_3.addWidget(self.pb_getAllAssets, 0, 0, 1, 1)
self.pb_getAskOrders = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAskOrders.setFont(font)
self.pb_getAskOrders.setObjectName(_fromUtf8("pb_getAskOrders"))
self.gridLayout_3.addWidget(self.pb_getAskOrders, 15, 0, 1, 1)
self.pb_getBidOrders = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getBidOrders.setFont(font)
self.pb_getBidOrders.setObjectName(_fromUtf8("pb_getBidOrders"))
self.gridLayout_3.addWidget(self.pb_getBidOrders, 15, 1, 1, 1)
self.pb_getAssetIds = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAssetIds.setFont(font)
self.pb_getAssetIds.setObjectName(_fromUtf8("pb_getAssetIds"))
self.gridLayout_3.addWidget(self.pb_getAssetIds, 4, 1, 1, 1)
self.pb_getAssetsByIssuer = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAssetsByIssuer.setFont(font)
self.pb_getAssetsByIssuer.setObjectName(_fromUtf8("pb_getAssetsByIssuer"))
self.gridLayout_3.addWidget(self.pb_getAssetsByIssuer, 7, 1, 1, 1)
self.pb_getAsset = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getAsset.setFont(font)
self.pb_getAsset.setObjectName(_fromUtf8("pb_getAsset"))
self.gridLayout_3.addWidget(self.pb_getAsset, 5, 0, 1, 1)
self.pb_issueAsset = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_issueAsset.setFont(font)
self.pb_issueAsset.setObjectName(_fromUtf8("pb_issueAsset"))
self.gridLayout_3.addWidget(self.pb_issueAsset, 4, 0, 1, 1)
self.pb_getAllTrades = QtGui.QPushButton(self.gridLayoutWidget_3)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Ubuntu"))
font.setPointSize(8)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.pb_getAllTrades.setFont(font)
self.pb_getAllTrades.setObjectName(_fromUtf8("pb_getAllTrades"))
self.gridLayout_3.addWidget(self.pb_getAllTrades, 2, 0, 1, 1)
self.gridLayoutWidget_9 = QtGui.QWidget(self.tab_6)
self.gridLayoutWidget_9.setGeometry(QtCore.QRect(5, 70, 366, 271))
self.gridLayoutWidget_9.setObjectName(_fromUtf8("gridLayoutWidget_9"))
self.gridLayout_9 = QtGui.QGridLayout(self.gridLayoutWidget_9)
self.gridLayout_9.setMargin(0)
self.gridLayout_9.setObjectName(_fromUtf8("gridLayout_9"))
self.pb_dgsPriceChange = QtGui.QPushButton(self.gridLayoutWidget_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_dgsPriceChange.setFont(font)
self.pb_dgsPriceChange.setObjectName(_fromUtf8("pb_dgsPriceChange"))
self.gridLayout_9.addWidget(self.pb_dgsPriceChange, 5, 0, 1, 1)
self.pb_getDGSPurchase = QtGui.QPushButton(self.gridLayoutWidget_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getDGSPurchase.setFont(font)
self.pb_getDGSPurchase.setObjectName(_fromUtf8("pb_getDGSPurchase"))
self.gridLayout_9.addWidget(self.pb_getDGSPurchase, 1, 2, 1, 1)
self.pb_dgsFeedback = QtGui.QPushButton(self.gridLayoutWidget_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_dgsFeedback.setFont(font)
self.pb_dgsFeedback.setObjectName(_fromUtf8("pb_dgsFeedback"))
self.gridLayout_9.addWidget(self.pb_dgsFeedback, 4, 0, 1, 1)
self.pb_dgsQuantityChange = QtGui.QPushButton(self.gridLayoutWidget_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_dgsQuantityChange.setFont(font)
self.pb_dgsQuantityChange.setObjectName(_fromUtf8("pb_dgsQuantityChange"))
self.gridLayout_9.addWidget(self.pb_dgsQuantityChange, 6, 0, 1, 1)
self.pb_getDGSPendingPurchases = QtGui.QPushButton(self.gridLayoutWidget_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getDGSPendingPurchases.setFont(font)
self.pb_getDGSPendingPurchases.setObjectName(_fromUtf8("pb_getDGSPendingPurchases"))
self.gridLayout_9.addWidget(self.pb_getDGSPendingPurchases, 1, 0, 1, 1)
self.pb_dgsDelisting = QtGui.QPushButton(self.gridLayoutWidget_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_dgsDelisting.setFont(font)
self.pb_dgsDelisting.setObjectName(_fromUtf8("pb_dgsDelisting"))
self.gridLayout_9.addWidget(self.pb_dgsDelisting, 3, 0, 1, 1)
self.pb_dgsListing = QtGui.QPushButton(self.gridLayoutWidget_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_dgsListing.setFont(font)
self.pb_dgsListing.setObjectName(_fromUtf8("pb_dgsListing"))
self.gridLayout_9.addWidget(self.pb_dgsListing, 4, 2, 1, 1)
self.pb_dgsPurchase = QtGui.QPushButton(self.gridLayoutWidget_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_dgsPurchase.setFont(font)
self.pb_dgsPurchase.setObjectName(_fromUtf8("pb_dgsPurchase"))
self.gridLayout_9.addWidget(self.pb_dgsPurchase, 5, 2, 1, 1)
self.pb_dgsDelivery = QtGui.QPushButton(self.gridLayoutWidget_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_dgsDelivery.setFont(font)
self.pb_dgsDelivery.setObjectName(_fromUtf8("pb_dgsDelivery"))
self.gridLayout_9.addWidget(self.pb_dgsDelivery, 3, 2, 1, 1)
self.pb_dgsRefund = QtGui.QPushButton(self.gridLayoutWidget_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_dgsRefund.setFont(font)
self.pb_dgsRefund.setObjectName(_fromUtf8("pb_dgsRefund"))
self.gridLayout_9.addWidget(self.pb_dgsRefund, 6, 2, 1, 1)
self.pb_getDGSGoods = QtGui.QPushButton(self.gridLayoutWidget_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getDGSGoods.setFont(font)
self.pb_getDGSGoods.setObjectName(_fromUtf8("pb_getDGSGoods"))
self.gridLayout_9.addWidget(self.pb_getDGSGoods, 0, 2, 1, 1)
self.pb_getDGSGood = QtGui.QPushButton(self.gridLayoutWidget_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getDGSGood.setFont(font)
self.pb_getDGSGood.setObjectName(_fromUtf8("pb_getDGSGood"))
self.gridLayout_9.addWidget(self.pb_getDGSGood, 0, 0, 1, 1)
self.pb_getDGSPurchases = QtGui.QPushButton(self.gridLayoutWidget_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getDGSPurchases.setFont(font)
self.pb_getDGSPurchases.setObjectName(_fromUtf8("pb_getDGSPurchases"))
self.gridLayout_9.addWidget(self.pb_getDGSPurchases, 2, 0, 1, 1)
self.pb_getState_2 = QtGui.QPushButton(self.tab_6)
self.pb_getState_2.setGeometry(QtCore.QRect(10, 20, 96, 27))
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getState_2.setFont(font)
self.pb_getState_2.setObjectName(_fromUtf8("pb_getState_2"))
self.pb_getAccount_2 = QtGui.QPushButton(self.tab_6)
self.pb_getAccount_2.setGeometry(QtCore.QRect(120, 20, 91, 27))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Ubuntu"))
font.setPointSize(8)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.pb_getAccount_2.setFont(font)
self.pb_getAccount_2.setObjectName(_fromUtf8("pb_getAccount_2"))
self.pb_getTransaction_2 = QtGui.QPushButton(self.tab_6)
self.pb_getTransaction_2.setGeometry(QtCore.QRect(230, 20, 111, 27))
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.pb_getTransaction_2.setFont(font)
self.pb_getTransaction_2.setObjectName(_fromUtf8("pb_getTransaction_2"))
self.tabWidget_querySel.addTab(self.tab_6, _fromUtf8(""))
self.lineEdit1_nxtFullQ = QtGui.QLineEdit(self.centralwidget)
self.lineEdit1_nxtFullQ.setGeometry(QtCore.QRect(90, 830, 1171, 27))
font = QtGui.QFont()
font.setPointSize(10)
self.lineEdit1_nxtFullQ.setFont(font)
self.lineEdit1_nxtFullQ.setObjectName(_fromUtf8("lineEdit1_nxtFullQ"))
self.label_fullQuery = QtGui.QLabel(self.centralwidget)
self.label_fullQuery.setGeometry(QtCore.QRect(10, 830, 71, 21))
self.label_fullQuery.setObjectName(_fromUtf8("label_fullQuery"))
self.pb_sendQuery = QtGui.QPushButton(self.centralwidget)
self.pb_sendQuery.setGeometry(QtCore.QRect(750, 5, 131, 51))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.pb_sendQuery.setFont(font)
self.pb_sendQuery.setObjectName(_fromUtf8("pb_sendQuery"))
self.pb_clearText = QtGui.QPushButton(self.centralwidget)
self.pb_clearText.setGeometry(QtCore.QRect(985, 10, 101, 31))
self.pb_clearText.setObjectName(_fromUtf8("pb_clearText"))
self.cb_scrollCheck = QtGui.QCheckBox(self.centralwidget)
self.cb_scrollCheck.setGeometry(QtCore.QRect(1190, 50, 71, 22))
self.cb_scrollCheck.setObjectName(_fromUtf8("cb_scrollCheck"))
self.lcdNumber_time7 = QtGui.QLCDNumber(self.centralwidget)
self.lcdNumber_time7.setGeometry(QtCore.QRect(1100, 10, 161, 31))
self.lcdNumber_time7.setObjectName(_fromUtf8("lcdNumber_time7"))
self.gb_poll7 = QtGui.QGroupBox(self.centralwidget)
self.gb_poll7.setGeometry(QtCore.QRect(895, 10, 81, 46))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gb_poll7.setFont(font)
self.gb_poll7.setCheckable(True)
self.gb_poll7.setObjectName(_fromUtf8("gb_poll7"))
self.lineEdit_pollTimer7 = QtGui.QLineEdit(self.gb_poll7)
self.lineEdit_pollTimer7.setGeometry(QtCore.QRect(25, 24, 50, 16))
font = QtGui.QFont()
font.setPointSize(10)
self.lineEdit_pollTimer7.setFont(font)
self.lineEdit_pollTimer7.setMaxLength(200)
self.lineEdit_pollTimer7.setObjectName(_fromUtf8("lineEdit_pollTimer7"))
self.kled_poll7 = KLed(self.gb_poll7)
self.kled_poll7.setGeometry(QtCore.QRect(0, 20, 26, 21))
self.kled_poll7.setColor(QtGui.QColor(0, 10, 250))
self.kled_poll7.setObjectName(_fromUtf8("kled_poll7"))
MainWindow.setCentralWidget(self.centralwidget)
self.actionMenu = QtGui.QAction(MainWindow)
self.actionMenu.setObjectName(_fromUtf8("actionMenu"))
self.retranslateUi(MainWindow)
self.tabWidget_Req.setCurrentIndex(0)
self.tabWidget_querySel.setCurrentIndex(2)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
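# retranslateUi() only (re)applies the translatable captions; the push-button
# texts below mirror the Nxt (NRS) API request names, which the surrounding
# application code is expected to wire to the actual HTTP queries.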
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "NxtPowerTools Api Access", None))
self.label_serverAddress.setText(_translate("MainWindow", "server", None))
self.label_account.setText(_translate("MainWindow", "account", None))
self.tabWidget_Req.setTabText(self.tabWidget_Req.indexOf(self.tab), _translate("MainWindow", "nxt requests", None))
self.label_NXTServer.setText(_translate("MainWindow", "Server", None))
self.label_NXTServer_2.setText(_translate("MainWindow", "Account", None))
self.label_passPhrase.setText(_translate("MainWindow", "Pass", None))
self.label_pubKey.setText(_translate("MainWindow", "PubKey", None))
self.pb_pullData1.setText(_translate("MainWindow", "pull data from sessMan", None))
self.tabWidget_Req.setTabText(self.tabWidget_Req.indexOf(self.tab_2), _translate("MainWindow", "sessMan data", None))
self.tabWidget_Req.setTabText(self.tabWidget_Req.indexOf(self.tab_3), _translate("MainWindow", "logFile", None))
self.groupBox.setTitle(_translate("MainWindow", "nxt server reply", None))
self.pb_getConstants.setText(_translate("MainWindow", "getConstants", None))
self.pb_decodeToken.setText(_translate("MainWindow", "decodeToken", None))
self.pb_getBlockId.setText(_translate("MainWindow", "getBlockId", None))
self.pb_getBlock.setText(_translate("MainWindow", "getBlock", None))
self.pb_broadcastTransaction.setText(_translate("MainWindow", "broadcastTransaction", None))
self.pb_getPeer.setText(_translate("MainWindow", "getPeer", None))
self.pb_getPeers.setText(_translate("MainWindow", "getPeers", None))
self.pb_getUnconfirmedTransactionIds.setText(_translate("MainWindow", "getUnconfTransactionIds", None))
self.pb_decodeHallmark.setText(_translate("MainWindow", "decodeHallmark", None))
self.pb_calculateFullHash.setText(_translate("MainWindow", "calculateFullHash", None))
self.pb_markHost.setText(_translate("MainWindow", "markHost", None))
self.pb_getBlockchainStatus.setText(_translate("MainWindow", "getBlockchainStatus", None))
self.pb_getState.setText(_translate("MainWindow", "getState", None))
self.pb_getTime.setText(_translate("MainWindow", "getTime", None))
self.pb_getTransactionBytes.setText(_translate("MainWindow", "getTransactionBytes", None))
self.pb_getTransaction.setText(_translate("MainWindow", "getTransaction", None))
self.pb_getUnconfirmedTransactions.setText(_translate("MainWindow", "getUnconfTransactions", None))
self.pb_parseTransaction.setText(_translate("MainWindow", "parseTransaction", None))
self.pb_getNextBlockGenerators.setText(_translate("MainWindow", "getNextBlockGenerators", None))
self.pb_getMyInfo.setText(_translate("MainWindow", "getMyInfo", None))
self.tabWidget_querySel.setTabText(self.tabWidget_querySel.indexOf(self.tab_4), _translate("MainWindow", "NXT", None))
self.pb_sendMessage.setText(_translate("MainWindow", "sendMessage ", None))
self.pb_encryptTo.setText(_translate("MainWindow", "encryptTo", None))
self.pb_decryptFrom.setText(_translate("MainWindow", "decryptFrom", None))
self.pb_sendEncryptedNote.setText(_translate("MainWindow", "sendEncryptedNote", None))
self.pb_readEncryptedNote.setText(_translate("MainWindow", "readEncryptedNote", None))
self.pb_getPoll.setText(_translate("MainWindow", "getPoll", None))
self.pb_castVote.setText(_translate("MainWindow", "castVote", None))
self.pb_getPollIds.setText(_translate("MainWindow", "getPollIds", None))
self.pb_createPoll.setText(_translate("MainWindow", "createPoll", None))
self.pb_stopForging.setText(_translate("MainWindow", "stopForging", None))
self.pb_leaseBalance.setText(_translate("MainWindow", "leaseBalance", None))
self.pb_getForging.setText(_translate("MainWindow", "getForging", None))
self.pb_signTransaction.setText(_translate("MainWindow", "signTransaction", None))
self.pb_getAccountBlockIds.setText(_translate("MainWindow", "getAccountBlockIds", None))
self.pb_getAccountId.setText(_translate("MainWindow", "getAccountId", None))
self.pb_getAccount.setText(_translate("MainWindow", "getAccount", None))
self.pb_getAccountTransactionIds.setText(_translate("MainWindow", "getAccountTransactionIds", None))
self.pb_getAccountPublicKey.setText(_translate("MainWindow", "getAccountPublicKey", None))
self.pb_sendMoney.setText(_translate("MainWindow", "sendMoney", None))
self.pb_getBalance.setText(_translate("MainWindow", "getBalance", None))
self.pb_rsConvert.setText(_translate("MainWindow", "rsConvert", None))
self.pb_setAccountInfo.setText(_translate("MainWindow", "setAccountInfo", None))
self.pb_startForging.setText(_translate("MainWindow", "startForging", None))
self.pb_getGuaranteedBalance.setText(_translate("MainWindow", "getGuaranteedBalance", None))
self.pb_generateToken.setText(_translate("MainWindow", "generateToken", None))
self.pb_getAliases.setText(_translate("MainWindow", "getAliases", None))
self.pb_getAlias.setText(_translate("MainWindow", "getAlias", None))
self.pb_buyAlias.setText(_translate("MainWindow", "buyAlias", None))
self.pb_sellAlias.setText(_translate("MainWindow", "sellAlias", None))
self.pb_setAlias.setText(_translate("MainWindow", "setAlias", None))
self.tabWidget_querySel.setTabText(self.tabWidget_querySel.indexOf(self.tab_5), _translate("MainWindow", "Acct+MSGs", None))
self.pb_getBidOrderIds.setText(_translate("MainWindow", "getBidOrderIds", None))
self.pb_cancelAskOrder.setText(_translate("MainWindow", "cancelAskOrder", None))
self.pb_cancelBidOrder.setText(_translate("MainWindow", "cancelBidOrder", None))
self.pb_placeBidOrder.setText(_translate("MainWindow", "placeBidOrder", None))
self.pb_getAskOrder.setText(_translate("MainWindow", "getAskOrder", None))
self.pb_getAskOrderIds.setText(_translate("MainWindow", "getAskOrderIds", None))
self.pb_getBidOrder.setText(_translate("MainWindow", "getBidOrder", None))
self.pb_placeAskOrder.setText(_translate("MainWindow", "placeAskOrder", None))
self.pb_getAccountCurrentBidOrderIds.setText(_translate("MainWindow", "getAccCurrBidOrderIds", None))
self.pb_getAccountCurrentAskOrderIds.setText(_translate("MainWindow", "getAccCurrAskOrderIds", None))
self.pb_transferAsset.setText(_translate("MainWindow", "transferAsset", None))
self.pb_getTrades.setText(_translate("MainWindow", "getTrades", None))
self.pb_getAssets.setText(_translate("MainWindow", "getAssets", None))
self.pb_getAllOpenOrders.setText(_translate("MainWindow", "getAllOpenOrders", None))
self.pb_getAllAssets.setText(_translate("MainWindow", "getAllAssets", None))
self.pb_getAskOrders.setText(_translate("MainWindow", "getAskOrders", None))
self.pb_getBidOrders.setText(_translate("MainWindow", "getBidOrders", None))
self.pb_getAssetIds.setText(_translate("MainWindow", "getAssetIds", None))
self.pb_getAssetsByIssuer.setText(_translate("MainWindow", "getAssetsByIssuer", None))
self.pb_getAsset.setText(_translate("MainWindow", "getAsset", None))
self.pb_issueAsset.setText(_translate("MainWindow", "issueAsset", None))
self.pb_getAllTrades.setText(_translate("MainWindow", "getAllTrades", None))
self.pb_dgsPriceChange.setText(_translate("MainWindow", "dgsPriceChange", None))
self.pb_getDGSPurchase.setText(_translate("MainWindow", "getDGSPurchase", None))
self.pb_dgsFeedback.setText(_translate("MainWindow", "dgsFeedback", None))
self.pb_dgsQuantityChange.setText(_translate("MainWindow", "dgsQuantityChange", None))
self.pb_getDGSPendingPurchases.setText(_translate("MainWindow", "getDGSPendingPurchases", None))
self.pb_dgsDelisting.setText(_translate("MainWindow", "dgsDelisting", None))
self.pb_dgsListing.setText(_translate("MainWindow", "dgsListing", None))
self.pb_dgsPurchase.setText(_translate("MainWindow", "dgsPurchase", None))
self.pb_dgsDelivery.setText(_translate("MainWindow", "dgsDelivery", None))
self.pb_dgsRefund.setText(_translate("MainWindow", "dgsRefund", None))
self.pb_getDGSGoods.setText(_translate("MainWindow", "getDGSGoods", None))
self.pb_getDGSGood.setText(_translate("MainWindow", "getDGSGood", None))
self.pb_getDGSPurchases.setText(_translate("MainWindow", "getDGSPurchases", None))
self.pb_getState_2.setText(_translate("MainWindow", "getState", None))
self.pb_getAccount_2.setText(_translate("MainWindow", "getAccount", None))
self.pb_getTransaction_2.setText(_translate("MainWindow", "getTransaction", None))
self.tabWidget_querySel.setTabText(self.tabWidget_querySel.indexOf(self.tab_6), _translate("MainWindow", "AE+DGS", None))
self.label_fullQuery.setText(_translate("MainWindow", "rawQuery", None))
self.pb_sendQuery.setText(_translate("MainWindow", "send Query once", None))
self.pb_clearText.setText(_translate("MainWindow", "clear", None))
self.cb_scrollCheck.setText(_translate("MainWindow", "scroll", None))
self.gb_poll7.setTitle(_translate("MainWindow", "Poll", None))
self.actionMenu.setText(_translate("MainWindow", "menu", None))
from PyKDE4.kdeui import KLed
|
|
"""Test the Z-Wave JS init module."""
from copy import deepcopy
from unittest.mock import call, patch
import pytest
from zwave_js_server.exceptions import BaseZwaveJSServerError, InvalidServerVersion
from zwave_js_server.model.node import Node
from homeassistant.components.hassio.handler import HassioAPIError
from homeassistant.components.zwave_js.const import DOMAIN
from homeassistant.components.zwave_js.helpers import get_device_id
from homeassistant.config_entries import DISABLED_USER, ConfigEntryState
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.helpers import device_registry as dr, entity_registry as er
from .common import AIR_TEMPERATURE_SENSOR, EATON_RF9640_ENTITY
from tests.common import MockConfigEntry
@pytest.fixture(name="connect_timeout")
def connect_timeout_fixture():
"""Mock the connect timeout."""
with patch("homeassistant.components.zwave_js.CONNECT_TIMEOUT", new=0) as timeout:
yield timeout
async def test_entry_setup_unload(hass, client, integration):
"""Test the integration set up and unload."""
entry = integration
assert client.connect.call_count == 1
assert entry.state is ConfigEntryState.LOADED
await hass.config_entries.async_unload(entry.entry_id)
assert client.disconnect.call_count == 1
assert entry.state is ConfigEntryState.NOT_LOADED
async def test_home_assistant_stop(hass, client, integration):
"""Test we clean up on home assistant stop."""
await hass.async_stop()
assert client.disconnect.call_count == 1
async def test_initialized_timeout(hass, client, connect_timeout):
"""Test we handle a timeout during client initialization."""
entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"})
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state is ConfigEntryState.SETUP_RETRY
async def test_enabled_statistics(hass, client):
"""Test that we enabled statistics if the entry is opted in."""
entry = MockConfigEntry(
domain="zwave_js",
data={"url": "ws://test.org", "data_collection_opted_in": True},
)
entry.add_to_hass(hass)
with patch(
"zwave_js_server.model.driver.Driver.async_enable_statistics"
) as mock_cmd:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert mock_cmd.called
async def test_disabled_statistics(hass, client):
"""Test that we diisabled statistics if the entry is opted out."""
entry = MockConfigEntry(
domain="zwave_js",
data={"url": "ws://test.org", "data_collection_opted_in": False},
)
entry.add_to_hass(hass)
with patch(
"zwave_js_server.model.driver.Driver.async_disable_statistics"
) as mock_cmd:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert mock_cmd.called
async def test_noop_statistics(hass, client):
"""Test that we don't make any statistics calls if user hasn't provided preference."""
entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"})
entry.add_to_hass(hass)
with patch(
"zwave_js_server.model.driver.Driver.async_enable_statistics"
) as mock_cmd1, patch(
"zwave_js_server.model.driver.Driver.async_disable_statistics"
) as mock_cmd2:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert not mock_cmd1.called
assert not mock_cmd2.called
@pytest.mark.parametrize("error", [BaseZwaveJSServerError("Boom"), Exception("Boom")])
async def test_listen_failure(hass, client, error):
"""Test we handle errors during client listen."""
async def listen(driver_ready):
"""Mock the client listen method."""
# Set the connect side effect to stop an endless loop on reload.
client.connect.side_effect = BaseZwaveJSServerError("Boom")
raise error
client.listen.side_effect = listen
entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"})
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state is ConfigEntryState.SETUP_RETRY
async def test_on_node_added_ready(hass, multisensor_6_state, client, integration):
"""Test we handle a ready node added event."""
dev_reg = dr.async_get(hass)
node = Node(client, multisensor_6_state)
event = {"node": node}
air_temperature_device_id = f"{client.driver.controller.home_id}-{node.node_id}"
state = hass.states.get(AIR_TEMPERATURE_SENSOR)
assert not state # entity and device not yet added
assert not dev_reg.async_get_device(
identifiers={(DOMAIN, air_temperature_device_id)}
)
client.driver.controller.emit("node added", event)
await hass.async_block_till_done()
state = hass.states.get(AIR_TEMPERATURE_SENSOR)
assert state # entity and device added
assert state.state != STATE_UNAVAILABLE
assert dev_reg.async_get_device(identifiers={(DOMAIN, air_temperature_device_id)})
async def test_on_node_added_not_ready(hass, multisensor_6_state, client, integration):
"""Test we handle a non ready node added event."""
dev_reg = dr.async_get(hass)
node_data = deepcopy(multisensor_6_state) # Copy to allow modification in tests.
node = Node(client, node_data)
node.data["ready"] = False
event = {"node": node}
air_temperature_device_id = f"{client.driver.controller.home_id}-{node.node_id}"
state = hass.states.get(AIR_TEMPERATURE_SENSOR)
assert not state # entity and device not yet added
assert not dev_reg.async_get_device(
identifiers={(DOMAIN, air_temperature_device_id)}
)
client.driver.controller.emit("node added", event)
await hass.async_block_till_done()
state = hass.states.get(AIR_TEMPERATURE_SENSOR)
assert not state # entity not yet added but device added in registry
assert dev_reg.async_get_device(identifiers={(DOMAIN, air_temperature_device_id)})
node.data["ready"] = True
node.emit("ready", event)
await hass.async_block_till_done()
state = hass.states.get(AIR_TEMPERATURE_SENSOR)
assert state # entity added
assert state.state != STATE_UNAVAILABLE
async def test_existing_node_ready(hass, client, multisensor_6, integration):
"""Test we handle a ready node that exists during integration setup."""
dev_reg = dr.async_get(hass)
node = multisensor_6
air_temperature_device_id = f"{client.driver.controller.home_id}-{node.node_id}"
state = hass.states.get(AIR_TEMPERATURE_SENSOR)
assert state # entity and device added
assert state.state != STATE_UNAVAILABLE
assert dev_reg.async_get_device(identifiers={(DOMAIN, air_temperature_device_id)})
async def test_null_name(hass, client, null_name_check, integration):
"""Test that node without a name gets a generic node name."""
node = null_name_check
assert hass.states.get(f"switch.node_{node.node_id}")
async def test_existing_node_not_ready(hass, client, multisensor_6):
"""Test we handle a non ready node that exists during integration setup."""
dev_reg = dr.async_get(hass)
node = multisensor_6
node.data = deepcopy(node.data) # Copy to allow modification in tests.
node.data["ready"] = False
event = {"node": node}
air_temperature_device_id = f"{client.driver.controller.home_id}-{node.node_id}"
entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"})
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get(AIR_TEMPERATURE_SENSOR)
assert not state # entity not yet added
assert dev_reg.async_get_device( # device should be added
identifiers={(DOMAIN, air_temperature_device_id)}
)
node.data["ready"] = True
node.emit("ready", event)
await hass.async_block_till_done()
state = hass.states.get(AIR_TEMPERATURE_SENSOR)
assert state # entity and device added
assert state.state != STATE_UNAVAILABLE
assert dev_reg.async_get_device(identifiers={(DOMAIN, air_temperature_device_id)})
async def test_start_addon(
hass, addon_installed, install_addon, addon_options, set_addon_options, start_addon
):
"""Test start the Z-Wave JS add-on during entry setup."""
device = "/test"
network_key = "abc123"
addon_options = {
"device": device,
"network_key": network_key,
}
entry = MockConfigEntry(
domain=DOMAIN,
title="Z-Wave JS",
data={"use_addon": True, "usb_path": device, "network_key": network_key},
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state is ConfigEntryState.SETUP_RETRY
assert install_addon.call_count == 0
assert set_addon_options.call_count == 1
assert set_addon_options.call_args == call(
hass, "core_zwave_js", {"options": addon_options}
)
assert start_addon.call_count == 1
assert start_addon.call_args == call(hass, "core_zwave_js")
async def test_install_addon(
hass, addon_installed, install_addon, addon_options, set_addon_options, start_addon
):
"""Test install and start the Z-Wave JS add-on during entry setup."""
addon_installed.return_value["version"] = None
device = "/test"
network_key = "abc123"
addon_options = {
"device": device,
"network_key": network_key,
}
entry = MockConfigEntry(
domain=DOMAIN,
title="Z-Wave JS",
data={"use_addon": True, "usb_path": device, "network_key": network_key},
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state is ConfigEntryState.SETUP_RETRY
assert install_addon.call_count == 1
assert install_addon.call_args == call(hass, "core_zwave_js")
assert set_addon_options.call_count == 1
assert set_addon_options.call_args == call(
hass, "core_zwave_js", {"options": addon_options}
)
assert start_addon.call_count == 1
assert start_addon.call_args == call(hass, "core_zwave_js")
@pytest.mark.parametrize("addon_info_side_effect", [HassioAPIError("Boom")])
async def test_addon_info_failure(
hass,
addon_installed,
install_addon,
addon_options,
set_addon_options,
start_addon,
):
"""Test failure to get add-on info for Z-Wave JS add-on during entry setup."""
device = "/test"
network_key = "abc123"
entry = MockConfigEntry(
domain=DOMAIN,
title="Z-Wave JS",
data={"use_addon": True, "usb_path": device, "network_key": network_key},
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state is ConfigEntryState.SETUP_RETRY
assert install_addon.call_count == 0
assert start_addon.call_count == 0
@pytest.mark.parametrize(
"old_device, new_device, old_network_key, new_network_key",
[("/old_test", "/new_test", "old123", "new123")],
)
async def test_addon_options_changed(
hass,
client,
addon_installed,
addon_running,
install_addon,
addon_options,
start_addon,
old_device,
new_device,
old_network_key,
new_network_key,
):
"""Test update config entry data on entry setup if add-on options changed."""
addon_options["device"] = new_device
addon_options["network_key"] = new_network_key
entry = MockConfigEntry(
domain=DOMAIN,
title="Z-Wave JS",
data={
"url": "ws://host1:3001",
"use_addon": True,
"usb_path": old_device,
"network_key": old_network_key,
},
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == ConfigEntryState.LOADED
assert entry.data["usb_path"] == new_device
assert entry.data["network_key"] == new_network_key
assert install_addon.call_count == 0
assert start_addon.call_count == 0
@pytest.mark.parametrize(
"addon_version, update_available, update_calls, backup_calls, "
"update_addon_side_effect, create_backup_side_effect",
[
("1.0", True, 1, 1, None, None),
("1.0", False, 0, 0, None, None),
("1.0", True, 1, 1, HassioAPIError("Boom"), None),
("1.0", True, 0, 1, None, HassioAPIError("Boom")),
],
)
async def test_update_addon(
hass,
client,
addon_info,
addon_installed,
addon_running,
create_backup,
update_addon,
addon_options,
addon_version,
update_available,
update_calls,
backup_calls,
update_addon_side_effect,
create_backup_side_effect,
):
"""Test update the Z-Wave JS add-on during entry setup."""
device = "/test"
network_key = "abc123"
addon_options["device"] = device
addon_options["network_key"] = network_key
addon_info.return_value["version"] = addon_version
addon_info.return_value["update_available"] = update_available
create_backup.side_effect = create_backup_side_effect
update_addon.side_effect = update_addon_side_effect
client.connect.side_effect = InvalidServerVersion("Invalid version")
entry = MockConfigEntry(
domain=DOMAIN,
title="Z-Wave JS",
data={
"url": "ws://host1:3001",
"use_addon": True,
"usb_path": device,
"network_key": network_key,
},
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state is ConfigEntryState.SETUP_RETRY
assert create_backup.call_count == backup_calls
assert update_addon.call_count == update_calls
@pytest.mark.parametrize(
"stop_addon_side_effect, entry_state",
[
(None, ConfigEntryState.NOT_LOADED),
(HassioAPIError("Boom"), ConfigEntryState.LOADED),
],
)
async def test_stop_addon(
hass,
client,
addon_installed,
addon_running,
addon_options,
stop_addon,
stop_addon_side_effect,
entry_state,
):
"""Test stop the Z-Wave JS add-on on entry unload if entry is disabled."""
stop_addon.side_effect = stop_addon_side_effect
device = "/test"
network_key = "abc123"
addon_options["device"] = device
addon_options["network_key"] = network_key
entry = MockConfigEntry(
domain=DOMAIN,
title="Z-Wave JS",
data={
"url": "ws://host1:3001",
"use_addon": True,
"usb_path": device,
"network_key": network_key,
},
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state is ConfigEntryState.LOADED
await hass.config_entries.async_set_disabled_by(entry.entry_id, DISABLED_USER)
await hass.async_block_till_done()
assert entry.state == entry_state
assert stop_addon.call_count == 1
assert stop_addon.call_args == call(hass, "core_zwave_js")
async def test_remove_entry(
hass, addon_installed, stop_addon, create_backup, uninstall_addon, caplog
):
"""Test remove the config entry."""
# test successful remove without created add-on
entry = MockConfigEntry(
domain=DOMAIN,
title="Z-Wave JS",
data={"integration_created_addon": False},
)
entry.add_to_hass(hass)
assert entry.state is ConfigEntryState.NOT_LOADED
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
await hass.config_entries.async_remove(entry.entry_id)
assert entry.state is ConfigEntryState.NOT_LOADED
assert len(hass.config_entries.async_entries(DOMAIN)) == 0
# test successful remove with created add-on
entry = MockConfigEntry(
domain=DOMAIN,
title="Z-Wave JS",
data={"integration_created_addon": True},
)
entry.add_to_hass(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
await hass.config_entries.async_remove(entry.entry_id)
assert stop_addon.call_count == 1
assert stop_addon.call_args == call(hass, "core_zwave_js")
assert create_backup.call_count == 1
assert create_backup.call_args == call(
hass,
{"name": "addon_core_zwave_js_1.0", "addons": ["core_zwave_js"]},
partial=True,
)
assert uninstall_addon.call_count == 1
assert uninstall_addon.call_args == call(hass, "core_zwave_js")
assert entry.state is ConfigEntryState.NOT_LOADED
assert len(hass.config_entries.async_entries(DOMAIN)) == 0
stop_addon.reset_mock()
create_backup.reset_mock()
uninstall_addon.reset_mock()
# test add-on stop failure
entry.add_to_hass(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
stop_addon.side_effect = HassioAPIError()
await hass.config_entries.async_remove(entry.entry_id)
assert stop_addon.call_count == 1
assert stop_addon.call_args == call(hass, "core_zwave_js")
assert create_backup.call_count == 0
assert uninstall_addon.call_count == 0
assert entry.state is ConfigEntryState.NOT_LOADED
assert len(hass.config_entries.async_entries(DOMAIN)) == 0
assert "Failed to stop the Z-Wave JS add-on" in caplog.text
stop_addon.side_effect = None
stop_addon.reset_mock()
create_backup.reset_mock()
uninstall_addon.reset_mock()
# test create backup failure
entry.add_to_hass(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
create_backup.side_effect = HassioAPIError()
await hass.config_entries.async_remove(entry.entry_id)
assert stop_addon.call_count == 1
assert stop_addon.call_args == call(hass, "core_zwave_js")
assert create_backup.call_count == 1
assert create_backup.call_args == call(
hass,
{"name": "addon_core_zwave_js_1.0", "addons": ["core_zwave_js"]},
partial=True,
)
assert uninstall_addon.call_count == 0
assert entry.state is ConfigEntryState.NOT_LOADED
assert len(hass.config_entries.async_entries(DOMAIN)) == 0
assert "Failed to create a backup of the Z-Wave JS add-on" in caplog.text
create_backup.side_effect = None
stop_addon.reset_mock()
create_backup.reset_mock()
uninstall_addon.reset_mock()
# test add-on uninstall failure
entry.add_to_hass(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
uninstall_addon.side_effect = HassioAPIError()
await hass.config_entries.async_remove(entry.entry_id)
assert stop_addon.call_count == 1
assert stop_addon.call_args == call(hass, "core_zwave_js")
assert create_backup.call_count == 1
assert create_backup.call_args == call(
hass,
{"name": "addon_core_zwave_js_1.0", "addons": ["core_zwave_js"]},
partial=True,
)
assert uninstall_addon.call_count == 1
assert uninstall_addon.call_args == call(hass, "core_zwave_js")
assert entry.state is ConfigEntryState.NOT_LOADED
assert len(hass.config_entries.async_entries(DOMAIN)) == 0
assert "Failed to uninstall the Z-Wave JS add-on" in caplog.text
async def test_removed_device(hass, client, multiple_devices, integration):
"""Test that the device registry gets updated when a device gets removed."""
nodes = multiple_devices
# Verify how many nodes are available
assert len(client.driver.controller.nodes) == 2
# Make sure there are the same number of devices
dev_reg = dr.async_get(hass)
device_entries = dr.async_entries_for_config_entry(dev_reg, integration.entry_id)
assert len(device_entries) == 2
# Check how many entities there are
ent_reg = er.async_get(hass)
entity_entries = er.async_entries_for_config_entry(ent_reg, integration.entry_id)
assert len(entity_entries) == 26
# Remove a node and reload the entry
old_node = nodes.pop(13)
await hass.config_entries.async_reload(integration.entry_id)
await hass.async_block_till_done()
    # Assert that the node and all of its entities were removed from the device and
# entity registry
device_entries = dr.async_entries_for_config_entry(dev_reg, integration.entry_id)
assert len(device_entries) == 1
entity_entries = er.async_entries_for_config_entry(ent_reg, integration.entry_id)
assert len(entity_entries) == 16
assert dev_reg.async_get_device({get_device_id(client, old_node)}) is None
async def test_suggested_area(hass, client, eaton_rf9640_dimmer):
"""Test that suggested area works."""
dev_reg = dr.async_get(hass)
ent_reg = er.async_get(hass)
entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"})
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
entity = ent_reg.async_get(EATON_RF9640_ENTITY)
assert dev_reg.async_get(entity.device_id).area_id is not None
|
|
# urlpath.py
# 0.1.0
# 2005/08/20
# Functions that handle url paths.
# Part of Pythonutils
# http://www.voidspace.org.uk/python/pythonutils.html
# Copyright Michael Foord, 2004 & 2005.
# Released subject to the BSD License
# Please see http://www.voidspace.org.uk/python/license.shtml
# For information about bugfixes, updates and support, please join the
# Pythonutils mailing list.
# http://groups.google.com/group/pythonutils/
# Comments, suggestions and bug reports welcome.
# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
# E-mail [email protected]
from __future__ import print_function
import posixpath
import os
try:
from urllib.request import url2pathname, pathname2url
except ImportError:
from urllib import url2pathname, pathname2url
__all__ = [
'nativejoin',
'pathjoin',
'relpathto',
'tslash',
'relpath'
]
def pathjoin(base, *paths):
"""
Join paths to a base, observing pardir.
If base doesn't *end* with '/' we assume it's a file rather than a directory.
(so we get rid of it)
"""
# XXXX will posixpath.join do all this anyway?
if base and not base.endswith('/'):
# get rid of the filename
base = '/'.join(base.split('/')[:-1])
base = tslash(base)
path = (base,) + paths
return posixpath.normpath(posixpath.join(*path))
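# A couple of illustrative calls for the rule described in the docstring above
# (paths are made up; results follow from posixpath.normpath):
#   pathjoin('/dir/page.html', 'img/logo.png')  ->  '/dir/img/logo.png'
#   pathjoin('/dir/sub/', '../file')            ->  '/dir/file'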
def nativejoin(base, path):
"""
Joins two paths - returning a native file path.
Given a base path and a relative location, (in posix format)
return a file path in a (relatively) OS native way.
"""
return url2pathname(pathjoin(base, path))
def relpathto(thisdir, origin, dest):
"""
Given two paths relative to a directory, work out a path from origin
to destination.
Assumes UNIX/URL type relative paths.
If origin doesn't *end* with '/' we assume it's a file rather than a
directory.
If the same paths are passed in :
if the path ends with ('/') then we return ''
else we return the last part of the path (presumably a filename)
If thisdir doesn't start with '/' then we add one
(this makes the top level of thisdir our root directory)
"""
orig_thisdir = thisdir
if not thisdir.startswith('/'):
thisdir = '/' + thisdir
orig_abs = posixpath.normpath(posixpath.join(thisdir, origin))
dest_abs = posixpath.normpath(posixpath.join(thisdir, dest))
if origin.endswith('/') and not orig_abs.endswith('/'):
orig_abs = orig_abs + '/'
if dest.endswith('/') and not dest_abs.endswith('/'):
dest_abs = dest_abs + '/'
# print orig_abs, dest_abs
#
# if the first item is a filename, we want to get rid of it
orig_list = orig_abs.split('/')[:-1]
dest_list = dest_abs.split('/')
# print orig_list, dest_list
if orig_list[0] != dest_list[0]:
# can't get here from there
# XXXX raise exception?
return dest
#
# find the location where the two paths start to differ.
i = 0
for start_seg, dest_seg in zip(orig_list, dest_list):
if start_seg != dest_seg:
break
i += 1
#
# now i is the point where the two paths diverge;
# need a certain number of "os.pardir"s to work up
# from the origin to the point of divergence.
segments = ['..'] * (len(orig_list) - i)
# need to add the diverging part of dest_list.
segments += dest_list[i:]
if len(segments) == 0:
# if they happen to be identical paths
# identical directories
if dest.endswith('/'):
return ''
# just the filename - the last part of dest
return dest_list[-1]
else:
return '/'.join(segments)
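# A short illustration of the behaviour described in the docstring above
# (hypothetical paths):
#   relpathto('/top/', 'a/file1.html', 'b/file2.html')  ->  '../b/file2.html'
#   relpathto('/top/', 'file1.html', 'file2.html')       ->  'file2.html'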
def relpath(origin, dest):
"""Given two absolute paths, work out a path from origin to destination.
Assumes UNIX/URL type relative paths.
If origin doesn't *end* with '/' we assume it's a file rather than
a directory.
If the same paths are passed in :
if the path ends with ('/') then we return ''
else we return the last part of the path (presumably a filename)
If origin or dest don't start with '/' then we add it.
We are *assuming* relative paths on the same device
(i.e. same top level directory)
"""
if not origin.startswith('/'):
origin = '/' + origin
if not dest.startswith('/'):
dest = '/' + dest
#
# if the first item is a filename, we want to get rid of it
orig_list = origin.split('/')[:-1]
dest_list = dest.split('/')
#
# find the location where the two paths start to differ.
i = 0
for start_seg, dest_seg in zip(orig_list, dest_list):
if start_seg != dest_seg:
break
i += 1
# now i is the point where the two paths diverge.
# need a certain number of "os.pardir"s to work up
# from the origin to the point of divergence.
segments = ['..'] * (len(orig_list) - i)
# need to add the diverging part of dest_list.
segments += dest_list[i:]
if len(segments) == 0:
# if they happen to be identical paths
# identical directories
if dest.endswith('/'):
return ''
# just the filename - the last part of dest
return dest_list[-1]
else:
return '/'.join(segments)
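# For example (made-up absolute paths):
#   relpath('/hello/fish/', '/hello/bungles/file.html')  ->  '../bungles/file.html'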
def tslash(apath):
"""Add a trailing slash to a path if it needs one.
Doesn't use os.sep because you end up jiggered on windoze - when you
want separators for URLs.
"""
if (apath and
apath != '.' and
not apath.endswith('/') and
not apath.endswith('\\')):
return apath + '/'
else:
return apath
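# For instance:
#   tslash('some/dir')   ->  'some/dir/'
#   tslash('some/dir/')  ->  'some/dir/'
#   tslash('.')          ->  '.'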
##############################################
def testJoin():
thelist = [
('/', 'fish.html'),
('/dir/dir/', '../file'),
('dir/dir/', '../file'),
('dir/dir/', '../../file'),
('dir/dir/', '../../../file'),
('/dir/dir/', '../notherdir/file'),
('/dir/dir/', '../../notherdir/file'),
('dir/dir/', '../../notherdir/file'),
('dir/dir/', '../../../notherdir/file'),
('', '../path'),
]
for entry in thelist:
print(entry, ' :: ', pathjoin(*entry))
print(entry, ' :: ', nativejoin(*entry))
print('\n')
def testRelpathto():
thedir = '//toplevel/dirone/dirtwo/dirthree'
thelist = [
('file1.html', 'file2.html'),
('file1.html', '../file2.html'),
('../file1.html', '../file2.html'),
('../file1.html', 'file2.html'),
('../fish1/fish2/', '../../sub1/sub2/'),
('../fish1/fish2/', 'sub1/sub2'),
('../../../fish1/fish2/', 'sub1/sub2/'),
('../../../fish1/fish2/', 'sub1/sub2/file1.html'),
]
for orig, dest in thelist:
print('(%s, %s) : ' % (orig, dest), relpathto(thedir, orig, dest))
def testRelpathto2():
thedir = 'section3/'
thelist = [
('../archive/strangeindex1.html', 'article2.html'),
]
for orig, dest in thelist:
answer = relpathto(thedir, orig, dest)
print('(%s, %s) : ' % (orig, dest), answer)
def testRelpath():
thelist = [
('/hello/fish/', 'bungles'),
]
for orig, dest in thelist:
answer = relpath(orig, dest)
print('(%s, %s) : ' % (orig, dest), answer)
if __name__ == '__main__':
testJoin()
testRelpathto()
testRelpath()
# testRelpathto2()
"""
TODO
====
More comprehensive tests.
CHANGELOG
2005/07/31
    Can now pass multiple args to ``pathjoin``.
Finalised as version 0.1.0
2005/06/18
Changes by Nicola Larosa
Code cleanup
lines shortened
comments on line above code
empty comments in empty lines
2005/05/28
Added relpath to __all__
TODO
Move into pythonutils
relpathto could call relpath (and so be shorter)
nativejoin could accept multiple paths
Could tslash be more elegant ?
"""
|
|
"""
******
Layout
******
Node positioning algorithms for graph drawing.
The default scales and centering for these layouts are
typically squares with side [0, 1] or [0, scale].
The two circular layout routines (circular_layout and
shell_layout) have size [-1, 1] or [-scale, scale].
"""
# Authors: Aric Hagberg <[email protected]>,
# Dan Schult <[email protected]>
# Copyright (C) 2004-2016 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
import collections
import networkx as nx
__all__ = ['circular_layout',
'random_layout',
'shell_layout',
'spring_layout',
'spectral_layout',
'fruchterman_reingold_layout']
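# A brief sketch of the scale/center conventions described in the module
# docstring above (graph and values are illustrative):
#   G = nx.path_graph(4)
#   pos = nx.spring_layout(G, scale=2.0, center=(5, 5))  # box of side 2 around (5, 5)
#   pos = nx.circular_layout(G, scale=2.0)                # circle of radius 2 about the origin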
def random_layout(G, dim=2, scale=1., center=None):
"""Position nodes uniformly at random.
For every node, a position is generated by choosing each of dim
coordinates uniformly at random on the default interval [0.0, 1.0),
or on an interval of length `scale` centered at `center`.
NumPy (http://scipy.org) is required for this function.
Parameters
----------
G : NetworkX graph or list of nodes
A position will be assigned to every node in G.
dim : int
Dimension of layout.
scale : float (default 1)
Scale factor for positions
center : array-like (default scale*0.5 in each dim)
Coordinate around which to center the layout.
Returns
-------
pos : dict
A dictionary of positions keyed by node
Examples
--------
>>> G = nx.lollipop_graph(4, 3)
>>> pos = nx.random_layout(G)
"""
import numpy as np
shape = (len(G), dim)
pos = np.random.random(shape) * scale
if center is not None:
pos += np.asarray(center) - 0.5 * scale
return dict(zip(G, pos))
def circular_layout(G, dim=2, scale=1., center=None):
"""Position nodes on a circle.
Parameters
----------
G : NetworkX graph or list of nodes
dim : int
Dimension of layout, currently only dim=2 is supported
scale : float (default 1)
Scale factor for positions, i.e. radius of circle.
center : array-like (default origin)
Coordinate around which to center the layout.
Returns
-------
dict :
A dictionary of positions keyed by node
Examples
--------
>>> G=nx.path_graph(4)
>>> pos=nx.circular_layout(G)
Notes
-----
This algorithm currently only works in two dimensions and does not
try to minimize edge crossings.
"""
import numpy as np
if len(G) == 0:
return {}
twopi = 2.0*np.pi
theta = np.arange(0, twopi, twopi/len(G))
pos = np.column_stack([np.cos(theta), np.sin(theta)]) * scale
if center is not None:
pos += np.asarray(center)
return dict(zip(G, pos))
def shell_layout(G, nlist=None, dim=2, scale=1., center=None):
"""Position nodes in concentric circles.
Parameters
----------
G : NetworkX graph or list of nodes
nlist : list of lists
List of node lists for each shell.
dim : int
Dimension of layout, currently only dim=2 is supported
scale : float (default 1)
        Scale factor for positions, i.e. radius of largest shell
center : array-like (default origin)
Coordinate around which to center the layout.
Returns
-------
dict :
A dictionary of positions keyed by node
Examples
--------
>>> G = nx.path_graph(4)
>>> shells = [[0], [1,2,3]]
>>> pos = nx.shell_layout(G, shells)
Notes
-----
This algorithm currently only works in two dimensions and does not
try to minimize edge crossings.
"""
import numpy as np
if len(G) == 0:
return {}
if nlist is None:
# draw the whole graph in one shell
nlist = [list(G)]
numb_shells = len(nlist)
if len(nlist[0]) == 1:
# single node at center
radius = 0.0
numb_shells -= 1
else:
# else start at r=1
radius = 1.0
# distance between shells
gap = (scale / numb_shells) if numb_shells else scale
radius *= gap
npos={}
twopi = 2.0*np.pi
for nodes in nlist:
theta = np.arange(0, twopi, twopi/len(nodes))
pos = np.column_stack([np.cos(theta), np.sin(theta)]) * radius
npos.update(zip(nodes, pos))
radius += gap
if center is not None:
center = np.asarray(center)
for n,p in npos.items():
npos[n] = p + center
return npos
def fruchterman_reingold_layout(G, dim=2, k=None,
pos=None,
fixed=None,
iterations=50,
weight='weight',
scale=1.0,
center=None):
"""Position nodes using Fruchterman-Reingold force-directed algorithm.
Parameters
----------
G : NetworkX graph
dim : int
Dimension of layout
k : float (default=None)
Optimal distance between nodes. If None the distance is set to
1/sqrt(n) where n is the number of nodes. Increase this value
to move nodes farther apart.
pos : dict or None optional (default=None)
Initial positions for nodes as a dictionary with node as keys
and values as a list or tuple. If None, then use random initial
positions.
fixed : list or None optional (default=None)
Nodes to keep fixed at initial position.
If any nodes are fixed, the scale and center features are not used.
iterations : int optional (default=50)
Number of iterations of spring-force relaxation
weight : string or None optional (default='weight')
The edge attribute that holds the numerical value used for
the effective spring constant. If None, edge weights are 1.
scale : float (default=1.0)
Scale factor for positions. The nodes are positioned
in a box of size `scale` in each dim centered at `center`.
center : array-like (default scale/2 in each dim)
Coordinate around which to center the layout.
Returns
-------
dict :
A dictionary of positions keyed by node
Examples
--------
>>> G=nx.path_graph(4)
>>> pos=nx.spring_layout(G)
# this function has two names:
# spring_layout and fruchterman_reingold_layout
>>> pos=nx.fruchterman_reingold_layout(G)
"""
import numpy as np
if len(G) == 0:
return {}
if fixed is not None:
nfixed = dict(zip(G, range(len(G))))
fixed = np.asarray([nfixed[v] for v in fixed])
if pos is None:
msg = "Keyword pos must be specified if any nodes are fixed"
raise ValueError(msg)
if pos is not None:
# Determine size of existing domain to adjust initial positions
pos_coords = np.array(list(pos.values()))
min_coords = pos_coords.min(0)
domain_size = pos_coords.max(0) - min_coords
shape = (len(G), dim)
pos_arr = np.random.random(shape) * domain_size + min_coords
for i,n in enumerate(G):
if n in pos:
pos_arr[i] = np.asarray(pos[n])
else:
pos_arr=None
if k is None and fixed is not None:
# Adjust k for domains larger than 1x1
k=domain_size.max()/np.sqrt(len(G))
try:
# Sparse matrix
        if len(G) < 500:  # small graphs: fall through to the dense solver below
raise ValueError
A = nx.to_scipy_sparse_matrix(G, weight=weight, dtype='f')
pos = _sparse_fruchterman_reingold(A,dim,k,pos_arr,fixed,iterations)
except:
A = nx.to_numpy_matrix(G, weight=weight)
pos = _fruchterman_reingold(A, dim, k, pos_arr, fixed, iterations)
if fixed is None:
pos = _rescale_layout(pos, scale)
if center is not None:
pos += np.asarray(center) - 0.5 * scale
return dict(zip(G,pos))
spring_layout=fruchterman_reingold_layout
def _fruchterman_reingold(A,dim=2,k=None,pos=None,fixed=None,iterations=50):
# Position nodes in adjacency matrix A using Fruchterman-Reingold
# Entry point for NetworkX graph is fruchterman_reingold_layout()
import numpy as np
try:
nnodes,_=A.shape
except AttributeError:
raise nx.NetworkXError(
"fruchterman_reingold() takes an adjacency matrix as input")
A=np.asarray(A) # make sure we have an array instead of a matrix
if pos is None:
# random initial positions
pos=np.asarray(np.random.random((nnodes,dim)),dtype=A.dtype)
else:
# make sure positions are of same type as matrix
pos=pos.astype(A.dtype)
# optimal distance between nodes
if k is None:
k=np.sqrt(1.0/nnodes)
# the initial "temperature" is about .1 of domain area (=1x1)
# this is the largest step allowed in the dynamics.
# Calculate domain in case our fixed positions are bigger than 1x1
t = max(max(pos.T[0]) - min(pos.T[0]), max(pos.T[1]) - min(pos.T[1]))*0.1
# simple cooling scheme.
# linearly step down by dt on each iteration so last iteration is size dt.
dt=t/float(iterations+1)
delta = np.zeros((pos.shape[0],pos.shape[0],pos.shape[1]),dtype=A.dtype)
# the inscrutable (but fast) version
# this is still O(V^2)
# could use multilevel methods to speed this up significantly
for iteration in range(iterations):
# matrix of difference between points
for i in range(pos.shape[1]):
delta[:,:,i]= pos[:,i,None]-pos[:,i]
# distance between points
distance=np.sqrt((delta**2).sum(axis=-1))
# enforce minimum distance of 0.01
distance=np.where(distance<0.01,0.01,distance)
# displacement "force"
displacement=np.transpose(np.transpose(delta)*\
(k*k/distance**2-A*distance/k))\
.sum(axis=1)
# update positions
length=np.sqrt((displacement**2).sum(axis=1))
length=np.where(length<0.01,0.01,length)
delta_pos=np.transpose(np.transpose(displacement)*t/length)
if fixed is not None:
# don't change positions of fixed nodes
delta_pos[fixed]=0.0
pos+=delta_pos
# cool temperature
t-=dt
if fixed is None:
pos = _rescale_layout(pos)
return pos
def _sparse_fruchterman_reingold(A, dim=2, k=None, pos=None, fixed=None,
iterations=50):
# Position nodes in adjacency matrix A using Fruchterman-Reingold
# Entry point for NetworkX graph is fruchterman_reingold_layout()
# Sparse version
import numpy as np
try:
nnodes,_=A.shape
except AttributeError:
raise nx.NetworkXError(
"fruchterman_reingold() takes an adjacency matrix as input")
try:
from scipy.sparse import spdiags,coo_matrix
except ImportError:
raise ImportError("_sparse_fruchterman_reingold() scipy numpy: http://scipy.org/ ")
    # make sure we have a List of Lists representation
try:
A=A.tolil()
except:
A=(coo_matrix(A)).tolil()
if pos is None:
# random initial positions
pos=np.asarray(np.random.random((nnodes,dim)),dtype=A.dtype)
else:
# make sure positions are of same type as matrix
pos=pos.astype(A.dtype)
# no fixed nodes
if fixed is None:
fixed=[]
# optimal distance between nodes
if k is None:
k=np.sqrt(1.0/nnodes)
# the initial "temperature" is about .1 of domain area (=1x1)
# this is the largest step allowed in the dynamics.
# Calculate domain in case our fixed positions are bigger than 1x1
t = max(max(pos.T[0]) - min(pos.T[0]), max(pos.T[1]) - min(pos.T[1]))*0.1
# simple cooling scheme.
# linearly step down by dt on each iteration so last iteration is size dt.
dt=t/float(iterations+1)
displacement=np.zeros((dim,nnodes))
for iteration in range(iterations):
displacement*=0
# loop over rows
for i in range(A.shape[0]):
if i in fixed:
continue
# difference between this row's node position and all others
delta=(pos[i]-pos).T
# distance between points
distance=np.sqrt((delta**2).sum(axis=0))
# enforce minimum distance of 0.01
distance=np.where(distance<0.01,0.01,distance)
# the adjacency matrix row
Ai=np.asarray(A.getrowview(i).toarray())
# displacement "force"
displacement[:,i]+=\
(delta*(k*k/distance**2-Ai*distance/k)).sum(axis=1)
# update positions
length=np.sqrt((displacement**2).sum(axis=0))
length=np.where(length<0.01,0.01,length)
pos+=(displacement*t/length).T
# cool temperature
t-=dt
if fixed is None:
pos = _rescale_layout(pos)
return pos
def spectral_layout(G, dim=2, weight='weight', scale=1., center=None):
"""Position nodes using the eigenvectors of the graph Laplacian.
Parameters
----------
G : NetworkX graph or list of nodes
dim : int
Dimension of layout
weight : string or None optional (default='weight')
The edge attribute that holds the numerical value used for
the edge weight. If None, then all edge weights are 1.
scale : float optional (default 1)
Scale factor for positions, i.e. nodes placed in a box with
side [0, scale] or centered on `center` if provided.
center : array-like (default scale/2 in each dim)
Coordinate around which to center the layout.
Returns
-------
dict :
A dictionary of positions keyed by node
Examples
--------
>>> G=nx.path_graph(4)
>>> pos=nx.spectral_layout(G)
Notes
-----
Directed graphs will be considered as undirected graphs when
positioning the nodes.
For larger graphs (>500 nodes) this will use the SciPy sparse
eigenvalue solver (ARPACK).
"""
# handle some special cases that break the eigensolvers
import numpy as np
if len(G) <= 2:
if len(G) == 0:
return {}
elif len(G) == 1:
if center is not None:
pos = np.asarray(center)
else:
pos = np.ones((1,dim)) * scale * 0.5
else: #len(G) == 2
pos = np.array([np.zeros(dim), np.ones(dim) * scale])
if center is not None:
pos += np.asarray(center) - scale * 0.5
return dict(zip(G,pos))
try:
# Sparse matrix
if len(G)< 500: # dense solver is faster for small graphs
raise ValueError
A = nx.to_scipy_sparse_matrix(G, weight=weight, dtype='d')
# Symmetrize directed graphs
if G.is_directed():
A = A + np.transpose(A)
pos = _sparse_spectral(A,dim)
except (ImportError, ValueError):
# Dense matrix
A = nx.to_numpy_matrix(G, weight=weight)
# Symmetrize directed graphs
if G.is_directed():
A = A + np.transpose(A)
pos = _spectral(A, dim)
pos = _rescale_layout(pos, scale)
if center is not None:
pos += np.asarray(center) - 0.5 * scale
return dict(zip(G,pos))
def _spectral(A, dim=2):
# Input adjacency matrix A
# Uses dense eigenvalue solver from numpy
try:
import numpy as np
except ImportError:
raise ImportError("spectral_layout() requires numpy: http://scipy.org/ ")
try:
nnodes,_=A.shape
except AttributeError:
raise nx.NetworkXError(\
"spectral() takes an adjacency matrix as input")
# form Laplacian matrix
# make sure we have an array instead of a matrix
A=np.asarray(A)
I=np.identity(nnodes,dtype=A.dtype)
D=I*np.sum(A,axis=1) # diagonal of degrees
L=D-A
eigenvalues,eigenvectors=np.linalg.eig(L)
# sort and keep smallest nonzero
index=np.argsort(eigenvalues)[1:dim+1] # 0 index is zero eigenvalue
return np.real(eigenvectors[:,index])
def _sparse_spectral(A,dim=2):
# Input adjacency matrix A
# Uses sparse eigenvalue solver from scipy
# Could use multilevel methods here, see Koren "On spectral graph drawing"
try:
import numpy as np
from scipy.sparse import spdiags
except ImportError:
raise ImportError("_sparse_spectral() requires scipy & numpy: http://scipy.org/ ")
try:
from scipy.sparse.linalg.eigen import eigsh
except ImportError:
# scipy <0.9.0 names eigsh differently
from scipy.sparse.linalg import eigen_symmetric as eigsh
try:
nnodes,_=A.shape
except AttributeError:
raise nx.NetworkXError(\
"sparse_spectral() takes an adjacency matrix as input")
# form Laplacian matrix
data=np.asarray(A.sum(axis=1).T)
D=spdiags(data,0,nnodes,nnodes)
L=D-A
k=dim+1
    # number of Lanczos vectors for ARPACK solver. What is the right scaling?
ncv=max(2*k+1,int(np.sqrt(nnodes)))
# return smallest k eigenvalues and eigenvectors
eigenvalues,eigenvectors=eigsh(L,k,which='SM',ncv=ncv)
index=np.argsort(eigenvalues)[1:k] # 0 index is zero eigenvalue
return np.real(eigenvectors[:,index])
def _rescale_layout(pos, scale=1.):
# rescale to [0, scale) in each axis
# Find max length over all dimensions
maxlim=0
for i in range(pos.shape[1]):
pos[:,i] -= pos[:,i].min() # shift min to zero
maxlim = max(maxlim, pos[:,i].max())
if maxlim > 0:
for i in range(pos.shape[1]):
pos[:,i] *= scale / maxlim
return pos
# fixture for nose tests
def setup_module(module):
from nose import SkipTest
try:
import numpy
except:
raise SkipTest("NumPy not available")
try:
import scipy
except:
raise SkipTest("SciPy not available")
|
|
from .. import BaseProvider
localized = True
class Provider(BaseProvider):
formats = (
'{{last_name}} {{company_suffix}}',
'{{last_name}}-{{last_name}}',
'{{last_name}}, {{last_name}} and {{last_name}}',
)
company_suffixes = ('Inc', 'and Sons', 'LLC', 'Group', 'PLC', 'Ltd')
catch_phrase_words = (
('Adaptive',
'Advanced',
'Ameliorated',
'Assimilated',
'Automated',
'Balanced',
'Business-focused',
'Centralized',
'Cloned',
'Compatible',
'Configurable',
'Cross-group',
'Cross-platform',
'Customer-focused',
'Customizable',
'Decentralized',
'De-engineered',
'Devolved',
'Digitized',
'Distributed',
'Diverse',
'Down-sized',
'Enhanced',
'Enterprise-wide',
'Ergonomic',
'Exclusive',
'Expanded',
'Extended',
'Face-to-face',
'Focused',
'Front-line',
'Fully-configurable',
'Function-based',
'Fundamental',
'Future-proofed',
'Grass-roots',
'Horizontal',
'Implemented',
'Innovative',
'Integrated',
'Intuitive',
'Inverse',
'Managed',
'Mandatory',
'Monitored',
'Multi-channeled',
'Multi-lateral',
'Multi-layered',
'Multi-tiered',
'Networked',
'Object-based',
'Open-architected',
'Open-source',
'Operative',
'Optimized',
'Optional',
'Organic',
'Organized',
'Persevering',
'Persistent',
'Phased',
'Polarized',
'Pre-emptive',
'Proactive',
'Profit-focused',
'Profound',
'Programmable',
'Progressive',
'Public-key',
'Quality-focused',
'Reactive',
'Realigned',
'Re-contextualized',
'Re-engineered',
'Reduced',
'Reverse-engineered',
'Right-sized',
'Robust',
'Seamless',
'Secured',
'Self-enabling',
'Sharable',
'Stand-alone',
'Streamlined',
'Switchable',
'Synchronized',
'Synergistic',
'Synergized',
'Team-oriented',
'Total',
'Triple-buffered',
'Universal',
'Up-sized',
'Upgradable',
'User-centric',
'User-friendly',
'Versatile',
'Virtual',
'Visionary',
'Vision-oriented'),
('24hour',
'24/7',
'3rdgeneration',
'4thgeneration',
'5thgeneration',
'6thgeneration',
'actuating',
'analyzing',
'asymmetric',
'asynchronous',
'attitude-oriented',
'background',
'bandwidth-monitored',
'bi-directional',
'bifurcated',
'bottom-line',
'clear-thinking',
'client-driven',
'client-server',
'coherent',
'cohesive',
'composite',
'context-sensitive',
'contextually-based',
'content-based',
'dedicated',
'demand-driven',
'didactic',
'directional',
'discrete',
'disintermediate',
'dynamic',
'eco-centric',
'empowering',
'encompassing',
'even-keeled',
'executive',
'explicit',
'exuding',
'fault-tolerant',
'foreground',
'fresh-thinking',
'full-range',
'global',
'grid-enabled',
'heuristic',
'high-level',
'holistic',
'homogeneous',
'human-resource',
'hybrid',
'impactful',
'incremental',
'intangible',
'interactive',
'intermediate',
'leadingedge',
'local',
'logistical',
'maximized',
'methodical',
'mission-critical',
'mobile',
'modular',
'motivating',
'multimedia',
'multi-state',
'multi-tasking',
'national',
'needs-based',
'neutral',
'next generation',
'non-volatile',
'object-oriented',
'optimal',
'optimizing',
'radical',
'real-time',
'reciprocal',
'regional',
'responsive',
'scalable',
'secondary',
'solution-oriented',
'stable',
'static',
'systematic',
'systemic',
'system-worthy',
'tangible',
'tertiary',
'transitional',
'uniform',
'upward-trending',
'user-facing',
'value-added',
'web-enabled',
'well-modulated',
'zero administration',
'zero-defect',
'zero tolerance'),
('ability',
'access',
'adapter',
'algorithm',
'alliance',
'analyzer',
'application',
'approach',
'architecture',
'archive',
'artificial intelligence',
'array',
'attitude',
'benchmark',
'budgetary management',
'capability',
'capacity',
'challenge',
'circuit',
'collaboration',
'complexity',
'concept',
'conglomeration',
'contingency',
'core',
'customer loyalty',
'database',
'data-warehouse',
'definition',
'emulation',
'encoding',
'encryption',
'extranet',
'firmware',
'flexibility',
'focus group',
'forecast',
'frame',
'framework',
'function',
'functionalities',
'Graphic Interface',
'groupware',
'Graphical User Interface',
'hardware',
'help-desk',
'hierarchy',
'hub',
'implementation',
'info-mediaries',
'infrastructure',
'initiative',
'installation',
'instruction set',
'interface',
'Internet solution',
'intranet',
'knowledge user',
'knowledgebase',
'Local Area Network',
'leverage',
'matrices',
'matrix',
'methodology',
'middleware',
'migration',
'model',
'moderator',
'monitoring',
'moratorium',
'neural-net',
'open architecture',
'open system',
'orchestration',
'paradigm',
'parallelism',
'policy',
'portal',
'pricing structure',
'process improvement',
'product',
'productivity',
'project',
'projection',
'protocol',
'secured line',
'service-desk',
'software',
'solution',
'standardization',
'strategy',
'structure',
'success',
'superstructure',
'support',
'synergy',
'system engine',
'task-force',
'throughput',
'time-frame',
'toolset',
'utilization',
'website',
'workforce'))
bsWords = (
('implement',
'utilize',
'integrate',
'streamline',
'optimize',
'evolve',
'transform',
'embrace',
'enable',
'orchestrate',
'leverage',
'reinvent',
'aggregate',
'architect',
'enhance',
'incentivize',
'morph',
'empower',
'envisioneer',
'monetize',
'harness',
'facilitate',
'seize',
'disintermediate',
'synergize',
'strategize',
'deploy',
'brand',
'grow',
'target',
'syndicate',
'synthesize',
'deliver',
'mesh',
'incubate',
'engage',
'maximize',
'benchmark',
'expedite',
're-intermediate',
'whiteboard',
'visualize',
'repurpose',
'innovate',
'scale',
'unleash',
'drive',
'extend',
'engineer',
'revolutionize',
'generate',
'exploit',
'transition',
'e-enable',
'iterate',
'cultivate',
'matrix',
'productize',
'redefine',
're-contextualize'),
('clicks-and-mortar',
'value-added',
'vertical',
'proactive',
'robust',
'revolutionary',
'scalable',
'leading-edge',
'innovative',
'intuitive',
'strategic',
'e-business',
'mission-critical',
'sticky',
'one-to-one',
'24/7',
'end-to-end',
'global',
'B2B',
'B2C',
'granular',
'frictionless',
'virtual',
'viral',
'dynamic',
'24/365',
'best-of-breed',
'killer',
'magnetic',
'bleeding-edge',
'web-enabled',
'interactive',
'dot-com',
'sexy',
'back-end',
'real-time',
'efficient',
'front-end',
'distributed',
'seamless',
'extensible',
'turn-key',
'world-class',
'open-source',
'cross-platform',
'cross-media',
'synergistic',
'bricks-and-clicks',
'out-of-the-box',
'enterprise',
'integrated',
'impactful',
'wireless',
'transparent',
'next-generation',
'cutting-edge',
'user-centric',
'visionary',
'customized',
'ubiquitous',
'plug-and-play',
'collaborative',
'compelling',
'holistic',
'rich'),
('synergies',
'web-readiness',
'paradigms',
'markets',
'partnerships',
'infrastructures',
'platforms',
'initiatives',
'channels',
'eyeballs',
'communities',
'ROI',
'solutions',
'e-tailers',
'e-services',
'action-items',
'portals',
'niches',
'technologies',
'content',
'vortals',
'supply-chains',
'convergence',
'relationships',
'architectures',
'interfaces',
'e-markets',
'e-commerce',
'systems',
'bandwidth',
'info-mediaries',
'models',
'mindshare',
'deliverables',
'users',
'schemas',
'networks',
'applications',
'metrics',
'e-business',
'functionalities',
'experiences',
'web services',
'methodologies'))
def company(self):
"""
:example 'Acme Ltd'
"""
pattern = self.random_element(self.formats)
return self.generator.parse(pattern)
def company_suffix(self):
"""
:example 'Ltd'
"""
return self.random_element(self.company_suffixes)
def catch_phrase(self):
"""
:example 'Robust full-range hub'
"""
result = []
for word_list in self.catch_phrase_words:
result.append(self.random_element(word_list))
return " ".join(result)
def bs(self):
"""
:example 'integrate extensible convergence'
"""
result = []
for word_list in self.bsWords:
result.append(self.random_element(word_list))
return " ".join(result)
|
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
'''
Developer Navdeep Ghai
Email [email protected]
'''
import frappe
from frappe import _, msgprint
from frappe.utils import cint, cstr, flt
from bcommerce.utils.api import get_connection, get_last_sync_id, get_queue_status
from bcommerce.utils import get_resource, validate_resource
from bcommerce.utils.logger import make_logs
KEYS = ['last_name', 'first_name', 'secure_url', 'country_code', 'domain', 'phone',
'address','plan_level', 'admin_email', 'plan_name', 'order_email']
'''
This file has functions to sync the master data.
It is used to sync the Bcommerce Setting with ERPNext.
'''
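# A minimal usage sketch: sync_with_store() would typically be triggered from a
# scheduled job or a manual call; the import path below is illustrative and must
# be adjusted to wherever this module actually lives in the app.
#   from bcommerce.utils.sync import sync_with_store  # hypothetical module path
#   frappe.enqueue(sync_with_store)  # run the full sync in the background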
def sync_with_store():
try:
setting = frappe.get_doc("Bcommerce Setting", "Bcommerce Setting")
if not get_queue_status():
msgprint(_("Sync is already in progress"))
return
make_logs("Queued", "Syncing", message="Queued for syncing, Store Setting")
sync_store()
sync_currencies(setting)
sync_countries(setting)
sync_payments(setting)
make_logs("Sync Completed", "Syncing", "Syncing complete successfully")
frappe.db.commit()
except Exception as e:
msg = "{0}, {1}".format("Error while syncing store setting", frappe.get_traceback())
make_logs("Failed", "Syncing", message=msg)
def sync_store(store=None, save=True, doc=None):
if not doc:
doc = frappe.get_doc("Bcommerce Setting", "Bcommerce Setting")
d = doc.as_dict()
if not store:
store = get_connection().Store.all()
for key, val in store.iteritems():
if key in KEYS:
doc.set(key, val)
doc.set("default_currency", store.currency)
if save:
doc.save(ignore_permissions=True)
def sync_currencies(setting, id=None):
setting = frappe.get_doc("Bcommerce Setting", "Bcommerce Setting")
if id and not frappe.db.get_value("Currency",{"bcommerce_currency_id":id}):
currency = get_resource("Currencies", id)
if not currency:
return
update_currency(currency,setting)
else:
currencies = get_connection().Currencies.all()
for currency in currencies:
if validate_resource(currency):
update_currency(currency, setting)
def update_currency(currency, setting):
flag = frappe.db.get_value("Currency", {"name": currency.currency_code}, as_dict=True)
if flag:
doc = frappe.get_doc("Currency", currency.currency_code)
doc.update({
"bcommerce_currency_id": currency.id
})
doc.save(ignore_permissions=True)
if currency.currency_exchange_rate and not currency.currency_code == setting.default_currency:
frm = setting.default_currency
to = currency.currency_code
rate = currency.currency_exchange_rate
make_currency_exchange(frm, to, rate)
def make_currency_exchange(frm, to, rate):
name = get_currency_exchange_name(frm, to)
if not frappe.db.get_value("Currency Exchange", filters={"name":name}):
frappe.get_doc({
"doctype": "Currency Exchange",
"date": frappe.utils.nowdate(),
"from_currency": frm,
"to_currency": to,
"exchange_rate": rate
}).save(ignore_permissions=True)
else:
doc = frappe.get_doc("Currency Exchange", name)
doc.update({
"date": frappe.utils.nowdate(),
"from_currency": frm,
"to_currency": to,
"exchange_rate": rate
})
doc.save(ignore_permissions=True)
def get_currency_exchange_name(frm, to):
name = "{0}-{1}-{2}".format(frappe.utils.nowdate(), frm, to)
return name
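# For example (the date part is whatever frappe.utils.nowdate() returns that day):
#   get_currency_exchange_name('USD', 'EUR')  ->  '2020-01-15-USD-EUR'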
def sync_countries(setting):
"""
There are less countries in the world than limit of record
provided as with parameter in request
"""
countries = get_connection().Countries.all(limit=250)
for country in countries if countries else []:
try:
update_country(country, setting)
make_states(country)
except:
pass
def update_country(country, setting):
"""
There are two ISO code used within Big Commerce Sytem ISO2 is same as country
code in EPRNext ISO3 is different from from country code in ERPNext
"""
flag = frappe.db.get_value("Country", filters={"name":country.country}, as_dict=True)
doc = None
if flag:
doc = frappe.get_doc("Country", flag.get("name"))
doc.update({
"name": country.country,
"code": country.country_iso2,
})
else:
doc = frappe.get_doc({
"doctype": "Country",
"name": country.country,
"code": country.country_iso2,
})
if doc:
doc.flags.ignore_mandatory = 1
doc.save(ignore_permissions=True)
def make_states(country):
states = []
for state in country.states():
if validate_resource(state):
name = frappe.db.get_value("Bcommerce State", {"state":state.state})
doc = None
if name:
doc = frappe.get_doc("Bcommerce State", name.get("name"))
doc.update({
"abbr":state.state_abbreviation,
"state": state.state,
"country": country.country
})
else:
doc = frappe.get_doc({
"doctype": "Bcommerce State",
"abbr": state.state_abbreviation,
"state": state.state,
"country": country.country
})
if doc:
doc.save(ignore_permissions=True)
frappe.db.commit()
def sync_payments(setting):
payment_methods = get_connection().PaymentMethods.all()
for pay_method in payment_methods:
if validate_resource(pay_method, "name"):
flag = frappe.db.get_value("Mode of Payment", {"mode_of_payment":pay_method.name})
doc = None
if flag:
doc = frappe.get_doc("Mode of Payment", {"mode_of_payment":pay_method.name})
doc.update({
"mode_of_payment":pay_method.name
})
else:
doc = frappe.get_doc({
"doctype": "Mode of Payment",
"mode_of_payment": pay_method.name
})
if doc:
doc.flags.ignore_mandatory = 1
doc.save(ignore_permissions=True)
frappe.db.commit()
'''
Get Brand from Bigcommerce
'''
def get_brand(resource_id):
brand = None
if not resource_id:
return brand
flag = frappe.db.get_value("Brand", {"bcommerce_brand_id":resource_id}, as_dict=True)
if flag:
brand = flag.get("name")
else:
brand = get_resource("Brands", resource_id)
if brand:
doc = frappe.get_doc({
"doctype": "Brand",
"description": brand.meta_description,
"brand": brand.name,
"bcommerce_brand_id": brand.id
})
doc.flags.ignore_mandatory = 1
doc.save(ignore_permissions=True)
brand = doc.name
return None if not brand else brand
def sync_bulk_brands():
try:
min_id = get_last_sync_id("bcommerce_brand_id", "Brand")
max_id = min_id + 250 #250 is limit of resource list
brands = get_connection().Brands.all(min_id=min_id, max_id=max_id, limit=250)
if brands:
for brand in brands:
if validate_resource(brand):
if not frappe.db.get_value("Brand", {"bcommerce_brand_id": brand.id}):
doc = frappe.get_doc({
"doctype": "Brand",
"description": brand.meta_description,
"brand": brand.name,
"bcommerce_brand_id": brand.id
})
doc.flags.ignore_mandatory = 1
doc.save(ignore_permissions=True)
except Exception as e:
print "Exception raised while syncing brand from bigcommerce"
print frappe.get_traceback()
'''
Save master data, Customer Group
'''
def get_customer_group(group_id, setting):
group_id = cint(group_id)
flag = frappe.db.get_value("Customer Group", {"bcommerce_customer_group_id":group_id}, as_dict=True)
if flag:
return flag.get("name")
else:
cg = get_resource("CustomerGroups", group_id)
if not cg:
return setting.customer_group
doc = frappe.get_doc({
"doctype":"Customer Group",
"is_group":0,
"parent_customer_group": setting.customer_group,
"customer_group_name": cg.name,
"bcommerce_customer_group_id":cg.id
})
doc.save(ignore_permissions=True)
return cg.name
def sync_bulk_customer_group():
try:
setting = frappe.get_doc("Bcommerce Setting", "Bcommerce Setting")
customer_groups = get_connection().CustomerGroups.all(limit=250)
if customer_groups:
for cg in customer_groups:
if validate_resource(cg):
doc = frappe.get_doc({
"doctype":"Customer Group",
"is_group":0,
"parent_customer_group": setting.customer_group,
"customer_group_name": cg.name,
"bcommerce_customer_group_id":cg.id
})
doc.save(ignore_permissions=True)
except Exception as e:
print frappe.get_traceback()
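# Illustrative only: a minimal manual-sync helper showing how the pieces above
# chain together, mirroring the flow at the top of this module. The wrapper
# function itself is hypothetical and not part of the original code.
def _example_manual_sync():
	setting = frappe.get_doc("Bcommerce Setting", "Bcommerce Setting")
	sync_store(doc=setting)
	sync_currencies(setting)
	sync_countries(setting)
	sync_payments(setting)
	frappe.db.commit()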
|
|
from typing import Callable, Dict, Type
from contextlib import contextmanager
import os
import logging
import shutil
import tempfile
from filelock import FileLock
import ray
from ray import tune
from ray.tune.resources import Resources
from ray.tune.utils.trainable import TrainableUtil
from ray.tune.result import RESULT_DUPLICATE
from ray.tune.logger import NoopLogger
from ray.tune.function_runner import wrap_function
from horovod.ray import RayExecutor
logger = logging.getLogger(__name__)
def get_rank() -> str:
"""Returns rank of worker."""
return os.environ["HOROVOD_RANK"]
def logger_creator(log_config: Dict, logdir: str) -> NoopLogger:
"""Simple NOOP logger for worker trainables."""
index = get_rank()
worker_dir = os.path.join(logdir, "worker_{}".format(index))
os.makedirs(worker_dir, exist_ok=True)
return NoopLogger(log_config, worker_dir)
@contextmanager
def distributed_checkpoint_dir(step: int, disable: bool = False):
"""ContextManager for creating a distributed checkpoint.
Only checkpoints a file on the "main" training actor, avoiding
redundant work.
Args:
step (int): Used to label the checkpoint
disable (bool): Disable for prototyping.
Yields:
str: A path to a directory. This path will be used
again when invoking the training_function.
Example:
.. code-block:: python
def train_func(config, checkpoint_dir):
if checkpoint_dir:
path = os.path.join(checkpoint_dir, "checkpoint")
model_state_dict = torch.load(path)
if epoch % 3 == 0:
with distributed_checkpoint_dir(step=epoch) as checkpoint_dir:
path = os.path.join(checkpoint_dir, "checkpoint")
torch.save(model.state_dict(), path)
"""
if int(get_rank()) == 0 and not disable:
with tune.checkpoint_dir(step=step) as checkpoint_dir:
yield checkpoint_dir
else:
path = tempfile.mkdtemp()
yield path
shutil.rmtree(path)
class _HorovodTrainable(tune.Trainable):
"""Abstract Trainable class for Horovod."""
# Callable function for training.
_function = None
# Number of hosts (nodes) to allocate per trial
_num_hosts: int = 1
# Number of workers (slots) to place on each host.
_num_slots: int = 1
# Number of CPU resources to reserve for each worker.
_num_cpus_per_slot: int = 1
# Whether to reserve and pass GPU resources through.
_use_gpu: bool = False
# Whether the function has completed training.
_finished: bool = False
# Horovod settings
_ssh_str: str = None
_ssh_identity_file: str = None
_timeout_s: int = 30
@property
def num_workers(self):
return self._num_hosts * self._num_slots
def setup(self, config: Dict):
trainable = wrap_function(self.__class__._function)
# We use a filelock here to ensure that the file-writing
# process is safe across different trainables.
if self._ssh_identity_file:
with FileLock(self._ssh_identity_file + ".lock"):
settings = RayExecutor.create_settings(
self._timeout_s, self._ssh_identity_file, self._ssh_str)
else:
settings = RayExecutor.create_settings(
self._timeout_s, self._ssh_identity_file, self._ssh_str)
self.executor = RayExecutor(
settings,
cpus_per_slot=self._num_cpus_per_slot,
use_gpu=self._use_gpu,
num_hosts=self._num_hosts,
num_slots=self._num_slots)
# We can't put `self` in the lambda closure, so we
# resolve the variable ahead of time.
logdir_ = str(self.logdir)
# Starts the workers as specified by the resources above.
self.executor.start(
executable_cls=trainable,
executable_kwargs={
"config": config,
"logger_creator": lambda cfg: logger_creator(cfg, logdir_)
})
def step(self) -> Dict:
if self._finished:
raise RuntimeError("Training has already finished.")
result = self.executor.execute(lambda w: w.step())[0]
if RESULT_DUPLICATE in result:
self._finished = True
return result
def save_checkpoint(self, checkpoint_dir: str) -> str:
# TODO: optimize if colocated
save_obj = self.executor.execute_single(lambda w: w.save_to_object())
checkpoint_path = TrainableUtil.create_from_pickle(
save_obj, checkpoint_dir)
return checkpoint_path
def load_checkpoint(self, checkpoint_dir: str):
checkpoint_obj = TrainableUtil.checkpoint_to_object(checkpoint_dir)
x_id = ray.put(checkpoint_obj)
return self.executor.execute(
lambda w: w.restore_from_object(ray.get(x_id)))
def stop(self):
self.executor.execute(lambda w: w.stop())
self.executor.shutdown()
def DistributedTrainableCreator(
func: Callable,
use_gpu: bool = False,
num_hosts: int = 1,
num_slots: int = 1,
num_cpus_per_slot: int = 1,
timeout_s: int = 30,
replicate_pem: bool = False) -> Type[_HorovodTrainable]:
"""Converts Horovod functions to be executable by Tune.
Requires horovod > 0.19 to work.
This function wraps and sets the resources for a given Horovod
function to be used with Tune. It generates a Horovod Trainable (trial)
which can itself be a distributed training job. One basic assumption of
this implementation is that all sub-workers
of a trial will be placed evenly across different machines.
It is recommended that if `num_hosts` per trial > 1, you set
num_slots == the size (or number of GPUs) of a single host.
If num_hosts == 1, then you can set num_slots to be <=
the size (number of GPUs) of a single host.
The above assumption can be relaxed; please file a feature request
on GitHub to inform the maintainers.
Another assumption is that this API requires gloo as the underlying
communication primitive. You will need to install Horovod with
`HOROVOD_WITH_GLOO` enabled.
*Fault Tolerance:* The trial workers themselves are not fault tolerant.
When a host of a trial fails, all workers of a trial are expected to
die, and the trial is expected to restart. This currently does not
support function checkpointing.
Args:
func (Callable[[dict], None]): A training function that takes in
a config dict for hyperparameters and should initialize
horovod via horovod.init.
use_gpu (bool): Whether to allocate a GPU per worker.
num_cpus_per_slot (int): Number of CPUs to request
from Ray per worker.
num_hosts (int): Number of hosts that each trial is expected
to use.
num_slots (int): Number of slots (workers) to start on each host.
timeout_s (int): Seconds for Horovod rendezvous to timeout.
replicate_pem (bool): THIS MAY BE INSECURE. If true, this will
replicate the underlying Ray cluster ssh key across all hosts.
This may be useful if using the Ray Autoscaler.
Returns:
Trainable class that can be passed into `tune.run`.
Example:
.. code-block:: python
def train(config):
horovod.init()
horovod.allreduce()
from ray.tune.integration.horovod import DistributedTrainableCreator
trainable_cls = DistributedTrainableCreator(
train, num_hosts=1, num_slots=2, use_gpu=True)
tune.run(trainable_cls)
.. versionadded:: 1.0.0
"""
ssh_identity_file = None
sshkeystr = None
if replicate_pem:
from ray.tune.cluster_info import get_ssh_key
ssh_identity_file = get_ssh_key()
if os.path.exists(ssh_identity_file):
# For now, we assume that you're on a Ray cluster.
with open(ssh_identity_file) as f:
sshkeystr = f.read()
class WrappedHorovodTrainable(_HorovodTrainable):
_function = func
_num_hosts = num_hosts
_num_slots = num_slots
_num_cpus_per_slot = num_cpus_per_slot
_use_gpu = use_gpu
_ssh_identity_file = ssh_identity_file
_ssh_str = sshkeystr
_timeout_s = timeout_s
@classmethod
def default_resource_request(cls, config: Dict):
extra_gpu = int(num_hosts * num_slots) * int(use_gpu)
extra_cpu = int(num_hosts * num_slots * num_cpus_per_slot)
return Resources(
cpu=0,
gpu=0,
extra_cpu=extra_cpu,
extra_gpu=extra_gpu,
)
return WrappedHorovodTrainable
# pytest presents a bunch of serialization problems
# that force us to include mocks as part of the module.
def _train_simple(config: Dict):
import horovod.torch as hvd
hvd.init()
from ray import tune
for i in range(config.get("epochs", 2)):
import time
time.sleep(1)
if config.get("enable_checkpoint", True):
with distributed_checkpoint_dir(step=i) as checkpoint_dir:
path = os.path.join(checkpoint_dir, "checkpoint")
import pickle
with open(path, "wb") as f:
pickle.dump("hi", f)
tune.report(test=1, rank=hvd.rank())
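# Illustrative driver (not part of the library): wraps the mock training
# function above with DistributedTrainableCreator and hands the resulting
# trainable to tune.run, mirroring the docstring example. The resource and
# config values are assumptions for a small local run.
def _example_run_train_simple():
    trainable_cls = DistributedTrainableCreator(
        _train_simple, num_hosts=1, num_slots=2, use_gpu=False)
    return tune.run(trainable_cls, config={"epochs": 1}, num_samples=1)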
|
|
from direct.directnotify import DirectNotifyGlobal
from direct.distributed.DistributedObjectUD import DistributedObjectUD
class DistributedToonUD(DistributedObjectUD):
notify = DirectNotifyGlobal.directNotify.newCategory("DistributedToonUD")
def setDNAString(self, todo0):
pass
def setGM(self, todo0):
pass
def setMaxBankMoney(self, todo0):
pass
def setBankMoney(self, todo0):
pass
def setMaxMoney(self, todo0):
pass
def setMoney(self, todo0):
pass
def setMaxHp(self, todo0):
pass
def setHp(self, todo0):
pass
def toonUp(self, todo0):
pass
def takeDamage(self, todo0):
pass
def setBattleId(self, todo0):
pass
def setExperience(self, todo0):
pass
def setMaxCarry(self, todo0):
pass
def setTrackAccess(self, todo0):
pass
def setTrackProgress(self, todo0, todo1):
pass
def setTrackBonusLevel(self, todo0):
pass
def setInventory(self, todo0):
pass
def setMaxNPCFriends(self, todo0):
pass
def setNPCFriendsDict(self, todo0):
pass
def setDefaultShard(self, todo0):
pass
def setDefaultZone(self, todo0):
pass
def setShtickerBook(self, todo0):
pass
def setZonesVisited(self, todo0):
pass
def setHoodsVisited(self, todo0):
pass
def setInterface(self, todo0):
pass
def setLastHood(self, todo0):
pass
def setTutorialAck(self, todo0):
pass
def setMaxClothes(self, todo0):
pass
def setClothesTopsList(self, todo0):
pass
def setClothesBottomsList(self, todo0):
pass
def setMaxAccessories(self, todo0):
pass
def setHatList(self, todo0):
pass
def setGlassesList(self, todo0):
pass
def setBackpackList(self, todo0):
pass
def setShoesList(self, todo0):
pass
def setHat(self, todo0, todo1, todo2):
pass
def setGlasses(self, todo0, todo1, todo2):
pass
def setBackpack(self, todo0, todo1, todo2):
pass
def setShoes(self, todo0, todo1, todo2):
pass
def setGardenSpecials(self, todo0):
pass
def setEarnedExperience(self, todo0):
pass
def setTunnelIn(self, todo0, todo1, todo2, todo3, todo4, todo5):
pass
def setTunnelOut(self, todo0, todo1, todo2, todo3, todo4, todo5, todo6):
pass
def setAnimState(self, todo0, todo1, todo2):
pass
def setEmoteState(self, todo0, todo1, todo2):
pass
def setEmoteAccess(self, todo0):
pass
def setCustomMessages(self, todo0):
pass
def setSleepAutoReply(self, todo0):
pass
def setResistanceMessages(self, todo0):
pass
def setPetTrickPhrases(self, todo0):
pass
def setCatalogSchedule(self, todo0, todo1):
pass
def setCatalog(self, todo0, todo1, todo2):
pass
def setMailboxContents(self, todo0):
pass
def setDeliverySchedule(self, todo0):
pass
def setGiftSchedule(self, todo0):
pass
def setAwardMailboxContents(self, todo0):
pass
def setAwardSchedule(self, todo0):
pass
def setAwardNotify(self, todo0):
pass
def setCatalogNotify(self, todo0, todo1):
pass
def playSplashEffect(self, todo0, todo1, todo2):
pass
def setWhisperSCToontaskFrom(self, todo0, todo1, todo2, todo3, todo4):
pass
def setSCToontask(self, todo0, todo1, todo2, todo3):
pass
def reqSCResistance(self, todo0, todo1):
pass
def setSCResistance(self, todo0, todo1):
pass
def setSpeedChatStyleIndex(self, todo0):
pass
def setTrophyScore(self, todo0):
pass
def setTeleportAccess(self, todo0):
pass
def checkTeleportAccess(self, todo0):
pass
def battleSOS(self, todo0):
pass
def teleportQuery(self, todo0):
pass
def teleportResponse(self, todo0, todo1, todo2, todo3, todo4):
pass
def teleportResponseToAI(self, todo0, todo1, todo2, todo3, todo4, todo5):
pass
def teleportGiveup(self, todo0):
pass
def teleportGreeting(self, todo0):
pass
def setCogStatus(self, todo0):
pass
def setCogCount(self, todo0):
pass
def setCogRadar(self, todo0):
pass
def setBuildingRadar(self, todo0):
pass
def setCogLevels(self, todo0):
pass
def setCogTypes(self, todo0):
pass
def setCogParts(self, todo0):
pass
def setCogMerits(self, todo0):
pass
def setCogIndex(self, todo0):
pass
def setDisguisePageFlag(self, todo0):
pass
def setSosPageFlag(self, todo0):
pass
def setHouseId(self, todo0):
pass
def setQuests(self, todo0):
pass
def setQuestHistory(self, todo0):
pass
def setRewardHistory(self, todo0, todo1):
pass
def setQuestCarryLimit(self, todo0):
pass
def requestDeleteQuest(self, todo0):
pass
def setCheesyEffect(self, todo0, todo1, todo2):
pass
def setGhostMode(self, todo0):
pass
def setPosIndex(self, todo0):
pass
def setFishCollection(self, todo0, todo1, todo2):
pass
def setMaxFishTank(self, todo0):
pass
def setFishTank(self, todo0, todo1, todo2):
pass
def setFishingRod(self, todo0):
pass
def setFishingTrophies(self, todo0):
pass
def setFlowerCollection(self, todo0, todo1):
pass
def setFlowerBasket(self, todo0, todo1):
pass
def setMaxFlowerBasket(self, todo0):
pass
def setGardenTrophies(self, todo0):
pass
def setShovel(self, todo0):
pass
def setShovelSkill(self, todo0):
pass
def setWateringCan(self, todo0):
pass
def setWateringCanSkill(self, todo0):
pass
def promoteShovel(self, todo0):
pass
def promoteWateringCan(self, todo0):
pass
def reactivateWater(self):
pass
def presentPie(self, todo0, todo1, todo2, todo3, todo4, todo5, todo6):
pass
def tossPie(self, todo0, todo1, todo2, todo3, todo4, todo5, todo6, todo7, todo8):
pass
def pieSplat(self, todo0, todo1, todo2, todo3, todo4, todo5):
pass
def setPieType(self, todo0):
pass
def setNumPies(self, todo0):
pass
def catalogGenClothes(self, todo0):
pass
def catalogGenAccessories(self, todo0):
pass
def setPetId(self, todo0):
pass
def setPetMovie(self, todo0, todo1):
pass
def setPetTutorialDone(self, todo0):
pass
def setFishBingoTutorialDone(self, todo0):
pass
def setFishBingoMarkTutorialDone(self, todo0):
pass
def setKartBodyType(self, todo0):
pass
def setKartBodyColor(self, todo0):
pass
def setKartAccessoryColor(self, todo0):
pass
def setKartEngineBlockType(self, todo0):
pass
def setKartSpoilerType(self, todo0):
pass
def setKartFrontWheelWellType(self, todo0):
pass
def setKartBackWheelWellType(self, todo0):
pass
def setKartRimType(self, todo0):
pass
def setKartDecalType(self, todo0):
pass
def updateKartDNAField(self, todo0, todo1):
pass
def addOwnedAccessory(self, todo0):
pass
def removeOwnedAccessory(self, todo0):
pass
def setTickets(self, todo0):
pass
def setKartingHistory(self, todo0):
pass
def setKartingTrophies(self, todo0):
pass
def setKartingPersonalBest(self, todo0):
pass
def setKartingPersonalBest2(self, todo0):
pass
def setKartAccessoriesOwned(self, todo0):
pass
def setCurrentKart(self, todo0):
pass
def squish(self, todo0):
pass
def announceBingo(self):
pass
def trickOrTreatTargetMet(self, todo0):
pass
def trickOrTreatMilestoneMet(self):
pass
def winterCarolingTargetMet(self, todo0):
pass
def setCogSummonsEarned(self, todo0):
pass
def reqCogSummons(self, todo0, todo1):
pass
def cogSummonsResponse(self, todo0, todo1, todo2):
pass
def reqUseSpecial(self, todo0):
pass
def useSpecialResponse(self, todo0):
pass
def setGardenStarted(self, todo0):
pass
def sendToGolfCourse(self, todo0):
pass
def setGolfHistory(self, todo0):
pass
def setPackedGolfHoleBest(self, todo0):
pass
def setGolfCourseBest(self, todo0):
pass
def setUnlimitedSwing(self, todo0):
pass
def logSuspiciousEvent(self, todo0):
pass
def logMessage(self, todo0):
pass
def forceLogoutWithNotify(self):
pass
def setPinkSlips(self, todo0):
pass
def setNametagStyle(self, todo0):
pass
def setMail(self, todo0):
pass
def setNumMailItems(self, todo0):
pass
def setSimpleMailNotify(self, todo0):
pass
def setInvites(self, todo0):
pass
def setPartiesInvitedTo(self, todo0):
pass
def setHostedParties(self, todo0):
pass
def setPartyReplies(self, todo0):
pass
def updateInvite(self, todo0, todo1):
pass
def updateReply(self, todo0, todo1, todo2):
pass
def setPartyCanStart(self, todo0):
pass
def setPartyStatus(self, todo0, todo1):
pass
def announcePartyStarted(self, todo0):
pass
def setNeverStartedPartyRefunded(self, todo0, todo1, todo2):
pass
def setModuleInfo(self, todo0):
pass
def setDISLname(self, todo0):
pass
def setDISLid(self, todo0):
pass
def flagAv(self, todo0, todo1, todo2):
pass
def requestPing(self, todo0):
pass
def ping(self, todo0):
pass
def pingresp(self, todo0):
pass
def setAchievements(self, achievements):
pass
|
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions for building models."""
import collections
import logging
import re
import time
import tensorflow.compat.v1 as tf
from rnn_decoder.multi_rnn import MultiRNNCell
from utils import dialogue_utils
from utils import iterator_utils
from utils import misc_utils as utils
from utils import vocab_utils
def get_initializer(init_op, seed=None, init_weight=None):
"""Create an initializer. init_weight is only for uniform."""
if init_op == "uniform":
assert init_weight
return tf.random_uniform_initializer(-init_weight, init_weight, seed=seed)
elif init_op == "glorot_normal":
return tf.keras.initializers.glorot_normal(seed=seed)
elif init_op == "glorot_uniform":
return tf.keras.initializers.glorot_uniform(seed=seed)
else:
raise ValueError("Unknown init_op %s" % init_op)
def get_device_str(device_id, num_gpus):
"""Return a device string for multi-GPU setup."""
if num_gpus == 0:
return "/cpu:0"
device_str_output = "/gpu:%d" % (device_id % num_gpus)
return device_str_output
class ExtraArgs(
collections.namedtuple(
"ExtraArgs",
("single_cell_fn", "model_device_fn", "attention_mechanism_fn"))):
pass
class TrainModel(
collections.namedtuple(
"TrainModel",
("graph", "model", "placeholder_iterator", "placeholder_handle",
"train_iterator", "skip_count_placeholder"))):
pass
class EvalModel(
collections.namedtuple(
"EvalModel",
("graph", "model", "placeholder_iterator", "placeholder_handle",
"eval_iterator", "data_file_placeholder", "kb_file_placeholder"))):
pass
class InferModel(
collections.namedtuple(
"InferModel",
("graph", "model", "placeholder_iterator", "placeholder_handle",
"infer_iterator", "data_src_placeholder", "kb_placeholder",
"batch_size_placeholder"))):
pass
class SelfplayModel(
collections.namedtuple(
"SelfplayModel",
("graph", "model", "placeholder_iterator", "placeholder_handle",
"train_iterator", "self_play_ft_iterator", "self_play_st_iterator",
"data_placeholder", "kb_placeholder", "skip_count_placeholder",
"batch_size_placeholder"))):
pass
def create_train_model(model_creator,
hparams,
scope=None,
num_workers=1,
jobid=0,
extra_args=None):
"""Create graph, model and iterator for training."""
graph = tf.Graph()
with graph.as_default(), tf.container(scope or "train"):
vocab_table, reverse_vocab_table = vocab_utils.create_vocab_tables(hparams.vocab_file)
data_dataset = tf.data.TextLineDataset(hparams.train_data)
kb_dataset = tf.data.TextLineDataset(hparams.train_kb)
skip_count_placeholder = tf.placeholder(shape=(), dtype=tf.int64)
# this is the actual train_iterator
train_iterator = iterator_utils.get_iterator(
data_dataset,
kb_dataset,
vocab_table,
batch_size=hparams.batch_size,
t1=hparams.t1.encode(),
t2=hparams.t2.encode(),
eod=hparams.eod,
len_action=hparams.len_action,
random_seed=hparams.random_seed,
num_buckets=hparams.num_buckets,
max_dialogue_len=hparams.max_dialogue_len,
skip_count=skip_count_placeholder,
num_shards=num_workers,
shard_index=jobid)
# this is the placeholder iterator. One can use this placeholder iterator
# to switch between training and evaluation.
handle = tf.placeholder(tf.string, shape=[])
iterator = tf.data.Iterator.from_string_handle(
handle, train_iterator.output_types, train_iterator.output_shapes)
batched_iterator = iterator_utils.get_batched_iterator(iterator)
model_device_fn = None
if extra_args:
model_device_fn = extra_args.model_device_fn
with tf.device(model_device_fn):
model = model_creator(
hparams,
iterator=batched_iterator,
handle=handle,
mode=tf.estimator.ModeKeys.TRAIN,
vocab_table=vocab_table,
scope=scope,
extra_args=extra_args,
reverse_vocab_table=reverse_vocab_table)
return TrainModel(
graph=graph,
model=model,
placeholder_iterator=iterator,
train_iterator=train_iterator,
placeholder_handle=handle,
skip_count_placeholder=skip_count_placeholder)
def create_eval_model(model_creator, hparams, scope=None, extra_args=None):
"""Create train graph, model, src/tgt file holders, and iterator."""
vocab_file = hparams.vocab_file
graph = tf.Graph()
with graph.as_default(), tf.container(scope or "eval"):
vocab_table = vocab_utils.create_vocab_tables(vocab_file)[0]
data_file_placeholder = tf.placeholder(shape=(), dtype=tf.string)
kb_file_placeholder = tf.placeholder(shape=(), dtype=tf.string)
data_dataset = tf.data.TextLineDataset(data_file_placeholder)
kb_dataset = tf.data.TextLineDataset(kb_file_placeholder)
# this is the eval_actual iterator
eval_iterator = iterator_utils.get_iterator(
data_dataset,
kb_dataset,
vocab_table,
batch_size=hparams.batch_size,
t1=hparams.t1.encode(),
t2=hparams.t2.encode(),
eod=hparams.eod,
len_action=hparams.len_action,
random_seed=hparams.random_seed,
num_buckets=hparams.num_buckets,
max_dialogue_len=hparams.max_dialogue_len)
# this is the placeholder iterator
handle = tf.placeholder(tf.string, shape=[])
iterator = tf.data.Iterator.from_string_handle(
handle, eval_iterator.output_types, eval_iterator.output_shapes)
batched_iterator = iterator_utils.get_batched_iterator(iterator)
model = model_creator(
hparams,
iterator=batched_iterator,
handle=handle,
mode=tf.estimator.ModeKeys.EVAL,
vocab_table=vocab_table,
scope=scope,
extra_args=extra_args)
return EvalModel(
graph=graph,
model=model,
placeholder_iterator=iterator,
placeholder_handle=handle,
eval_iterator=eval_iterator,
data_file_placeholder=data_file_placeholder,
kb_file_placeholder=kb_file_placeholder)
def create_infer_model(model_creator, hparams, scope=None, extra_args=None):
"""Create inference model."""
graph = tf.Graph()
with graph.as_default(), tf.container(scope or "infer"):
vocab_table, reverse_vocab_table = vocab_utils.create_vocab_tables(hparams.vocab_file)
data_src_placeholder = tf.placeholder(
shape=[None], dtype=tf.string, name="src_ph")
kb_placeholder = tf.placeholder(shape=[None], dtype=tf.string, name="kb_ph")
batch_size_placeholder = tf.placeholder(
shape=[], dtype=tf.int64, name="bs_ph")
data_src_dataset = tf.data.Dataset.from_tensor_slices(data_src_placeholder)
kb_dataset = tf.data.Dataset.from_tensor_slices(kb_placeholder)
# this is the actual infer iterator
infer_iterator = iterator_utils.get_infer_iterator(
data_src_dataset,
kb_dataset,
vocab_table,
batch_size=batch_size_placeholder,
eod=hparams.eod,
len_action=hparams.len_action)
# this is the placeholder infer iterator
handle = tf.placeholder(tf.string, shape=[])
iterator = tf.data.Iterator.from_string_handle(
handle, infer_iterator.output_types, infer_iterator.output_shapes)
batched_iterator = iterator_utils.get_batched_iterator(iterator)
model = model_creator(
hparams,
iterator=batched_iterator,
handle=handle,
mode=tf.estimator.ModeKeys.PREDICT,
vocab_table=vocab_table,
reverse_vocab_table=reverse_vocab_table,
scope=scope,
extra_args=extra_args)
return InferModel(
graph=graph,
model=model,
placeholder_iterator=iterator,
placeholder_handle=handle,
infer_iterator=infer_iterator,
data_src_placeholder=data_src_placeholder,
kb_placeholder=kb_placeholder,
batch_size_placeholder=batch_size_placeholder)
#
def self_play_iterator_creator(hparams, num_workers, jobid):
"""create a self play iterator. There are iterators that will be created here.
A supervised training iterator used for supervised learning. A full text
iterator and structured iterator used for reinforcement learning self play.
Full text iterators feeds data from text files while structured iterators
are initialized directly from objects. The former one is used for traiing.
The later one is used for self play dialogue generation to eliminate the
need of serializing them into actual text
files.
"""
vocab_table = vocab_utils.create_vocab_tables(hparams.vocab_file)[0]
data_dataset = tf.data.TextLineDataset(hparams.train_data)
kb_dataset = tf.data.TextLineDataset(hparams.train_kb)
skip_count_placeholder = tf.placeholder(shape=(), dtype=tf.int64)
# this is the actual iterator for supervised training
train_iterator = iterator_utils.get_iterator(
data_dataset,
kb_dataset,
vocab_table,
batch_size=hparams.batch_size,
t1=hparams.t1.encode(),
t2=hparams.t2.encode(),
eod=hparams.eod,
len_action=hparams.len_action,
random_seed=hparams.random_seed,
num_buckets=hparams.num_buckets,
max_dialogue_len=hparams.max_dialogue_len,
skip_count=skip_count_placeholder,
num_shards=num_workers,
shard_index=jobid)
# this is the actual iterator for self_play_fulltext_iterator
data_placeholder = tf.placeholder(
shape=[None], dtype=tf.string, name="src_ph")
kb_placeholder = tf.placeholder(shape=[None], dtype=tf.string, name="kb_ph")
batch_size_placeholder = tf.placeholder(
shape=[], dtype=tf.int64, name="bs_ph")
dataset_data = tf.data.Dataset.from_tensor_slices(data_placeholder)
kb_dataset = tf.data.Dataset.from_tensor_slices(kb_placeholder)
self_play_fulltext_iterator = iterator_utils.get_infer_iterator(
dataset_data,
kb_dataset,
vocab_table,
batch_size=batch_size_placeholder,
eod=hparams.eod,
len_action=hparams.len_action,
self_play=True)
# this is the actual iterator for self_play_structured_iterator
self_play_structured_iterator = tf.data.Iterator.from_structure(
tf.data.get_output_types(self_play_fulltext_iterator),
tf.data.get_output_shapes(self_play_fulltext_iterator))
iterators = [
train_iterator, self_play_fulltext_iterator, self_play_structured_iterator
]
# this is the list of placeholders
placeholders = [
data_placeholder, kb_placeholder, batch_size_placeholder,
skip_count_placeholder
]
return iterators, placeholders
def create_selfplay_model(model_creator,
is_mutable,
num_workers,
jobid,
hparams,
scope=None,
extra_args=None):
"""create slef play models."""
graph = tf.Graph()
with graph.as_default(), tf.container(scope or "selfplay"):
vocab_table, reverse_vocab_table = vocab_utils.create_vocab_tables(hparams.vocab_file)
if is_mutable:
mutable_index = 0
else:
mutable_index = 1
# get a list of iterators and placeholders
iterators, placeholders = self_play_iterator_creator(
hparams, num_workers, jobid)
train_iterator, self_play_fulltext_iterator, self_play_structured_iterator = iterators
data_placeholder, kb_placeholder, batch_size_placeholder, skip_count_placeholder = placeholders
# get an iterator handler
handle = tf.placeholder(tf.string, shape=[])
iterator = tf.data.Iterator.from_string_handle(
handle, tf.data.get_output_types(train_iterator), tf.data.get_output_shapes(train_iterator))
batched_iterator = iterator_utils.get_batched_iterator(iterator)
model_device_fn = None
if extra_args:
model_device_fn = extra_args.model_device_fn
with tf.device(model_device_fn):
model = model_creator(
hparams,
iterator=batched_iterator,
handle=handle,
mode=[
dialogue_utils.mode_self_play_mutable,
dialogue_utils.mode_self_play_immutable
][mutable_index],
vocab_table=vocab_table,
reverse_vocab_table=reverse_vocab_table,
scope=scope,
extra_args=extra_args)
return SelfplayModel(
graph=graph,
model=model,
placeholder_iterator=iterator,
placeholder_handle=handle,
train_iterator=train_iterator,
self_play_ft_iterator=self_play_fulltext_iterator,
self_play_st_iterator=self_play_structured_iterator,
data_placeholder=data_placeholder,
kb_placeholder=kb_placeholder,
skip_count_placeholder=skip_count_placeholder,
batch_size_placeholder=batch_size_placeholder)
def create_emb_for_encoder_and_decoder(vocab_size,
embed_size,
dtype=tf.float32,
num_partitions=0,
scope=None):
"""Create embedding matrix for both encoder and decoder."""
if num_partitions <= 1:
partitioner = None
else:
# Note: num_partitions > 1 is required for distributed training because
# embedding_lookup tries to colocate a single-partition embedding variable
# with the lookup ops, which may cause embedding variables to be placed on
# worker jobs.
partitioner = tf.fixed_size_partitioner(num_partitions)
with tf.variable_scope(
scope or "embeddings", dtype=dtype, partitioner=partitioner) as scope:
# Share embedding
embedding_encoder = tf.get_variable("shared_embedding",
[vocab_size, embed_size], dtype)
embedding_decoder = embedding_encoder
return embedding_encoder, embedding_decoder
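# Illustrative sketch: looking up token embeddings with the shared matrix
# created above. The vocabulary/embedding sizes and the `source_ids` tensor
# are assumptions for the example.
def _example_embedding_lookup(source_ids):
  embedding_encoder, _ = create_emb_for_encoder_and_decoder(
      vocab_size=10000, embed_size=128)
  return tf.nn.embedding_lookup(embedding_encoder, source_ids)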
def _single_cell(num_units,
dropout,
mode,
residual_connection=False,
device_str=None):
"""Create an instance of a single RNN cell."""
dropout = dropout if mode == tf.estimator.ModeKeys.TRAIN else 0.0
# Cell Type
utils.print_out(" GRU", new_line=False)
single_cell = tf.nn.rnn_cell.GRUCell(num_units)
# Dropout (= 1 - keep_prob)
if dropout > 0.0:
single_cell = tf.nn.rnn_cell.DropoutWrapper(
cell=single_cell, input_keep_prob=(1.0 - dropout))
utils.print_out(
" %s, dropout=%g " % (type(single_cell).__name__, dropout),
new_line=False)
# Residual
if residual_connection:
single_cell = tf.nn.rnn_cell.ResidualWrapper(single_cell)
utils.print_out(" %s" % type(single_cell).__name__, new_line=False)
# Device Wrapper
if device_str:
single_cell = tf.nn.rnn_cell.DeviceWrapper(single_cell, device_str)
utils.print_out(
" %s, device=%s" % (type(single_cell).__name__, device_str),
new_line=False)
return single_cell
def _cell_list(num_units,
num_layers,
num_residual_layers,
dropout,
mode,
num_gpus,
base_gpu=0,
single_cell_fn=None):
"""Create a list of RNN cells."""
if not single_cell_fn:
single_cell_fn = _single_cell
# Multi-GPU
cell_list = []
for i in range(num_layers):
utils.print_out(" cell %d" % i, new_line=False)
single_cell = single_cell_fn(
num_units=num_units,
dropout=dropout,
mode=mode,
residual_connection=(i >= num_layers - num_residual_layers),
device_str=get_device_str(i + base_gpu, num_gpus),
)
utils.print_out("")
cell_list.append(single_cell)
return cell_list
def create_rnn_cell(num_units,
num_layers,
num_residual_layers,
dropout,
mode,
num_gpus,
base_gpu=0,
single_cell_fn=None,
all_layer_outputs=False):
"""Create multi-layer RNN cell. When all_layer_outputs is True, that means we
want hidden states of all timestamps to pass through. In this case we use
MultiRNNCell, a slightly modified tensorflow RNN cell.
"""
cell_list = _cell_list(
num_units=num_units,
num_layers=num_layers,
num_residual_layers=num_residual_layers,
dropout=dropout,
mode=mode,
num_gpus=num_gpus,
base_gpu=base_gpu,
single_cell_fn=single_cell_fn)
if len(cell_list) == 1: # Single layer.
return cell_list[0]
else: # Multi layers
print(all_layer_outputs, "all_layer_outputs")
if all_layer_outputs:
return MultiRNNCell(cell_list)
else:
return tf.nn.rnn_cell.MultiRNNCell(cell_list)
def gradient_clip(gradients, max_gradient_norm):
"""Clipping gradients of a model."""
clipped_gradients, gradient_norm = tf.clip_by_global_norm(
gradients, max_gradient_norm)
gradient_norm_summary = [tf.summary.scalar("grad_norm", gradient_norm)]
gradient_norm_summary.append(
tf.summary.scalar("clipped_gradient", tf.global_norm(clipped_gradients)))
return clipped_gradients, gradient_norm_summary
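# Illustrative sketch (graph-mode TF1 style): how the clipped gradients above
# are typically applied. The `loss` tensor and the hyperparameter defaults are
# assumptions for the example.
def _example_apply_clipped_gradients(loss, max_gradient_norm=5.0,
                                     learning_rate=1e-3):
  params = tf.trainable_variables()
  gradients = tf.gradients(loss, params)
  clipped_gradients, grad_norm_summary = gradient_clip(gradients,
                                                       max_gradient_norm)
  optimizer = tf.train.GradientDescentOptimizer(learning_rate)
  train_op = optimizer.apply_gradients(list(zip(clipped_gradients, params)))
  return train_op, grad_norm_summary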
def get_variables_available_in_checkpoint(variables,
ckpt,
include_global_step=True):
if isinstance(variables, list):
variable_names_map = {variable.op.name: variable for variable in variables}
elif isinstance(variables, dict):
variable_names_map = variables
else:
raise ValueError("`variables` is expected to be a list or dict.")
ckpt_reader = tf.train.NewCheckpointReader(ckpt)
ckpt_vars_to_shape_map = ckpt_reader.get_variable_to_shape_map()
if include_global_step:
ckpt_vars_to_shape_map.pop(tf.GraphKeys.GLOBAL_STEP, None)
vars_in_ckpt = {}
for variable_name, variable in sorted(variable_names_map.items()):
variable_name_without_partition = re.sub("/part_[0-9]+$", "", variable_name)
if variable_name in ckpt_vars_to_shape_map:
if ckpt_vars_to_shape_map[variable_name] == variable.shape.as_list():
vars_in_ckpt[variable_name] = variable
else:
logging.warning(
"Variable [%s] is available in checkpoint, but has an "
"incompatible shape with model variable. Checkpoint "
"shape: [%s], model variable shape: [%s]. This "
"variable will not be initialized from the checkpoint.",
variable_name, ckpt_vars_to_shape_map[variable_name],
variable.shape.as_list())
elif variable_name_without_partition in ckpt_vars_to_shape_map:
# Do not check shape for partition variables
vars_in_ckpt[variable_name] = variable
else:
logging.warning("Variable [%s] is not available in checkpoint",
variable_name)
# It seems the restore does something smart about partitioned variables.
# Should keep it as a list instead of using partitioned variable keys.
if isinstance(variables, list):
return list(vars_in_ckpt.values())
return vars_in_ckpt
def load_model(model, ckpt, session, name):
start_time = time.time()
available_var_list = (
get_variables_available_in_checkpoint(model.saver._var_list, ckpt))
# TODO: handle verbosity
# logging.info("available_var_list:%s,%s", len(available_var_list),
# available_var_list)
tf.train.Saver(available_var_list).restore(session, ckpt)
session.run(tf.tables_initializer())
utils.print_out(" loaded %s model parameters from %s, time %.2fs" %
(name, ckpt, time.time() - start_time))
return model
def full_restore(session, ckpt):
start_time = time.time()
available_var_list = (
get_variables_available_in_checkpoint(tf.global_variables(), ckpt))
logging.info("available_var_list:%s,%s", len(available_var_list),
available_var_list)
tf.train.Saver(available_var_list).restore(session, ckpt)
session.run(tf.tables_initializer())
utils.print_out(
"full restore from %s, time %.2fs" % (ckpt, time.time() - start_time))
def create_or_load_model(model, model_dir, session, name):
"""Create translation model and initialize or load parameters in session."""
latest_ckpt = tf.train.latest_checkpoint(model_dir)
if latest_ckpt:
start_time = time.time()
# It only takes a few seconds to initialize all variables.
session.run(tf.global_variables_initializer())
logging.info(
"Initialize %s model with fresh parameters before loading variables "
"from the checkpoint, time %.2fs", name,
time.time() - start_time)
model = load_model(model, latest_ckpt, session, name)
else:
start_time = time.time()
session.run(tf.global_variables_initializer())
session.run(tf.tables_initializer())
utils.print_out(" created %s model with fresh parameters, time %.2fs" %
(name, time.time() - start_time))
global_step = model.global_step.eval(session=session)
return model, global_step
def compute_perplexity(model, sess, name, eval_handle):
"""Compute perplexity of the output of the model based on loss function."""
def aggregate_all_summaries(original, updates):
for key in updates:
if key not in original:
original[key] = 0.0
original[key] += updates[key]
return original
total_loss = 0
total_predict_count = 0
start_time = time.time()
aggregated_summaries = {}
batch_processed = 0
while True:
try:
loss, all_summaries, predict_count, batch_size = model.eval(
sess, eval_handle)
total_loss += loss * batch_size
batch_processed += 1
total_predict_count += predict_count
aggregated_summaries = aggregate_all_summaries(aggregated_summaries,
all_summaries)
except tf.errors.OutOfRangeError:
break
perplexity = utils.safe_exp(total_loss / total_predict_count)
for key in aggregated_summaries:
if key not in set(
["eval_dialogue_loss1", "eval_dialogue_loss2", "eval_action_loss3"]):
aggregated_summaries[key] /= batch_processed
utils.print_time(" eval %s: perplexity %.2f" % (name, perplexity),
start_time)
return perplexity, aggregated_summaries
|
|
import argparse
import cProfile
import multiprocessing as mp
import os
import pstats
import time
import gym
import IPython
import numpy as np
from keras.layers import Dense
from keras.models import Input, Model, Sequential, clone_model, load_model
from keras.optimizers import Adam
from context import core
from core.strategies import ES
from minesweeper_tk import Minesweeper
def fitnessfun(env, model):
total_reward = 0
done = False
observation = env.reset()
steps = 0
while not done and steps < rows*cols-mines:
# Predict action
action = model.predict(observation.reshape((1, 1)+observation.shape))
# Mask invalid moves and renormalize
mask = env.get_validMoves().flatten()
action[0, 0, ~mask] = 0
action = action/np.sum(action)
# Step and get reward
observation, reward, done, info = env.step(np.argmax(action))
total_reward += reward
steps += 1
return total_reward, steps
def testfun(env, model, episodes):
total_reward = []
try:
for i in range(episodes):
total_reward.append(0)
input()
observation = env.reset()
done = False
t = 0
while not done and t < rows*cols-mines:
input()
action = model.predict(observation.reshape((1, 1)+observation.shape))
observation, reward, done, info = env.step(np.argmax(action))
total_reward[i] += reward
t += 1
print('Reward: {: >2.1f}'.format(reward))
except KeyboardInterrupt:
raise
return total_reward
parser = argparse.ArgumentParser()
parser.add_argument('--nwrk', type=int, default=mp.cpu_count())
parser.add_argument('--nags', type=int, default=20)
parser.add_argument('--ngns', type=int, default=10000)
parser.add_argument('--cint', type=int, default=20)
parser.add_argument('--sigm', type=float, default=0.1)
parser.add_argument('--lrte', type=float, default=0.1)
parser.add_argument('--regu', type=float, default=0.001)
parser.add_argument('--size', type=int, default=6)
parser.add_argument('--mine', type=int, default=7)
args = parser.parse_args()
rows = args.size
cols = args.size
mines = args.mine
OUT = 'FULL'
rewards = {"win": 0.9, "loss": -1, "progress": 0.9, "noprogress": -0.3, "YOLO": -0.3}
env = Minesweeper(display=False, OUT=OUT, ROWS=rows, COLS=cols, MINES=mines, rewards=rewards)
n_inputs = rows*cols*10 if OUT == 'FULL' else rows*cols*2
n_hidden = [rows*cols*10, 200, 200, 200, 200]
n_outputs = rows*cols
# Model
model = Sequential()
model.add(Dense(input_shape=(1, n_inputs),
units=n_hidden[0],
activation='relu',
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
kernel_regularizer=None,#l2(reg),
bias_regularizer=None))#l2(reg)))
# Hidden
for n_units in n_hidden[1:]:
model.add(Dense(units=n_units,
activation='relu',
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
kernel_regularizer=None,#l2(reg),
bias_regularizer=None))#l2(reg)))
# Output
model.add(Dense(units=n_outputs,
activation='softmax',
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
kernel_regularizer=None,
bias_regularizer=None))
model.compile(optimizer='rmsprop', loss='mean_squared_error')
model.summary()
DO_PROFILE = False
save_dir = os.path.split(os.path.realpath(__file__))[0]
if __name__ == '__main__':
mp.freeze_support()
#e = ES(fun=fitnessfun, model=model, env=env, reg={'L2': args.regu}, population=args.nags, learning_rate=args.lrte, sigma=args.sigm, workers=args.nwrk, save_dir=save_dir)
#e.load_checkpoint()
if DO_PROFILE:
cProfile.run('e.evolve(args.ngns, print_every=1, plot_every=10)', 'profilingstats')
# e.evolve(args.ngns, checkpoint_every=args.cint, plot_every=args.cint)
if DO_PROFILE:
p = pstats.Stats('profilingstats')
p.sort_stats('cumulative').print_stats(10)
p.sort_stats('time').print_stats(10)
#model = load_model('model.h5')
#env = Minesweeper(display=True, OUT=OUT, ROWS=rows, COLS=cols, MINES=mines, rewards=rewards)
fitnessfun(env, model)
"""
model = Sequential()
model.add(Dense(input_shape=(1, n_inputs),
units=n_hidden[0],
activation='relu',
kernel_initializer='glorot_uniform',
bias_initializer='zeros'))
# Hidden
for n_units in n_hidden[1:]:
model.add(Dense(units=n_units,
activation='relu',
kernel_initializer='glorot_uniform',
bias_initializer='zeros'))
# Output
model.add(Dense(units=n_outputs,
activation='softmax',
kernel_initializer='glorot_uniform',
bias_initializer='zeros'))
model.compile(optimizer='adam', loss='mean_squared_error')
model.summary()
"""
"""
======= PROFILING WITH 1 WORKER =======
Wed Dec 6 10:08:15 2017 profilingstats
1107747 function calls (1092605 primitive calls) in 125.457 seconds
Ordered by: cumulative time
List reduced from 2470 to 10 due to restriction <10>
ncalls tottime percall cumtime percall filename:lineno(function)
20/1 0.000 0.000 125.458 125.458 {built-in method builtins.exec}
1 0.001 0.001 125.458 125.458 <string>:1(<module>)
1 0.032 0.032 125.457 125.457 /Users/Jakob/Desktop/minesweeper_solver/evolutionary/CartPole-v1-multi/strategies.py:70(evolve)
30 0.000 0.000 121.111 4.037 /Users/Jakob/anaconda/lib/python3.6/multiprocessing/pool.py:261(map)
33 0.000 0.000 121.108 3.670 /Users/Jakob/anaconda/lib/python3.6/threading.py:533(wait)
33 0.000 0.000 121.108 3.670 /Users/Jakob/anaconda/lib/python3.6/threading.py:263(wait)
30 0.000 0.000 121.108 4.037 /Users/Jakob/anaconda/lib/python3.6/multiprocessing/pool.py:637(get)
30 0.000 0.000 121.107 4.037 /Users/Jakob/anaconda/lib/python3.6/multiprocessing/pool.py:634(wait)
166 121.107 0.730 121.107 0.730 {method 'acquire' of '_thread.lock' objects}
30 0.038 0.001 2.091 0.070 es-multi-threaded.py:15(fitnessfun)
Wed Dec 6 10:08:15 2017 profilingstats
1107747 function calls (1092605 primitive calls) in 125.457 seconds
Ordered by: internal time
List reduced from 2470 to 10 due to restriction <10>
ncalls tottime percall cumtime percall filename:lineno(function)
166 121.107 0.730 121.107 0.730 {method 'acquire' of '_thread.lock' objects}
4618 0.432 0.000 0.614 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/theano/compile/function_module.py:725(__call__)
10 0.344 0.034 0.344 0.034 {method 'poll' of 'select.poll' objects}
4 0.227 0.057 0.227 0.057 {built-in method _tkinter.create}
22372 0.212 0.000 0.212 0.000 {built-in method numpy.core.multiarray.array}
2472 0.207 0.000 0.207 0.000 {built-in method numpy.core.multiarray.dot}
61099 0.123 0.000 0.123 0.000 {built-in method builtins.hasattr}
4618 0.118 0.000 1.007 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/keras/engine/training.py:1209(_predict_loop)
1 0.101 0.101 0.101 0.101 {method 'acquire' of '_multiprocessing.SemLock' objects}
4618 0.084 0.000 0.084 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/keras/engine/training.py:406(<listcomp>)
======= PROFILING WITH 4 WORKERS =======
Wed Dec 6 10:00:43 2017 profilingstats
3111894 function calls (3068601 primitive calls) in 211.293 seconds
Ordered by: cumulative time
List reduced from 2462 to 10 due to restriction <10>
ncalls tottime percall cumtime percall filename:lineno(function)
27/1 0.001 0.000 211.296 211.296 {built-in method builtins.exec}
1 0.115 0.115 211.295 211.295 /Users/Jakob/Desktop/minesweeper_solver/evolutionary/CartPole-v1-multi/strategies.py:70(evolve)
100 0.001 0.000 200.251 2.003 /Users/Jakob/anaconda/lib/python3.6/multiprocessing/pool.py:261(map)
103 0.001 0.000 200.241 1.944 /Users/Jakob/anaconda/lib/python3.6/threading.py:533(wait)
100 0.000 0.000 200.240 2.002 /Users/Jakob/anaconda/lib/python3.6/multiprocessing/pool.py:637(get)
100 0.000 0.000 200.239 2.002 /Users/Jakob/anaconda/lib/python3.6/multiprocessing/pool.py:634(wait)
103 0.001 0.000 200.239 1.944 /Users/Jakob/anaconda/lib/python3.6/threading.py:263(wait)
515 200.238 0.389 200.238 0.389 {method 'acquire' of '_thread.lock' objects}
100 0.122 0.001 5.254 0.053 es-multi-threaded.py:15(fitnessfun)
100 0.001 0.000 4.544 0.045 /Users/Jakob/Desktop/minesweeper_solver/evolutionary/CartPole-v1-multi/strategies.py:58(plot_progress)
Wed Dec 6 10:00:43 2017 profilingstats
3111894 function calls (3068601 primitive calls) in 211.293 seconds
Ordered by: internal time
List reduced from 2462 to 10 due to restriction <10>
ncalls tottime percall cumtime percall filename:lineno(function)
515 200.238 0.389 200.238 0.389 {method 'acquire' of '_thread.lock' objects}
15292 1.299 0.000 1.880 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/theano/compile/function_module.py:725(__call__)
67701 0.658 0.000 0.658 0.000 {built-in method numpy.core.multiarray.array}
7026 0.574 0.000 0.574 0.000 {built-in method numpy.core.multiarray.dot}
11 0.490 0.045 0.490 0.045 {built-in method _tkinter.create}
15292 0.368 0.000 3.128 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/keras/engine/training.py:1209(_predict_loop)
10 0.294 0.029 0.294 0.029 {method 'poll' of 'select.poll' objects}
15292 0.264 0.000 0.264 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/keras/engine/training.py:406(<listcomp>)
15292 0.261 0.000 0.493 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/gym/envs/classic_control/cartpole.py:56(_step)
15292 0.203 0.000 0.248 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/keras/engine/training.py:364(_make_batches)
"""
|
|
# Copyright 2012-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Identity v3 Endpoint action implementations"""
import logging
import six
import sys
from cliff import command
from cliff import lister
from cliff import show
from openstackclient.common import utils
from openstackclient.identity import common
def get_service_name(service):
if hasattr(service, 'name'):
return service.name
else:
return ''
class CreateEndpoint(show.ShowOne):
"""Create new endpoint"""
log = logging.getLogger(__name__ + '.CreateEndpoint')
def get_parser(self, prog_name):
parser = super(CreateEndpoint, self).get_parser(prog_name)
parser.add_argument(
'service',
metavar='<service>',
help='New endpoint service (name or ID)',
)
parser.add_argument(
'interface',
metavar='<interface>',
choices=['admin', 'public', 'internal'],
help='New endpoint interface type (admin, public or internal)',
)
parser.add_argument(
'url',
metavar='<url>',
help='New endpoint URL',
)
parser.add_argument(
'--region',
metavar='<region-id>',
help='New endpoint region ID',
)
enable_group = parser.add_mutually_exclusive_group()
enable_group.add_argument(
'--enable',
dest='enabled',
action='store_true',
default=True,
help='Enable endpoint (default)',
)
enable_group.add_argument(
'--disable',
dest='enabled',
action='store_false',
help='Disable endpoint',
)
return parser
@utils.log_method(log)
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
service = common.find_service(identity_client, parsed_args.service)
endpoint = identity_client.endpoints.create(
service=service.id,
url=parsed_args.url,
interface=parsed_args.interface,
region=parsed_args.region,
enabled=parsed_args.enabled
)
info = {}
endpoint._info.pop('links')
info.update(endpoint._info)
info['service_name'] = get_service_name(service)
info['service_type'] = service.type
return zip(*sorted(six.iteritems(info)))
class DeleteEndpoint(command.Command):
"""Delete endpoint"""
log = logging.getLogger(__name__ + '.DeleteEndpoint')
def get_parser(self, prog_name):
parser = super(DeleteEndpoint, self).get_parser(prog_name)
parser.add_argument(
'endpoint',
metavar='<endpoint-id>',
help='Endpoint ID to delete',
)
return parser
@utils.log_method(log)
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
endpoint_id = utils.find_resource(identity_client.endpoints,
parsed_args.endpoint).id
identity_client.endpoints.delete(endpoint_id)
return
class ListEndpoint(lister.Lister):
"""List endpoints"""
log = logging.getLogger(__name__ + '.ListEndpoint')
def get_parser(self, prog_name):
parser = super(ListEndpoint, self).get_parser(prog_name)
parser.add_argument(
'--service',
metavar='<service>',
help='Filter by service',
)
parser.add_argument(
'--interface',
metavar='<interface>',
choices=['admin', 'public', 'internal'],
help='Filter by interface type (admin, public or internal)',
)
parser.add_argument(
'--region',
metavar='<region-id>',
help='Filter by region ID',
)
return parser
@utils.log_method(log)
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
columns = ('ID', 'Region', 'Service Name', 'Service Type',
'Enabled', 'Interface', 'URL')
kwargs = {}
if parsed_args.service:
service = common.find_service(identity_client, parsed_args.service)
kwargs['service'] = service.id
if parsed_args.interface:
kwargs['interface'] = parsed_args.interface
if parsed_args.region:
kwargs['region'] = parsed_args.region
data = identity_client.endpoints.list(**kwargs)
for ep in data:
service = common.find_service(identity_client, ep.service_id)
ep.service_name = get_service_name(service)
ep.service_type = service.type
return (columns,
(utils.get_item_properties(
s, columns,
formatters={},
) for s in data))
class SetEndpoint(command.Command):
"""Set endpoint properties"""
log = logging.getLogger(__name__ + '.SetEndpoint')
def get_parser(self, prog_name):
parser = super(SetEndpoint, self).get_parser(prog_name)
parser.add_argument(
'endpoint',
metavar='<endpoint-id>',
help='Endpoint ID to modify',
)
parser.add_argument(
'--region',
metavar='<region-id>',
help='New endpoint region ID',
)
parser.add_argument(
'--interface',
metavar='<interface>',
choices=['admin', 'public', 'internal'],
help='New endpoint interface type (admin, public or internal)',
)
parser.add_argument(
'--url',
metavar='<url>',
help='New endpoint URL',
)
parser.add_argument(
'--service',
metavar='<service>',
help='New endpoint service (name or ID)',
)
enable_group = parser.add_mutually_exclusive_group()
enable_group.add_argument(
'--enable',
dest='enabled',
action='store_true',
help='Enable endpoint',
)
enable_group.add_argument(
'--disable',
dest='disabled',
action='store_true',
help='Disable endpoint',
)
return parser
@utils.log_method(log)
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
endpoint = utils.find_resource(identity_client.endpoints,
parsed_args.endpoint)
if (not parsed_args.interface and not parsed_args.url
and not parsed_args.service and not parsed_args.region
and not parsed_args.enabled and not parsed_args.disabled):
sys.stdout.write("Endpoint not updated, no arguments present")
return
service_id = None
if parsed_args.service:
service = common.find_service(identity_client, parsed_args.service)
service_id = service.id
enabled = None
if parsed_args.enabled:
enabled = True
if parsed_args.disabled:
enabled = False
identity_client.endpoints.update(
endpoint.id,
service=service_id,
url=parsed_args.url,
interface=parsed_args.interface,
region=parsed_args.region,
enabled=enabled
)
return
class ShowEndpoint(show.ShowOne):
"""Display endpoint details"""
log = logging.getLogger(__name__ + '.ShowEndpoint')
def get_parser(self, prog_name):
parser = super(ShowEndpoint, self).get_parser(prog_name)
parser.add_argument(
'endpoint',
metavar='<endpoint-id>',
help='Endpoint ID to display',
)
return parser
@utils.log_method(log)
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
endpoint = utils.find_resource(identity_client.endpoints,
parsed_args.endpoint)
service = common.find_service(identity_client, endpoint.service_id)
info = {}
endpoint._info.pop('links')
info.update(endpoint._info)
info['service_name'] = get_service_name(service)
info['service_type'] = service.type
return zip(*sorted(six.iteritems(info)))
|
|
# pyOCD debugger
# Copyright (c) 2019-2020 Arm Limited
# Copyright (c) 2021 Chris Reed
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import six
import cmsis_pack_manager
import os
import zipfile
from xml.etree import ElementTree
from pathlib import Path
from pyocd.target.pack import (cmsis_pack, flash_algo, pack_target)
from pyocd.target import TARGET
from pyocd.core import (memory_map, target)
K64F = "MK64FN1M0VDC12"
NRF5340 = "nRF5340_xxAA"
STM32L4R5 = "STM32L4R5AGIx"
TEST_DATA_DIR = Path(__file__).resolve().parents[1] / "data" / "packs"
K64F_PACK_PATH = TEST_DATA_DIR / "NXP.MK64F12_DFP.11.0.0.pack"
K64F_1M0_FLM = "arm/MK_P1M0.FLM"
NRF_PDSC_PATH = TEST_DATA_DIR / "NordicSemiconductor.nRF_DeviceFamilyPack.8.38.0.pdsc"
STM32L4_PDSC_PATH = TEST_DATA_DIR / "Keil.STM32L4xx_DFP.2.5.0.pdsc"
@pytest.fixture(scope='module')
def pack_ref():
return cmsis_pack_manager.CmsisPackRef(
"NXP",
"MK64F12_DFP",
"11.0.1",
)
@pytest.fixture(scope='module')#, autouse=True)
def cache(tmpdir_factory, pack_ref):
tmp_path = str(tmpdir_factory.mktemp("cpm"))
c = cmsis_pack_manager.Cache(False, False, json_path=tmp_path, data_path=tmp_path)
c.download_pack_list([pack_ref])
return c
@pytest.fixture(scope='module')
def k64dev(cache):
devs = pack_target.ManagedPacks.get_installed_targets()
return [d for d in devs if d.part_number == K64F].pop()
@pytest.fixture()#autouse=True)
def fixed_installed_packs(monkeypatch, pack_ref):
def my_get_installed_packs(cache=None):
return [pack_ref]
monkeypatch.setattr(pack_target.ManagedPacks, 'get_installed_packs', my_get_installed_packs)
@pytest.fixture(scope='function')
def k64pack():
return cmsis_pack.CmsisPack(K64F_PACK_PATH)
@pytest.fixture(scope='function')
def k64f1m0(k64pack):
return [d for d in k64pack.devices if d.part_number == "MK64FN1M0VLL12"].pop()
@pytest.fixture(scope='function')
def k64algo(k64pack):
flm = k64pack.get_file(K64F_1M0_FLM)
return flash_algo.PackFlashAlgo(flm)
# Replacement for CmsisPackDevice._load_flash_algo() that loads the FLM from the test data dir
# instead of the (unset) CmsisPack object.
def load_test_flm(filename):
p = TEST_DATA_DIR / Path(filename).name
return flash_algo.PackFlashAlgo(p.open('rb'))
@pytest.fixture(scope='function')
def nrfpdsc():
return cmsis_pack.CmsisPackDescription(None, NRF_PDSC_PATH)
# Fixture to provide nRF5340 CmsisPackDevice modified to load FLM from test data dir.
@pytest.fixture(scope='function')
def nrf5340(monkeypatch, nrfpdsc):
dev = [d for d in nrfpdsc.devices if d.part_number == NRF5340].pop()
monkeypatch.setattr(dev, '_load_flash_algo', load_test_flm)
return dev
@pytest.fixture(scope='function')
def stm32l4pdsc():
return cmsis_pack.CmsisPackDescription(None, STM32L4_PDSC_PATH)
# Fixture to provide STM32L4R5 CmsisPackDevice modified to load FLM from test data dir.
@pytest.fixture(scope='function')
def stm32l4r5(monkeypatch, stm32l4pdsc):
dev = [d for d in stm32l4pdsc.devices if d.part_number == STM32L4R5].pop()
monkeypatch.setattr(dev, '_load_flash_algo', load_test_flm)
return dev
# Tests for managed packs. Currently disabled as they fail on most systems.
class Disabled_TestPack(object):
def test_get_installed(self, pack_ref):
p = pack_target.ManagedPacks.get_installed_packs()
assert p == [pack_ref]
def test_get_targets(self, k64dev):
assert k64dev.part_number == K64F
def test_pop_managed_k64(self):
pack_target.ManagedPacks.populate_target(K64F)
assert K64F.lower() in TARGET
def test_k64_mem_map(self, k64dev):
map = k64dev.memory_map
raml = map.get_region_for_address(0x1fff0000)
ramu = map.get_region_for_address(0x20000000)
flash = map.get_default_region_of_type(memory_map.MemoryType.FLASH)
assert raml.start == 0x1fff0000 and raml.length == 0x10000
assert ramu.start == 0x20000000 and ramu.length == 0x30000
assert flash.start == 0 and flash.length == 0x100000
assert flash.sector_size == 0x1000
class TestPack(object):
def test_devices(self, k64pack):
devs = k64pack.devices
pns = [x.part_number for x in devs]
assert "MK64FN1M0xxx12" in pns
assert "MK64FX512xxx12" in pns
# Make sure CmsisPack can open a zip file too.
def test_zipfile(self):
z = zipfile.ZipFile(K64F_PACK_PATH, 'r')
p = cmsis_pack.CmsisPack(z)
pns = [x.part_number for x in p.devices]
assert "MK64FN1M0xxx12" in pns
def test_parse_device_info(self, k64f1m0):
assert k64f1m0.vendor == "NXP"
assert k64f1m0.families == ["MK64F12"]
assert k64f1m0.default_reset_type == target.Target.ResetType.SW
def test_get_svd(self, k64f1m0):
svd = k64f1m0.svd
x = ElementTree.parse(svd)
assert x.getroot().tag == 'device'
def test_mem_map(self, k64f1m0):
map = k64f1m0.memory_map
bm = map.get_boot_memory()
assert bm.start == 0 and bm.length == 1 * 1024 * 1024
ram = map.get_default_region_of_type(memory_map.MemoryType.RAM)
assert ram.start == 0x20000000 and ram.length == 0x30000
# Verify the flash region was converted correctly.
def test_flash(self, k64f1m0):
map = k64f1m0.memory_map
flash = map.get_boot_memory()
assert isinstance(flash, memory_map.FlashRegion)
assert flash.start == 0 and flash.length == 1 * 1024 * 1024
assert flash.sector_size == 4096
class TestFLM(object):
def test_algo(self, k64algo):
i = k64algo.flash_info
# print(i)
assert i.type == 1
assert i.start == 0
assert i.size == 1 * 1024 * 1024
assert i.page_size == 512
assert i.sector_info_list == [(0, 4 * 1024)]
def test_algo_dict(self, k64algo, k64f1m0):
map = k64f1m0.memory_map
ram = map.get_default_region_of_type(memory_map.MemoryType.RAM)
d = k64algo.get_pyocd_flash_algo(4096, ram)
# print(len(d['instructions']) * 4)
# del d['instructions']
# print(d)
STACK_SIZE = 0x200
assert d['load_address'] == ram.start + STACK_SIZE
assert d['pc_init'] == ram.start + STACK_SIZE + 0x5
assert d['pc_unInit'] == ram.start + STACK_SIZE + 0x55
assert d['pc_eraseAll'] == ram.start + STACK_SIZE + 0x79
assert d['pc_erase_sector'] == ram.start + STACK_SIZE + 0xaf
assert d['pc_program_page'] == ram.start + STACK_SIZE + 0xc3
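# Returns True if any region in the memory map overlaps another region.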
def has_overlapping_regions(memmap):
return any((len(memmap.get_intersecting_regions(r.start, r.end)) > 1) for r in memmap.regions)
class TestNRF():
def test_regions(self, nrf5340):
memmap = nrf5340.memory_map
assert not has_overlapping_regions(memmap)
class TestSTM32L4():
def test_regions(self, stm32l4r5):
memmap = stm32l4r5.memory_map
assert not has_overlapping_regions(memmap)
|
|
from __future__ import unicode_literals
import mock
import unittest
import pytest
import pytz
from django.core.exceptions import ObjectDoesNotExist
from django.utils import timezone
from nose.tools import * # noqa
from addons.osfstorage.models import OsfStorageFile, OsfStorageFileNode, OsfStorageFolder
from osf.exceptions import ValidationError
from osf.models import Contributor
from tests.factories import ProjectFactory
from addons.osfstorage.tests import factories
from addons.osfstorage.tests.utils import StorageTestCase
import datetime
from osf import models
from addons.osfstorage import utils
from addons.osfstorage import settings
from website.files.exceptions import FileNodeCheckedOutError
@pytest.mark.django_db
class TestOsfstorageFileNode(StorageTestCase):
def test_root_node_exists(self):
assert_true(self.node_settings.root_node is not None)
def test_root_node_has_no_parent(self):
assert_true(self.node_settings.root_node.parent is None)
def test_node_reference(self):
assert_equal(self.project, self.node_settings.root_node.node)
# def test_get_folder(self):
# file = models.OsfStorageFileNode(name='MOAR PYLONS', is_file=True, node=self.node)
# folder = models.OsfStorageFileNode(name='MOAR PYLONS', is_file=False, node=self.node)
# _id = folder._id
# file.save()
# folder.save()
# assert_equal(folder, models.OsfStorageFileNode.get_folder(_id, self.node_settings))
# def test_get_file(self):
# file = models.OsfStorageFileNode(name='MOAR PYLONS', is_file=True, node=self.node)
# folder = models.OsfStorageFileNode(name='MOAR PYLONS', is_file=False, node=self.node)
# file.save()
# folder.save()
# _id = file._id
# assert_equal(file, models.OsfStorageFileNode.get_file(_id, self.node_settings))
def test_serialize(self):
file = OsfStorageFile(name='MOAR PYLONS', node=self.node_settings.owner)
file.save()
assert_equals(file.serialize(), {
u'id': file._id,
u'path': file.path,
u'created': None,
u'name': u'MOAR PYLONS',
u'kind': 'file',
u'version': 0,
u'downloads': 0,
u'size': None,
u'modified': None,
u'contentType': None,
u'checkout': None,
u'md5': None,
u'sha256': None,
})
version = file.create_version(
self.user,
{
'service': 'cloud',
settings.WATERBUTLER_RESOURCE: 'osf',
'object': '06d80e',
}, {
'size': 1234,
'contentType': 'text/plain'
})
assert_equals(file.serialize(), {
'id': file._id,
'path': file.path,
'created': None,
'name': 'MOAR PYLONS',
'kind': 'file',
'version': 1,
'downloads': 0,
'size': 1234L,
'modified': None,
'contentType': 'text/plain',
'checkout': None,
'md5': None,
'sha256': None,
})
date = timezone.now()
version.update_metadata({
'modified': date.isoformat()
})
assert_equals(file.serialize(), {
'id': file._id,
'path': file.path,
'created': date.isoformat(),
'name': 'MOAR PYLONS',
'kind': 'file',
'version': 1,
'downloads': 0,
'size': 1234L,
'modified': date.isoformat(),
'contentType': 'text/plain',
'checkout': None,
'md5': None,
'sha256': None,
})
def test_get_child_by_name(self):
child = self.node_settings.get_root().append_file('Test')
assert_equal(child, self.node_settings.get_root().find_child_by_name('Test'))
def test_root_node_path(self):
assert_equal(self.node_settings.get_root().name, '')
def test_folder_path(self):
path = '/{}/'.format(self.node_settings.root_node._id)
assert_equal(self.node_settings.get_root().path, path)
def test_file_path(self):
file = OsfStorageFileNode(name='MOAR PYLONS', is_file=True, node=self.node)
file.save()
assert_equal(file.name, 'MOAR PYLONS')
assert_equal(file.path, '/{}'.format(file._id))
def test_append_folder(self):
child = self.node_settings.get_root().append_folder('Test')
children = self.node_settings.get_root().children
assert_equal(child.kind, 'folder')
assert_equal([child], list(children))
def test_append_file(self):
child = self.node_settings.get_root().append_file('Test')
children = self.node_settings.get_root().children
assert_equal(child.kind, 'file')
assert_equal([child], list(children))
def test_append_to_file(self):
child = self.node_settings.get_root().append_file('Test')
with assert_raises(AttributeError):
child.append_file('Cant')
def test_children(self):
assert_equals([
self.node_settings.get_root().append_file('Foo{}Bar'.format(x))
for x in xrange(100)
], list(self.node_settings.get_root().children))
def test_download_count_file_defaults(self):
child = self.node_settings.get_root().append_file('Test')
assert_equals(child.get_download_count(), 0)
@mock.patch('framework.sessions.session')
def test_download_count_file(self, mock_session):
mock_session.data = {}
child = self.node_settings.get_root().append_file('Test')
utils.update_analytics(self.project, child._id, 0)
utils.update_analytics(self.project, child._id, 1)
utils.update_analytics(self.project, child._id, 2)
assert_equals(child.get_download_count(), 3)
assert_equals(child.get_download_count(0), 1)
assert_equals(child.get_download_count(1), 1)
assert_equals(child.get_download_count(2), 1)
@unittest.skip
def test_create_version(self):
pass
@unittest.skip
def test_update_version_metadata(self):
pass
def test_delete_folder(self):
parent = self.node_settings.get_root().append_folder('Test')
kids = []
for x in range(10):
kid = parent.append_file(str(x))
kid.save()
kids.append(kid)
count = OsfStorageFileNode.find().count()
tcount = models.TrashedFileNode.find().count()
parent.delete()
assert_is(OsfStorageFileNode.load(parent._id), None)
assert_equals(count - 11, OsfStorageFileNode.find().count())
assert_equals(tcount + 11, models.TrashedFileNode.find().count())
for kid in kids:
assert_is(
OsfStorageFileNode.load(kid._id),
None
)
# def test_delete_file(self):
# child = self.node_settings.get_root().append_file('Test')
# child.delete()
#
# assert_is(models.OsfStorageFileNode.load(child._id), None)
# trashed = models.TrashedFileNode.load(child._id)
# child_storage = child.to_storage()
# trashed_storage = trashed.to_storage()
# trashed_storage['parent'] = trashed_storage['parent'][0]
# child_storage['materialized_path'] = child.materialized_path
# trashed_storage.pop('deleted_by')
# trashed_storage.pop('deleted_on')
# trashed_storage.pop('suspended')
# assert_equal(child_storage.pop('path'), '')
# assert_equal(trashed_storage.pop('path'), '/' + child._id)
# assert_equal(trashed_storage, child_storage)
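    # Deleting a file should create a TrashedFileNode whose non-relational fields
    # (other than path and materialized path) match the original file's values.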
def test_delete_file(self):
child = self.node_settings.get_root().append_file('Test')
field_names = [f.name for f in child._meta.get_fields() if not f.is_relation and f.name not in ['id', 'guid_string', 'content_type_pk']]
child_data = {f: getattr(child, f) for f in field_names}
child.delete()
assert_raises(ObjectDoesNotExist, child.reload)
assert_is(OsfStorageFileNode.load(child._id), None)
trashed = models.TrashedFileNode.load(child._id)
child_storage = dict()
trashed_storage = dict()
trashed_storage['parent'] = trashed.parent._id
child_storage['materialized_path'] = child.materialized_path
assert_equal(trashed.path, '/' + child._id)
trashed_field_names = [f.name for f in trashed._meta.get_fields() if not f.is_relation and
f.name not in ['id', 'guid_string', 'path', '_materialized_path', 'content_type_pk']]
for f, value in child_data.iteritems():
if f in trashed_field_names:
assert_equal(getattr(trashed, f), value)
def test_materialized_path(self):
child = self.node_settings.get_root().append_file('Test')
assert_equals('/Test', child.materialized_path)
def test_materialized_path_folder(self):
child = self.node_settings.get_root().append_folder('Test')
assert_equals('/Test/', child.materialized_path)
def test_materialized_path_nested(self):
child = self.node_settings.get_root().append_folder('Cloud').append_file('Carp')
assert_equals('/Cloud/Carp', child.materialized_path)
def test_copy(self):
to_copy = self.node_settings.get_root().append_file('Carp')
copy_to = self.node_settings.get_root().append_folder('Cloud')
copied = to_copy.copy_under(copy_to)
assert_not_equal(copied, to_copy)
assert_equal(copied.parent, copy_to)
assert_equal(to_copy.parent, self.node_settings.get_root())
def test_move_nested(self):
new_project = ProjectFactory()
other_node_settings = new_project.get_addon('osfstorage')
move_to = other_node_settings.get_root().append_folder('Cloud')
to_move = self.node_settings.get_root().append_folder('Carp')
child = to_move.append_file('A dee um')
moved = to_move.move_under(move_to)
child.reload()
assert_equal(moved, to_move)
assert_equal(new_project, to_move.node)
assert_equal(new_project, move_to.node)
assert_equal(new_project, child.node)
def test_copy_rename(self):
to_copy = self.node_settings.get_root().append_file('Carp')
copy_to = self.node_settings.get_root().append_folder('Cloud')
copied = to_copy.copy_under(copy_to, name='But')
assert_equal(copied.name, 'But')
assert_not_equal(copied, to_copy)
assert_equal(to_copy.name, 'Carp')
assert_equal(copied.parent, copy_to)
assert_equal(to_copy.parent, self.node_settings.get_root())
def test_move(self):
to_move = self.node_settings.get_root().append_file('Carp')
move_to = self.node_settings.get_root().append_folder('Cloud')
moved = to_move.move_under(move_to)
assert_equal(to_move, moved)
assert_equal(moved.parent, move_to)
def test_move_and_rename(self):
to_move = self.node_settings.get_root().append_file('Carp')
move_to = self.node_settings.get_root().append_folder('Cloud')
moved = to_move.move_under(move_to, name='Tuna')
assert_equal(to_move, moved)
assert_equal(to_move.name, 'Tuna')
assert_equal(moved.parent, move_to)
@unittest.skip
def test_move_folder(self):
pass
@unittest.skip
def test_move_folder_and_rename(self):
pass
@unittest.skip
def test_rename_folder(self):
pass
@unittest.skip
def test_rename_file(self):
pass
@unittest.skip
def test_move_across_nodes(self):
pass
@unittest.skip
def test_move_folder_across_nodes(self):
pass
@unittest.skip
def test_copy_across_nodes(self):
pass
@unittest.skip
def test_copy_folder_across_nodes(self):
pass
def test_get_file_guids_for_live_file(self):
node = self.node_settings.owner
file = OsfStorageFile(name='foo', node=node)
file.save()
file.get_guid(create=True)
guid = file.get_guid()._id
assert guid is not None
assert guid in OsfStorageFileNode.get_file_guids(
'/' + file._id, provider='osfstorage', node=node)
def test_get_file_guids_for_live_folder(self):
node = self.node_settings.owner
folder = OsfStorageFolder(name='foofolder', node=node)
folder.save()
files = []
for i in range(1, 4):
files.append(folder.append_file('foo.{}'.format(i)))
files[-1].get_guid(create=True)
guids = [file.get_guid()._id for file in files]
assert len(guids) == len(files)
all_guids = OsfStorageFileNode.get_file_guids(
'/' + folder._id, provider='osfstorage', node=node)
assert sorted(guids) == sorted(all_guids)
def test_get_file_guids_for_trashed_file(self):
node = self.node_settings.owner
file = OsfStorageFile(name='foo', node=node)
file.save()
file.get_guid(create=True)
guid = file.get_guid()._id
file.delete()
assert guid is not None
assert guid in OsfStorageFileNode.get_file_guids(
'/' + file._id, provider='osfstorage', node=node)
def test_get_file_guids_for_trashed_folder(self):
node = self.node_settings.owner
folder = OsfStorageFolder(name='foofolder', node=node)
folder.save()
files = []
for i in range(1, 4):
files.append(folder.append_file('foo.{}'.format(i)))
files[-1].get_guid(create=True)
guids = [file.get_guid()._id for file in files]
assert len(guids) == len(files)
folder.delete()
all_guids = OsfStorageFileNode.get_file_guids(
'/' + folder._id, provider='osfstorage', node=node)
assert sorted(guids) == sorted(all_guids)
def test_get_file_guids_live_file_wo_guid(self):
node = self.node_settings.owner
file = OsfStorageFile(name='foo', node=node)
file.save()
assert [] == OsfStorageFileNode.get_file_guids(
'/' + file._id, provider='osfstorage', node=node)
def test_get_file_guids_for_live_folder_wo_guids(self):
node = self.node_settings.owner
folder = OsfStorageFolder(name='foofolder', node=node)
folder.save()
files = []
for i in range(1, 4):
files.append(folder.append_file('foo.{}'.format(i)))
all_guids = OsfStorageFileNode.get_file_guids(
'/' + folder._id, provider='osfstorage', node=node)
assert [] == all_guids
def test_get_file_guids_trashed_file_wo_guid(self):
node = self.node_settings.owner
file = OsfStorageFile(name='foo', node=node)
file.save()
file.delete()
assert [] == OsfStorageFileNode.get_file_guids(
'/' + file._id, provider='osfstorage', node=node)
def test_get_file_guids_for_trashed_folder_wo_guids(self):
node = self.node_settings.owner
folder = OsfStorageFolder(name='foofolder', node=node)
folder.save()
files = []
for i in range(1, 4):
files.append(folder.append_file('foo.{}'.format(i)))
folder.delete()
all_guids = OsfStorageFileNode.get_file_guids(
'/' + folder._id, provider='osfstorage', node=node)
assert [] == all_guids
def test_get_file_guids_for_live_folder_recursive(self):
node = self.node_settings.owner
folder = OsfStorageFolder(name='foofolder', node=node)
folder.save()
files = []
for i in range(1, 4):
files.append(folder.append_file('foo.{}'.format(i)))
files[-1].get_guid(create=True)
subfolder = folder.append_folder('subfoo')
for i in range(1, 4):
files.append(subfolder.append_file('subfoo.{}'.format(i)))
files[-1].get_guid(create=True)
guids = [file.get_guid()._id for file in files]
assert len(guids) == len(files)
all_guids = OsfStorageFileNode.get_file_guids(
'/' + folder._id, provider='osfstorage', node=node)
assert sorted(guids) == sorted(all_guids)
def test_get_file_guids_for_trashed_folder_recursive(self):
node = self.node_settings.owner
folder = OsfStorageFolder(name='foofolder', node=node)
folder.save()
files = []
for i in range(1, 4):
files.append(folder.append_file('foo.{}'.format(i)))
files[-1].get_guid(create=True)
subfolder = folder.append_folder('subfoo')
for i in range(1, 4):
files.append(subfolder.append_file('subfoo.{}'.format(i)))
files[-1].get_guid(create=True)
guids = [file.get_guid()._id for file in files]
assert len(guids) == len(files)
folder.delete()
all_guids = OsfStorageFileNode.get_file_guids(
'/' + folder._id, provider='osfstorage', node=node)
assert sorted(guids) == sorted(all_guids)
def test_get_file_guids_for_live_folder_recursive_wo_guids(self):
node = self.node_settings.owner
folder = OsfStorageFolder(name='foofolder', node=node)
folder.save()
files = []
for i in range(1, 4):
files.append(folder.append_file('foo.{}'.format(i)))
subfolder = folder.append_folder('subfoo')
for i in range(1, 4):
files.append(subfolder.append_file('subfoo.{}'.format(i)))
all_guids = OsfStorageFileNode.get_file_guids(
'/' + folder._id, provider='osfstorage', node=node)
assert [] == all_guids
def test_get_file_guids_for_trashed_folder_recursive_wo_guids(self):
node = self.node_settings.owner
folder = OsfStorageFolder(name='foofolder', node=node)
folder.save()
files = []
for i in range(1, 4):
files.append(folder.append_file('foo.{}'.format(i)))
subfolder = folder.append_folder('subfoo')
for i in range(1, 4):
files.append(subfolder.append_file('subfoo.{}'.format(i)))
folder.delete()
all_guids = OsfStorageFileNode.get_file_guids(
'/' + folder._id, provider='osfstorage', node=node)
assert [] == all_guids
@pytest.mark.django_db
class TestNodeSettingsModel(StorageTestCase):
def test_fields(self):
assert_true(self.node_settings._id)
assert_is(self.node_settings.has_auth, True)
assert_is(self.node_settings.complete, True)
def test_after_fork_copies_versions(self):
num_versions = 5
path = 'jazz/dreamers-ball.mp3'
record = self.node_settings.get_root().append_file(path)
for _ in range(num_versions):
version = factories.FileVersionFactory()
record.versions.add(version)
fork = self.project.fork_node(self.auth_obj)
fork_node_settings = fork.get_addon('osfstorage')
fork_node_settings.reload()
cloned_record = fork_node_settings.get_root().find_child_by_name(path)
assert_equal(list(cloned_record.versions.all()), list(record.versions.all()))
assert_true(fork_node_settings.root_node)
@pytest.mark.django_db
class TestOsfStorageFileVersion(StorageTestCase):
def setUp(self):
super(TestOsfStorageFileVersion, self).setUp()
self.user = factories.AuthUserFactory()
self.mock_date = datetime.datetime(1991, 10, 31, tzinfo=pytz.UTC)
def test_fields(self):
version = factories.FileVersionFactory(
size=1024,
content_type='application/json',
date_modified=timezone.now(),
)
retrieved = models.FileVersion.load(version._id)
assert_true(retrieved.creator)
assert_true(retrieved.location)
assert_true(retrieved.size)
        # sometimes identifiers are strings, so this always has to be a string; SQL is funny about that.
assert_equal(retrieved.identifier, u"0")
assert_true(retrieved.content_type)
assert_true(retrieved.date_modified)
def test_is_duplicate_true(self):
version1 = factories.FileVersionFactory()
version2 = factories.FileVersionFactory()
assert_true(version1.is_duplicate(version2))
assert_true(version2.is_duplicate(version1))
def test_is_duplicate_false(self):
version1 = factories.FileVersionFactory(
location={
'service': 'cloud',
settings.WATERBUTLER_RESOURCE: 'osf',
'object': 'd077f2',
},
)
version2 = factories.FileVersionFactory(
location={
'service': 'cloud',
settings.WATERBUTLER_RESOURCE: 'osf',
'object': '06d80e',
},
)
assert_false(version1.is_duplicate(version2))
assert_false(version2.is_duplicate(version1))
def test_validate_location(self):
version = factories.FileVersionFactory.build(location={'invalid': True})
with assert_raises(ValidationError):
version.save()
version.location = {
'service': 'cloud',
settings.WATERBUTLER_RESOURCE: 'osf',
'object': 'object',
}
version.save()
def test_update_metadata(self):
version = factories.FileVersionFactory()
version.update_metadata(
{'archive': 'glacier', 'size': 123, 'modified': 'Mon, 16 Feb 2015 18:45:34 GMT'})
version.reload()
assert_in('archive', version.metadata)
assert_equal(version.metadata['archive'], 'glacier')
def test_matching_archive(self):
version = factories.FileVersionFactory(
location={
'service': 'cloud',
settings.WATERBUTLER_RESOURCE: 'osf',
'object': 'd077f2',
},
metadata={'sha256': 'existing'}
)
factories.FileVersionFactory(
location={
'service': 'cloud',
settings.WATERBUTLER_RESOURCE: 'osf',
'object': '06d80e',
},
metadata={
'sha256': 'existing',
'vault': 'the cloud',
'archive': 'erchiv'
}
)
assert_is(version._find_matching_archive(), True)
assert_is_not(version.archive, None)
assert_equal(version.metadata['vault'], 'the cloud')
assert_equal(version.metadata['archive'], 'erchiv')
def test_archive_exits(self):
node_addon = self.project.get_addon('osfstorage')
fnode = node_addon.get_root().append_file('MyCoolTestFile')
version = fnode.create_version(
self.user,
{
'service': 'cloud',
settings.WATERBUTLER_RESOURCE: 'osf',
'object': '06d80e',
}, {
'sha256': 'existing',
'vault': 'the cloud',
'archive': 'erchiv'
})
assert_equal(version.archive, 'erchiv')
version2 = fnode.create_version(
self.user,
{
'service': 'cloud',
settings.WATERBUTLER_RESOURCE: 'osf',
'object': '07d80a',
}, {
'sha256': 'existing',
})
assert_equal(version2.archive, 'erchiv')
def test_no_matching_archive(self):
models.FileVersion.remove()
assert_is(False, factories.FileVersionFactory(
location={
'service': 'cloud',
settings.WATERBUTLER_RESOURCE: 'osf',
'object': 'd077f2',
},
metadata={'sha256': 'existing'}
)._find_matching_archive())
@pytest.mark.django_db
class TestOsfStorageCheckout(StorageTestCase):
def setUp(self):
super(TestOsfStorageCheckout, self).setUp()
self.user = factories.AuthUserFactory()
self.node = ProjectFactory(creator=self.user)
self.osfstorage = self.node.get_addon('osfstorage')
self.root_node = self.osfstorage.get_root()
self.file = self.root_node.append_file('3005')
def test_checkout_logs(self):
non_admin = factories.AuthUserFactory()
self.node.add_contributor(non_admin, permissions=['read', 'write'])
self.node.save()
self.file.check_in_or_out(non_admin, non_admin, save=True)
self.file.reload()
self.node.reload()
assert_equal(self.file.checkout, non_admin)
assert_equal(self.node.logs.latest().action, 'checked_out')
assert_equal(self.node.logs.latest().user, non_admin)
self.file.check_in_or_out(self.user, None, save=True)
self.file.reload()
self.node.reload()
assert_equal(self.file.checkout, None)
assert_equal(self.node.logs.latest().action, 'checked_in')
assert_equal(self.node.logs.latest().user, self.user)
self.file.check_in_or_out(self.user, self.user, save=True)
self.file.reload()
self.node.reload()
assert_equal(self.file.checkout, self.user)
assert_equal(self.node.logs.latest().action, 'checked_out')
assert_equal(self.node.logs.latest().user, self.user)
with assert_raises(FileNodeCheckedOutError):
self.file.check_in_or_out(non_admin, None, save=True)
with assert_raises(FileNodeCheckedOutError):
self.file.check_in_or_out(non_admin, non_admin, save=True)
def test_delete_checked_out_file(self):
self.file.check_in_or_out(self.user, self.user, save=True)
self.file.reload()
assert_equal(self.file.checkout, self.user)
with assert_raises(FileNodeCheckedOutError):
self.file.delete()
def test_delete_folder_with_checked_out_file(self):
folder = self.root_node.append_folder('folder')
self.file.move_under(folder)
self.file.check_in_or_out(self.user, self.user, save=True)
self.file.reload()
assert_equal(self.file.checkout, self.user)
with assert_raises(FileNodeCheckedOutError):
folder.delete()
def test_move_checked_out_file(self):
self.file.check_in_or_out(self.user, self.user, save=True)
self.file.reload()
assert_equal(self.file.checkout, self.user)
folder = self.root_node.append_folder('folder')
with assert_raises(FileNodeCheckedOutError):
self.file.move_under(folder)
def test_checked_out_merge(self):
user = factories.AuthUserFactory()
node = ProjectFactory(creator=user)
osfstorage = node.get_addon('osfstorage')
root_node = osfstorage.get_root()
file = root_node.append_file('test_file')
user_merge_target = factories.AuthUserFactory()
file.check_in_or_out(user, user, save=True)
file.reload()
assert_equal(file.checkout, user)
user_merge_target.merge_user(user)
file.reload()
assert_equal(user_merge_target.id, file.checkout.id)
def test_remove_contributor_with_checked_file(self):
user = factories.AuthUserFactory()
Contributor.objects.create(
node=self.node,
user=user,
admin=True,
write=True,
read=True,
visible=True
)
self.file.check_in_or_out(self.user, self.user, save=True)
self.file.reload()
assert_equal(self.file.checkout, self.user)
self.file.node.remove_contributors([self.user], save=True)
self.file.reload()
assert_equal(self.file.checkout, None)
|
|
from __future__ import absolute_import
import logging
import six
from collections import defaultdict
from datetime import timedelta
from uuid import uuid4
from django.db import IntegrityError, transaction
from django.utils import timezone
from rest_framework import serializers
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from sentry import eventstream, features
from sentry.api.base import audit_logger
from sentry.api.fields import Actor, ActorField
from sentry.api.serializers import serialize
from sentry.api.serializers.models.actor import ActorSerializer
from sentry.api.serializers.models.group import SUBSCRIPTION_REASON_MAP
from sentry.constants import DEFAULT_SORT_OPTION
from sentry.db.models.query import create_or_update
from sentry.models import (
Activity,
Commit,
Group,
GroupAssignee,
GroupHash,
GroupLink,
GroupStatus,
GroupTombstone,
GroupResolution,
GroupBookmark,
GroupSeen,
GroupShare,
GroupSnooze,
GroupSubscription,
GroupSubscriptionReason,
Release,
Repository,
TOMBSTONE_FIELDS_FROM_GROUP,
Team,
User,
UserOption,
)
from sentry.models.group import looks_like_short_id
from sentry.api.issue_search import convert_query_values, InvalidSearchQuery, parse_search_query
from sentry.signals import (
issue_deleted,
issue_ignored,
issue_resolved,
advanced_search_feature_gated,
)
from sentry.tasks.deletion import delete_groups as delete_groups_task
from sentry.tasks.integrations import kick_off_status_syncs
from sentry.tasks.merge import merge_groups
from sentry.utils import metrics
from sentry.utils.audit import create_audit_entry
from sentry.utils.cursors import Cursor
from sentry.utils.functional import extract_lazy_object
delete_logger = logging.getLogger("sentry.deletions.api")
class ValidationError(Exception):
pass
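# Builds the kwargs passed to the search backend (projects, sort, limit, cursor and parsed
# search filters) from the request's query string; invalid input raises ValidationError or ParseError.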
def build_query_params_from_request(request, organization, projects, environments):
query_kwargs = {"projects": projects, "sort_by": request.GET.get("sort", DEFAULT_SORT_OPTION)}
limit = request.GET.get("limit")
if limit:
try:
query_kwargs["limit"] = int(limit)
except ValueError:
raise ValidationError("invalid limit")
# TODO: proper pagination support
if request.GET.get("cursor"):
try:
query_kwargs["cursor"] = Cursor.from_string(request.GET.get("cursor"))
except ValueError:
raise ParseError(detail="Invalid cursor parameter.")
query = request.GET.get("query", "is:unresolved").strip()
if query:
try:
search_filters = convert_query_values(
parse_search_query(query), projects, request.user, environments
)
except InvalidSearchQuery as e:
raise ValidationError(u"Your search query could not be parsed: {}".format(e.message))
validate_search_filter_permissions(organization, search_filters, request.user)
query_kwargs["search_filters"] = search_filters
return query_kwargs
# List of conditions that mark a SearchFilter as an advanced search. Format is
# (lambda SearchFilter(): <boolean condition>, '<feature_name>')
advanced_search_features = [
(lambda search_filter: search_filter.is_negation, "negative search"),
(lambda search_filter: search_filter.value.is_wildcard(), "wildcard search"),
]
def validate_search_filter_permissions(organization, search_filters, user):
"""
Verifies that an organization is allowed to perform the query that they
submitted.
If the org is using a feature they don't have access to, raises
    `ValidationError` with information about which part of the query they don't have
access to.
    :param search_filters: parsed search filters to check against the org's feature access
"""
# If the organization has advanced search, then no need to perform any
# other checks since they're allowed to use all search features
if features.has("organizations:advanced-search", organization):
return
for search_filter in search_filters:
for feature_condition, feature_name in advanced_search_features:
if feature_condition(search_filter):
advanced_search_feature_gated.send_robust(
user=user, organization=organization, sender=validate_search_filter_permissions
)
raise ValidationError(
u"You need access to the advanced search feature to use {}".format(feature_name)
)
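# Resolves a short-id style query (e.g. "PROJECT-123") to a Group when short-id lookup is
# requested; returns None when nothing matches.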
def get_by_short_id(organization_id, is_short_id_lookup, query):
if is_short_id_lookup == "1" and looks_like_short_id(query):
try:
return Group.objects.by_qualified_short_id(organization_id, query)
except Group.DoesNotExist:
pass
STATUS_CHOICES = {
"resolved": GroupStatus.RESOLVED,
"unresolved": GroupStatus.UNRESOLVED,
"ignored": GroupStatus.IGNORED,
"resolvedInNextRelease": GroupStatus.UNRESOLVED,
# TODO(dcramer): remove in 9.0
"muted": GroupStatus.IGNORED,
}
class InCommitValidator(serializers.Serializer):
commit = serializers.CharField(required=True)
repository = serializers.CharField(required=True)
def validate_repository(self, value):
project = self.context["project"]
try:
value = Repository.objects.get(organization_id=project.organization_id, name=value)
except Repository.DoesNotExist:
raise serializers.ValidationError("Unable to find the given repository.")
return value
def validate(self, attrs):
attrs = super(InCommitValidator, self).validate(attrs)
repository = attrs.get("repository")
commit = attrs.get("commit")
if not repository:
raise serializers.ValidationError(
{"repository": ["Unable to find the given repository."]}
)
if not commit:
raise serializers.ValidationError({"commit": ["Unable to find the given commit."]})
try:
commit = Commit.objects.get(repository_id=repository.id, key=commit)
except Commit.DoesNotExist:
raise serializers.ValidationError({"commit": ["Unable to find the given commit."]})
return commit
class StatusDetailsValidator(serializers.Serializer):
inNextRelease = serializers.BooleanField()
inRelease = serializers.CharField()
inCommit = InCommitValidator(required=False)
ignoreDuration = serializers.IntegerField()
ignoreCount = serializers.IntegerField()
# in minutes, max of one week
ignoreWindow = serializers.IntegerField(max_value=7 * 24 * 60)
ignoreUserCount = serializers.IntegerField()
# in minutes, max of one week
ignoreUserWindow = serializers.IntegerField(max_value=7 * 24 * 60)
def validate_inRelease(self, value):
project = self.context["project"]
if value == "latest":
try:
value = (
Release.objects.filter(
projects=project, organization_id=project.organization_id
)
.extra(select={"sort": "COALESCE(date_released, date_added)"})
.order_by("-sort")[0]
)
except IndexError:
raise serializers.ValidationError(
"No release data present in the system to form a basis for 'Next Release'"
)
else:
try:
value = Release.objects.get(
projects=project, organization_id=project.organization_id, version=value
)
except Release.DoesNotExist:
raise serializers.ValidationError(
"Unable to find a release with the given version."
)
return value
def validate_inNextRelease(self, value):
project = self.context["project"]
try:
value = (
Release.objects.filter(projects=project, organization_id=project.organization_id)
.extra(select={"sort": "COALESCE(date_released, date_added)"})
.order_by("-sort")[0]
)
except IndexError:
raise serializers.ValidationError(
"No release data present in the system to form a basis for 'Next Release'"
)
return value
class GroupValidator(serializers.Serializer):
status = serializers.ChoiceField(choices=zip(STATUS_CHOICES.keys(), STATUS_CHOICES.keys()))
statusDetails = StatusDetailsValidator()
hasSeen = serializers.BooleanField()
isBookmarked = serializers.BooleanField()
isPublic = serializers.BooleanField()
isSubscribed = serializers.BooleanField()
merge = serializers.BooleanField()
discard = serializers.BooleanField()
ignoreDuration = serializers.IntegerField()
ignoreCount = serializers.IntegerField()
# in minutes, max of one week
ignoreWindow = serializers.IntegerField(max_value=7 * 24 * 60)
ignoreUserCount = serializers.IntegerField()
# in minutes, max of one week
ignoreUserWindow = serializers.IntegerField(max_value=7 * 24 * 60)
assignedTo = ActorField()
# TODO(dcramer): remove in 9.0
# for the moment, the CLI sends this for any issue update, so allow nulls
snoozeDuration = serializers.IntegerField(allow_null=True)
def validate_assignedTo(self, value):
if (
value
and value.type is User
and not self.context["project"].member_set.filter(user_id=value.id).exists()
):
raise serializers.ValidationError("Cannot assign to non-team member")
if (
value
and value.type is Team
and not self.context["project"].teams.filter(id=value.id).exists()
):
raise serializers.ValidationError(
"Cannot assign to a team without access to the project"
)
return value
def validate(self, attrs):
attrs = super(GroupValidator, self).validate(attrs)
if len(attrs) > 1 and "discard" in attrs:
raise serializers.ValidationError("Other attributes cannot be updated when discarding")
return attrs
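# Discards the given groups: a GroupTombstone is created per group, its hashes are re-pointed
# at the tombstone, and the groups are then deleted project by project. Requires the
# "projects:discard-groups" feature on every affected project.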
def handle_discard(request, group_list, projects, user):
for project in projects:
if not features.has("projects:discard-groups", project, actor=user):
return Response({"detail": ["You do not have that feature enabled"]}, status=400)
# grouped by project_id
groups_to_delete = defaultdict(list)
for group in group_list:
with transaction.atomic():
try:
tombstone = GroupTombstone.objects.create(
previous_group_id=group.id,
actor_id=user.id if user else None,
**{name: getattr(group, name) for name in TOMBSTONE_FIELDS_FROM_GROUP}
)
except IntegrityError:
# in this case, a tombstone has already been created
# for a group, so no hash updates are necessary
pass
else:
groups_to_delete[group.project_id].append(group)
GroupHash.objects.filter(group=group).update(
group=None, group_tombstone_id=tombstone.id
)
for project in projects:
_delete_groups(request, project, groups_to_delete.get(project.id), delete_type="discard")
return Response(status=204)
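# Marks the groups as pending deletion, drops their hashes and queues the background delete
# task; an audit entry, a deletion log line and the issue_deleted signal are emitted per group.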
def _delete_groups(request, project, group_list, delete_type):
if not group_list:
return
# deterministic sort for sanity, and for very large deletions we'll
# delete the "smaller" groups first
group_list.sort(key=lambda g: (g.times_seen, g.id))
group_ids = [g.id for g in group_list]
Group.objects.filter(id__in=group_ids).exclude(
status__in=[GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS]
).update(status=GroupStatus.PENDING_DELETION)
eventstream_state = eventstream.start_delete_groups(project.id, group_ids)
transaction_id = uuid4().hex
GroupHash.objects.filter(project_id=project.id, group__id__in=group_ids).delete()
delete_groups_task.apply_async(
kwargs={
"object_ids": group_ids,
"transaction_id": transaction_id,
"eventstream_state": eventstream_state,
},
countdown=3600,
)
for group in group_list:
create_audit_entry(
request=request,
transaction_id=transaction_id,
logger=audit_logger,
organization_id=project.organization_id,
target_object=group.id,
)
delete_logger.info(
"object.delete.queued",
extra={
"object_id": group.id,
"transaction_id": transaction_id,
"model": type(group).__name__,
},
)
issue_deleted.send_robust(
group=group, user=request.user, delete_type=delete_type, sender=_delete_groups
)
def delete_groups(request, projects, organization_id, search_fn):
"""
`search_fn` refers to the `search.query` method with the appropriate
project, org, environment, and search params already bound
"""
group_ids = request.GET.getlist("id")
if group_ids:
group_list = list(
Group.objects.filter(
project__in=projects,
project__organization_id=organization_id,
id__in=set(group_ids),
).exclude(status__in=[GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS])
)
else:
try:
# bulk mutations are limited to 1000 items
# TODO(dcramer): it'd be nice to support more than this, but its
# a bit too complicated right now
cursor_result, _ = search_fn({"limit": 1000, "paginator_options": {"max_limit": 1000}})
except ValidationError as exc:
return Response({"detail": six.text_type(exc)}, status=400)
group_list = list(cursor_result)
if not group_list:
return Response(status=204)
groups_by_project_id = defaultdict(list)
for group in group_list:
groups_by_project_id[group.project_id].append(group)
for project in projects:
_delete_groups(request, project, groups_by_project_id.get(project.id), delete_type="delete")
return Response(status=204)
def self_subscribe_and_assign_issue(acting_user, group):
# Used during issue resolution to assign to acting user
# returns None if the user didn't elect to self assign on resolution
# or the group is assigned already, otherwise returns Actor
# representation of current user
if acting_user:
GroupSubscription.objects.subscribe(
user=acting_user, group=group, reason=GroupSubscriptionReason.status_change
)
self_assign_issue = UserOption.objects.get_value(
user=acting_user, key="self_assign_issue", default="0"
)
if self_assign_issue == "1" and not group.assignee_set.exists():
return Actor(type=User, id=acting_user.id)
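# Applies a bulk update to the selected groups (explicit ids or the results of search_fn):
# status/resolution changes, ignore/snooze details, assignment, seen/bookmark/subscription
# flags, share visibility, merging and discarding.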
def update_groups(request, projects, organization_id, search_fn):
group_ids = request.GET.getlist("id")
if group_ids:
group_list = Group.objects.filter(
project__organization_id=organization_id, project__in=projects, id__in=group_ids
)
# filter down group ids to only valid matches
group_ids = [g.id for g in group_list]
if not group_ids:
return Response(status=204)
else:
group_list = None
# TODO(jess): We may want to look into refactoring GroupValidator
# to support multiple projects, but this is pretty complicated
# because of the assignee validation. Punting on this for now.
for project in projects:
serializer = GroupValidator(data=request.data, partial=True, context={"project": project})
if not serializer.is_valid():
return Response(serializer.errors, status=400)
result = dict(serializer.validated_data)
# so we won't have to requery for each group
project_lookup = {p.id: p for p in projects}
acting_user = request.user if request.user.is_authenticated() else None
if not group_ids:
try:
# bulk mutations are limited to 1000 items
# TODO(dcramer): it'd be nice to support more than this, but its
# a bit too complicated right now
cursor_result, _ = search_fn({"limit": 1000, "paginator_options": {"max_limit": 1000}})
except ValidationError as exc:
return Response({"detail": six.text_type(exc)}, status=400)
group_list = list(cursor_result)
group_ids = [g.id for g in group_list]
is_bulk = len(group_ids) > 1
group_project_ids = {g.project_id for g in group_list}
# filter projects down to only those that have groups in the search results
projects = [p for p in projects if p.id in group_project_ids]
queryset = Group.objects.filter(id__in=group_ids)
discard = result.get("discard")
if discard:
return handle_discard(request, list(queryset), projects, acting_user)
statusDetails = result.pop("statusDetails", result)
status = result.get("status")
release = None
commit = None
if status in ("resolved", "resolvedInNextRelease"):
if status == "resolvedInNextRelease" or statusDetails.get("inNextRelease"):
# TODO(jess): We may want to support this for multi project, but punting on it for now
if len(projects) > 1:
return Response(
{"detail": "Cannot set resolved in next release for multiple projects."},
status=400,
)
release = (
statusDetails.get("inNextRelease")
or Release.objects.filter(
projects=projects[0], organization_id=projects[0].organization_id
)
.extra(select={"sort": "COALESCE(date_released, date_added)"})
.order_by("-sort")[0]
)
activity_type = Activity.SET_RESOLVED_IN_RELEASE
activity_data = {
# no version yet
"version": ""
}
status_details = {
"inNextRelease": True,
"actor": serialize(extract_lazy_object(request.user), request.user),
}
res_type = GroupResolution.Type.in_next_release
res_type_str = "in_next_release"
res_status = GroupResolution.Status.pending
elif statusDetails.get("inRelease"):
# TODO(jess): We could update validation to check if release
# applies to multiple projects, but I think we agreed to punt
# on this for now
if len(projects) > 1:
return Response(
{"detail": "Cannot set resolved in release for multiple projects."}, status=400
)
release = statusDetails["inRelease"]
activity_type = Activity.SET_RESOLVED_IN_RELEASE
activity_data = {
# no version yet
"version": release.version
}
status_details = {
"inRelease": release.version,
"actor": serialize(extract_lazy_object(request.user), request.user),
}
res_type = GroupResolution.Type.in_release
res_type_str = "in_release"
res_status = GroupResolution.Status.resolved
elif statusDetails.get("inCommit"):
# TODO(jess): Same here, this is probably something we could do, but
# punting for now.
if len(projects) > 1:
return Response(
{"detail": "Cannot set resolved in commit for multiple projects."}, status=400
)
commit = statusDetails["inCommit"]
activity_type = Activity.SET_RESOLVED_IN_COMMIT
activity_data = {"commit": commit.id}
status_details = {
"inCommit": serialize(commit, request.user),
"actor": serialize(extract_lazy_object(request.user), request.user),
}
res_type_str = "in_commit"
else:
res_type_str = "now"
activity_type = Activity.SET_RESOLVED
activity_data = {}
status_details = {}
now = timezone.now()
metrics.incr("group.resolved", instance=res_type_str, skip_internal=True)
# if we've specified a commit, let's see if its already been released
# this will allow us to associate the resolution to a release as if we
# were simply using 'inRelease' above
# Note: this is different than the way commit resolution works on deploy
# creation, as a given deploy is connected to an explicit release, and
# in this case we're simply choosing the most recent release which contains
# the commit.
if commit and not release:
# TODO(jess): If we support multiple projects for release / commit resolution,
# we need to update this to find the release for each project (we shouldn't assume
# it's the same)
try:
release = (
Release.objects.filter(projects__in=projects, releasecommit__commit=commit)
.extra(select={"sort": "COALESCE(date_released, date_added)"})
.order_by("-sort")[0]
)
res_type = GroupResolution.Type.in_release
res_status = GroupResolution.Status.resolved
except IndexError:
release = None
for group in group_list:
with transaction.atomic():
resolution = None
if release:
resolution_params = {
"release": release,
"type": res_type,
"status": res_status,
"actor_id": request.user.id if request.user.is_authenticated() else None,
}
resolution, created = GroupResolution.objects.get_or_create(
group=group, defaults=resolution_params
)
if not created:
resolution.update(datetime=timezone.now(), **resolution_params)
if commit:
GroupLink.objects.create(
group_id=group.id,
project_id=group.project_id,
linked_type=GroupLink.LinkedType.commit,
relationship=GroupLink.Relationship.resolves,
linked_id=commit.id,
)
affected = Group.objects.filter(id=group.id).update(
status=GroupStatus.RESOLVED, resolved_at=now
)
if not resolution:
created = affected
group.status = GroupStatus.RESOLVED
group.resolved_at = now
assigned_to = self_subscribe_and_assign_issue(acting_user, group)
if assigned_to is not None:
result["assignedTo"] = assigned_to
if created:
activity = Activity.objects.create(
project=project_lookup[group.project_id],
group=group,
type=activity_type,
user=acting_user,
ident=resolution.id if resolution else None,
data=activity_data,
)
# TODO(dcramer): we need a solution for activity rollups
# before sending notifications on bulk changes
if not is_bulk:
activity.send_notification()
issue_resolved.send_robust(
organization_id=organization_id,
user=acting_user or request.user,
group=group,
project=project_lookup[group.project_id],
resolution_type=res_type_str,
sender=update_groups,
)
kick_off_status_syncs.apply_async(
kwargs={"project_id": group.project_id, "group_id": group.id}
)
result.update({"status": "resolved", "statusDetails": status_details})
elif status:
new_status = STATUS_CHOICES[result["status"]]
with transaction.atomic():
happened = queryset.exclude(status=new_status).update(status=new_status)
GroupResolution.objects.filter(group__in=group_ids).delete()
if new_status == GroupStatus.IGNORED:
metrics.incr("group.ignored", skip_internal=True)
ignore_duration = (
statusDetails.pop("ignoreDuration", None)
or statusDetails.pop("snoozeDuration", None)
) or None
ignore_count = statusDetails.pop("ignoreCount", None) or None
ignore_window = statusDetails.pop("ignoreWindow", None) or None
ignore_user_count = statusDetails.pop("ignoreUserCount", None) or None
ignore_user_window = statusDetails.pop("ignoreUserWindow", None) or None
if ignore_duration or ignore_count or ignore_user_count:
if ignore_duration:
ignore_until = timezone.now() + timedelta(minutes=ignore_duration)
else:
ignore_until = None
for group in group_list:
state = {}
if ignore_count and not ignore_window:
state["times_seen"] = group.times_seen
if ignore_user_count and not ignore_user_window:
state["users_seen"] = group.count_users_seen()
GroupSnooze.objects.create_or_update(
group=group,
values={
"until": ignore_until,
"count": ignore_count,
"window": ignore_window,
"user_count": ignore_user_count,
"user_window": ignore_user_window,
"state": state,
"actor_id": request.user.id
if request.user.is_authenticated()
else None,
},
)
result["statusDetails"] = {
"ignoreCount": ignore_count,
"ignoreUntil": ignore_until,
"ignoreUserCount": ignore_user_count,
"ignoreUserWindow": ignore_user_window,
"ignoreWindow": ignore_window,
"actor": serialize(extract_lazy_object(request.user), request.user),
}
else:
GroupSnooze.objects.filter(group__in=group_ids).delete()
ignore_until = None
result["statusDetails"] = {}
else:
result["statusDetails"] = {}
if group_list and happened:
if new_status == GroupStatus.UNRESOLVED:
activity_type = Activity.SET_UNRESOLVED
activity_data = {}
elif new_status == GroupStatus.IGNORED:
activity_type = Activity.SET_IGNORED
activity_data = {
"ignoreCount": ignore_count,
"ignoreDuration": ignore_duration,
"ignoreUntil": ignore_until,
"ignoreUserCount": ignore_user_count,
"ignoreUserWindow": ignore_user_window,
"ignoreWindow": ignore_window,
}
groups_by_project_id = defaultdict(list)
for group in group_list:
groups_by_project_id[group.project_id].append(group)
for project in projects:
project_groups = groups_by_project_id.get(project.id)
if project_groups:
issue_ignored.send_robust(
project=project,
user=acting_user,
group_list=project_groups,
activity_data=activity_data,
sender=update_groups,
)
for group in group_list:
group.status = new_status
activity = Activity.objects.create(
project=project_lookup[group.project_id],
group=group,
type=activity_type,
user=acting_user,
data=activity_data,
)
# TODO(dcramer): we need a solution for activity rollups
# before sending notifications on bulk changes
if not is_bulk:
if acting_user:
GroupSubscription.objects.subscribe(
user=acting_user,
group=group,
reason=GroupSubscriptionReason.status_change,
)
activity.send_notification()
if new_status == GroupStatus.UNRESOLVED:
kick_off_status_syncs.apply_async(
kwargs={"project_id": group.project_id, "group_id": group.id}
)
if "assignedTo" in result:
assigned_actor = result["assignedTo"]
if assigned_actor:
for group in group_list:
resolved_actor = assigned_actor.resolve()
GroupAssignee.objects.assign(group, resolved_actor, acting_user)
result["assignedTo"] = serialize(
assigned_actor.resolve(), acting_user, ActorSerializer()
)
else:
for group in group_list:
GroupAssignee.objects.deassign(group, acting_user)
is_member_map = {
project.id: project.member_set.filter(user=acting_user).exists() for project in projects
}
if result.get("hasSeen"):
for group in group_list:
if is_member_map.get(group.project_id):
instance, created = create_or_update(
GroupSeen,
group=group,
user=acting_user,
project=project_lookup[group.project_id],
values={"last_seen": timezone.now()},
)
elif result.get("hasSeen") is False:
GroupSeen.objects.filter(group__in=group_ids, user=acting_user).delete()
if result.get("isBookmarked"):
for group in group_list:
GroupBookmark.objects.get_or_create(
project=project_lookup[group.project_id], group=group, user=acting_user
)
GroupSubscription.objects.subscribe(
user=acting_user, group=group, reason=GroupSubscriptionReason.bookmark
)
elif result.get("isBookmarked") is False:
GroupBookmark.objects.filter(group__in=group_ids, user=acting_user).delete()
# TODO(dcramer): we could make these more efficient by first
    # querying for which rows are present (if N > 2), flipping the flag
# on those rows, and then creating the missing rows
if result.get("isSubscribed") in (True, False):
is_subscribed = result["isSubscribed"]
for group in group_list:
# NOTE: Subscribing without an initiating event (assignment,
# commenting, etc.) clears out the previous subscription reason
# to avoid showing confusing messaging as a result of this
# action. It'd be jarring to go directly from "you are not
# subscribed" to "you were subscribed due since you were
# assigned" just by clicking the "subscribe" button (and you
# may no longer be assigned to the issue anyway.)
GroupSubscription.objects.create_or_update(
user=acting_user,
group=group,
project=project_lookup[group.project_id],
values={"is_active": is_subscribed, "reason": GroupSubscriptionReason.unknown},
)
result["subscriptionDetails"] = {
"reason": SUBSCRIPTION_REASON_MAP.get(GroupSubscriptionReason.unknown, "unknown")
}
if "isPublic" in result:
# We always want to delete an existing share, because triggering
# an isPublic=True even when it's already public, should trigger
# regenerating.
for group in group_list:
if GroupShare.objects.filter(group=group).delete():
result["shareId"] = None
Activity.objects.create(
project=project_lookup[group.project_id],
group=group,
type=Activity.SET_PRIVATE,
user=acting_user,
)
if result.get("isPublic"):
for group in group_list:
share, created = GroupShare.objects.get_or_create(
project=project_lookup[group.project_id], group=group, user=acting_user
)
if created:
result["shareId"] = share.uuid
Activity.objects.create(
project=project_lookup[group.project_id],
group=group,
type=Activity.SET_PUBLIC,
user=acting_user,
)
# XXX(dcramer): this feels a bit shady like it should be its own
# endpoint
if result.get("merge") and len(group_list) > 1:
# don't allow merging cross project
if len(projects) > 1:
return Response({"detail": "Merging across multiple projects is not supported"})
group_list_by_times_seen = sorted(
group_list, key=lambda g: (g.times_seen, g.id), reverse=True
)
primary_group, groups_to_merge = group_list_by_times_seen[0], group_list_by_times_seen[1:]
group_ids_to_merge = [g.id for g in groups_to_merge]
eventstream_state = eventstream.start_merge(
primary_group.project_id, group_ids_to_merge, primary_group.id
)
Group.objects.filter(id__in=group_ids_to_merge).update(status=GroupStatus.PENDING_MERGE)
transaction_id = uuid4().hex
merge_groups.delay(
from_object_ids=group_ids_to_merge,
to_object_id=primary_group.id,
transaction_id=transaction_id,
eventstream_state=eventstream_state,
)
Activity.objects.create(
project=project_lookup[primary_group.project_id],
group=primary_group,
type=Activity.MERGE,
user=acting_user,
data={"issues": [{"id": c.id} for c in groups_to_merge]},
)
result["merge"] = {
"parent": six.text_type(primary_group.id),
"children": [six.text_type(g.id) for g in groups_to_merge],
}
return Response(result)
|
|
# Copyright 2014 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from neutron.api.v2 import attributes as attr
from neutron.api.v2 import resource_helper
from neutron_lib.api import converters as conv
from neutron_lib import exceptions
from networking_cisco._i18n import _
from networking_cisco import backwards_compatibility as bc
from networking_cisco.plugins.cisco.common import utils
NEUTRON_VERSION = bc.NEUTRON_VERSION
NEUTRON_NEWTON_VERSION = bc.NEUTRON_NEWTON_VERSION
ROUTERTYPE = 'routertype'
ROUTERTYPE_ALIAS = ROUTERTYPE
TYPE_ATTR = ROUTERTYPE + ':id'
ROUTER_TYPES = ROUTERTYPE + 's'
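# Attribute map for the new 'routertypes' collection introduced by this extension.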
RESOURCE_ATTRIBUTE_MAP = {
ROUTER_TYPES: {
'id': {'allow_post': True, 'allow_put': False,
'validate': {'type:uuid_or_none': None}, 'is_visible': True,
'default': None, 'primary_key': True},
'name': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None}, 'is_visible': True,
'default': ''},
'description': {'allow_post': True, 'allow_put': True,
'validate': {'type:string_or_none': None},
'is_visible': True, 'default': None},
'tenant_id': {'allow_post': True, 'allow_put': False,
'required_by_policy': True, 'is_visible': True},
'template_id': {'allow_post': True, 'allow_put': False,
'required_by_policy': True,
'validate': {'type:uuid': None}, 'is_visible': True},
'ha_enabled_by_default': {'allow_post': True, 'allow_put': True,
'convert_to': conv.convert_to_boolean,
'validate': {'type:boolean': None},
'default': False, 'is_visible': True},
'shared': {'allow_post': True, 'allow_put': False,
'convert_to': conv.convert_to_boolean,
'validate': {'type:boolean': None}, 'default': True,
'is_visible': True},
#TODO(bobmel): add HA attribute: One of None, 'GPLB', 'VRRP', or 'HSRP'
'slot_need': {'allow_post': True, 'allow_put': True,
'validate': {'type:non_negative': None},
'convert_to': conv.convert_to_int,
'default': 0, 'is_visible': True},
'scheduler': {'allow_post': True, 'allow_put': False,
'required_by_policy': True,
'convert_to': utils.convert_validate_driver_class,
'is_visible': True},
'driver': {'allow_post': True, 'allow_put': False,
'required_by_policy': True,
'convert_to': utils.convert_validate_driver_class,
'is_visible': True},
'cfg_agent_service_helper': {
'allow_post': True, 'allow_put': False,
'required_by_policy': True,
'convert_to': utils.convert_validate_driver_class,
'is_visible': True},
'cfg_agent_driver': {'allow_post': True, 'allow_put': False,
'required_by_policy': True,
'convert_to': utils.convert_validate_driver_class,
'is_visible': True},
}
}
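# Extends the core 'routers' resource with the routertype:id attribute.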
EXTENDED_ATTRIBUTES_2_0 = {
'routers': {
TYPE_ATTR: {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'default': bc.constants.ATTR_NOT_SPECIFIED,
'is_visible': True},
}
}
class Routertype(bc.extensions.ExtensionDescriptor):
"""Extension class to define different types of Neutron routers.
This class is used by Neutron's extension framework to support
definition of different types of Neutron Routers.
Attribute 'router_type:id' is the uuid or name of a certain router type.
    It can be set during creation of a Neutron router. If a Neutron router is
    moved (by an admin user) to a hosting device of a different hosting device
    type, the router type of the Neutron router will also change. Non-admin
    users can request that a Neutron router's type be changed.
To create a router of router type <name>:
(shell) router-create <router_name> --router_type:id <uuid_or_name>
"""
@classmethod
def get_name(cls):
return "Router types for routing service"
@classmethod
def get_alias(cls):
return ROUTERTYPE_ALIAS
@classmethod
def get_description(cls):
return "Introduces router types for Neutron Routers"
@classmethod
def get_namespace(cls):
return "http://docs.openstack.org/ext/" + ROUTERTYPE + "/api/v2.0"
@classmethod
def get_updated(cls):
return "2014-02-07T10:00:00-00:00"
@classmethod
def get_resources(cls):
"""Returns Ext Resources."""
plural_mappings = resource_helper.build_plural_mappings(
{}, RESOURCE_ATTRIBUTE_MAP)
if NEUTRON_VERSION.version[0] <= NEUTRON_NEWTON_VERSION.version[0]:
attr.PLURALS.update(plural_mappings)
return resource_helper.build_resource_info(plural_mappings,
RESOURCE_ATTRIBUTE_MAP,
bc.constants.L3)
def get_extended_resources(self, version):
if version == "2.0":
return EXTENDED_ATTRIBUTES_2_0
else:
return {}
# router_type exceptions
class RouterTypeInUse(exceptions.InUse):
message = _("Router type %(id)s in use.")
class RouterTypeNotFound(exceptions.NotFound):
message = _("Router type %(id)s does not exist")
class MultipleRouterTypes(exceptions.NeutronException):
message = _("Multiple router type with same name %(name)s exist. Id "
"must be used to specify router type.")
class SchedulerNotFound(exceptions.NetworkNotFound):
message = _("Scheduler %(scheduler)s does not exist")
class RouterTypeAlreadyDefined(exceptions.NeutronException):
message = _("Router type %(type)s already exists")
class NoSuchHostingDeviceTemplateForRouterType(exceptions.NeutronException):
message = _("No hosting device template with id %(type)s exists")
class HostingDeviceTemplateUsedByRouterType(exceptions.NeutronException):
message = _("Router type %(type)s already defined for Hosting device "
"template with id %(type)s")
class RouterTypeHasRouters(exceptions.NeutronException):
message = _("Router type %(type)s cannot be deleted since routers "
"of that type exists")
@six.add_metaclass(abc.ABCMeta)
class RoutertypePluginBase(object):
"""REST API to manage router types.
All methods except listing require admin context.
"""
@abc.abstractmethod
def create_routertype(self, context, routertype):
"""Creates a router type.
Also binds it to the specified hosting device template.
"""
pass
@abc.abstractmethod
def update_routertype(self, context, id, routertype):
"""Updates a router type."""
pass
@abc.abstractmethod
def delete_routertype(self, context, id):
"""Deletes a router type."""
pass
@abc.abstractmethod
def get_routertype(self, context, id, fields=None):
"""Lists defined router type."""
pass
@abc.abstractmethod
def get_routertypes(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
"""Lists defined router types."""
pass
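# Illustrative only: a minimal in-memory sketch of the abstract API above.
# This is not the actual Cisco plugin; the dict-based storage and the
# {'routertype': {...}} body shape follow common Neutron plugin conventions
# and are assumptions of this example.
class _InMemoryRoutertypePlugin(RoutertypePluginBase):
    def __init__(self):
        self._routertypes = {}

    def create_routertype(self, context, routertype):
        body = dict(routertype['routertype'])
        self._routertypes[body['id']] = body
        return body

    def update_routertype(self, context, id, routertype):
        self.get_routertype(context, id).update(routertype['routertype'])
        return self._routertypes[id]

    def delete_routertype(self, context, id):
        self.get_routertype(context, id)
        del self._routertypes[id]

    def get_routertype(self, context, id, fields=None):
        try:
            return self._routertypes[id]
        except KeyError:
            raise RouterTypeNotFound(id=id)

    def get_routertypes(self, context, filters=None, fields=None,
                        sorts=None, limit=None, marker=None,
                        page_reverse=False):
        return list(self._routertypes.values())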
|
|
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapreduce execution context.
Mapreduce context provides handler code with information about
current mapreduce execution and organizes utility data flow
from handlers such as counters, log messages, mutation pools.
"""
__all__ = ["get",
"Pool",
"Context",
"COUNTER_MAPPER_CALLS",
"COUNTER_MAPPER_WALLTIME_MS",
"DATASTORE_DEADLINE",
"MAX_ENTITY_COUNT",
]
import heapq
import logging
import threading
try:
from google.appengine.ext import ndb
except ImportError:
ndb = None
from google.appengine.api import datastore
from google.appengine.ext import db
from google.appengine.runtime import apiproxy_errors
# Maximum number of items. The pool will be flushed when it reaches this
# amount. The datastore API is smart enough to group entities into as few RPCs
# as possible without exceeding the RPC size. So in theory, MAX_ENTITY_COUNT
# can be as big as the instance's memory can hold.
# This number is just an estimate.
# TODO(user): Do batching by entity size if cheap. b/10427424
MAX_ENTITY_COUNT = 20
# Deadline in seconds for mutation pool datastore operations.
DATASTORE_DEADLINE = 15
# The name of the counter which counts all mapper calls.
COUNTER_MAPPER_CALLS = "mapper-calls"
# Total walltime in msec given to the mapper process. This is not just the
# mapper handler function, but includes all i/o overhead.
COUNTER_MAPPER_WALLTIME_MS = "mapper-walltime-ms"
# pylint: disable=protected-access
# pylint: disable=g-bad-name
def _normalize_entity(value):
"""Return an entity from an entity or model instance."""
if ndb is not None and isinstance(value, ndb.Model):
return None
if getattr(value, "_populate_internal_entity", None):
return value._populate_internal_entity()
return value
def _normalize_key(value):
"""Return a key from an entity, model instance, key, or key string."""
if ndb is not None and isinstance(value, (ndb.Model, ndb.Key)):
return None
if getattr(value, "key", None):
return value.key()
elif isinstance(value, basestring):
return datastore.Key(value)
else:
return value
class _ItemList(object):
"""A buffer that holds arbitrary items and auto flushes them when full.
  Callers of this class provide the logic on how to flush.
This class takes care of the common logic of when to flush and when to retry.
Properties:
items: list of objects.
length: length of item list.
size: aggregate item size in bytes.
"""
DEFAULT_RETRIES = 3
_LARGEST_ITEMS_TO_LOG = 5
def __init__(self,
max_entity_count,
flush_function,
timeout_retries=DEFAULT_RETRIES,
repr_function=None):
"""Constructor.
Args:
max_entity_count: maximum number of entities before flushing it to db.
flush_function: a function that can flush the items. The function is
called with a list of items as the first argument, a dict of options
as second argument. Currently options can contain {"deadline": int}.
see self.flush on how the function is called.
timeout_retries: how many times to retry upon timeouts.
repr_function: a function that turns an item into meaningful
representation. For debugging large items.
"""
self.items = []
self.__max_entity_count = int(max_entity_count)
self.__flush_function = flush_function
self.__repr_function = repr_function
self.__timeout_retries = int(timeout_retries)
def __str__(self):
return "ItemList of with %s items" % len(self.items)
def append(self, item):
"""Add new item to the list.
If needed, append will first flush existing items and clear existing items.
Args:
item: an item to add to the list.
"""
if self.should_flush():
self.flush()
self.items.append(item)
def flush(self):
"""Force a flush."""
if not self.items:
return
retry = 0
options = {"deadline": DATASTORE_DEADLINE}
while retry <= self.__timeout_retries:
try:
self.__flush_function(self.items, options)
self.clear()
break
except db.Timeout, e:
logging.warning(e)
logging.warning("Flushing '%s' timed out. Will retry for the %s time.",
self, retry)
retry += 1
options["deadline"] *= 2
except apiproxy_errors.RequestTooLargeError:
self._log_largest_items()
raise
else:
raise
def _log_largest_items(self):
if not self.__repr_function:
logging.error("Got RequestTooLargeError but can't interpret items in "
"_ItemList %s.", self)
return
sizes = [len(self.__repr_function(i)) for i in self.items]
largest = heapq.nlargest(self._LARGEST_ITEMS_TO_LOG,
zip(sizes, self.items),
lambda t: t[0])
    # Set field for test only.
self._largest = [(s, self.__repr_function(i)) for s, i in largest]
logging.error("Got RequestTooLargeError. Largest items: %r", self._largest)
def clear(self):
"""Clear item list."""
self.items = []
def should_flush(self):
"""Whether to flush before append the next entity.
Returns:
True to flush. False other.
"""
return len(self.items) >= self.__max_entity_count
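# Illustrative only: _ItemList is a generic buffer, so it can be exercised with
# any flush callable. The logging flush function below is an assumption of this
# example, not something the mapreduce library provides.
def _example_item_list_usage():
  def flush_to_log(items, options):
    logging.info("flushing %d items (deadline=%s)",
                 len(items), options.get("deadline"))

  buffered = _ItemList(max_entity_count=3, flush_function=flush_to_log)
  for value in range(10):
    buffered.append(value)  # flushes automatically once 3 items accumulate
  buffered.flush()  # flush whatever is left explicitly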
class Pool(object):
"""Mutation pool accumulates changes to perform them in patch.
Any Pool subclass should not be public. Instead, Pool should define an
operation.Operation class and let user uses that. For example, in a map
function, user can do:
def map(foo):
yield OperationOnMyPool(any_argument)
Since Operation is a callable object, Mapreduce library will invoke
any Operation object that is yielded with context.Context instance.
The operation object can then access MyPool from Context.get_pool.
"""
def flush(self):
"""Flush all changes."""
raise NotImplementedError()
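# Illustrative only: a sketch of the pattern the Pool docstring above describes,
# using a made-up pool. In the real library the yielded object is expected to
# derive from operation.Operation; the plain callable below only demonstrates
# how such an object would reach its pool through the Context.
class _ExampleLoggingPool(Pool):
  """Accumulates strings and logs them in one batch when flushed."""

  def __init__(self):
    self.messages = []

  def append(self, message):
    self.messages.append(message)

  def flush(self):
    if self.messages:
      logging.info("flushing %d messages", len(self.messages))
      self.messages = []


class _ExampleLogOperation(object):
  """Operation-style callable invoked with the current Context."""

  def __init__(self, message):
    self.message = message

  def __call__(self, ctx):
    pool = ctx.get_pool("example_logging_pool")
    if pool is None:
      pool = _ExampleLoggingPool()
      ctx.register_pool("example_logging_pool", pool)
    pool.append(self.message)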
class _MutationPool(Pool):
"""Mutation pool accumulates datastore changes to perform them in batch.
Properties:
puts: _ItemList of entities to put to datastore.
deletes: _ItemList of keys to delete from datastore.
ndb_puts: _ItemList of ndb entities to put to datastore.
ndb_deletes: _ItemList of ndb keys to delete from datastore.
"""
def __init__(self,
max_entity_count=MAX_ENTITY_COUNT,
mapreduce_spec=None):
"""Constructor.
Args:
max_entity_count: maximum number of entities before flushing it to db.
mapreduce_spec: An optional instance of MapperSpec.
"""
self.max_entity_count = max_entity_count
params = mapreduce_spec.params if mapreduce_spec is not None else {}
self.force_writes = bool(params.get("force_ops_writes", False))
self.puts = _ItemList(max_entity_count,
self._flush_puts,
repr_function=self._db_repr)
self.deletes = _ItemList(max_entity_count,
self._flush_deletes)
self.ndb_puts = _ItemList(max_entity_count,
self._flush_ndb_puts,
repr_function=self._ndb_repr)
self.ndb_deletes = _ItemList(max_entity_count,
self._flush_ndb_deletes)
def put(self, entity):
"""Registers entity to put to datastore.
Args:
entity: an entity or model instance to put.
"""
actual_entity = _normalize_entity(entity)
if actual_entity is None:
return self.ndb_put(entity)
self.puts.append(actual_entity)
def ndb_put(self, entity):
"""Like put(), but for NDB entities."""
assert ndb is not None and isinstance(entity, ndb.Model)
self.ndb_puts.append(entity)
def delete(self, entity):
"""Registers entity to delete from datastore.
Args:
entity: an entity, model instance, or key to delete.
"""
key = _normalize_key(entity)
if key is None:
return self.ndb_delete(entity)
self.deletes.append(key)
def ndb_delete(self, entity_or_key):
"""Like delete(), but for NDB entities/keys."""
if ndb is not None and isinstance(entity_or_key, ndb.Model):
key = entity_or_key.key
else:
key = entity_or_key
self.ndb_deletes.append(key)
def flush(self):
"""Flush(apply) all changed to datastore."""
self.puts.flush()
self.deletes.flush()
self.ndb_puts.flush()
self.ndb_deletes.flush()
@classmethod
def _db_repr(cls, entity):
"""Converts entity to a readable repr.
Args:
entity: datastore.Entity or datastore_types.Key.
Returns:
Proto in str.
"""
return str(entity._ToPb())
@classmethod
def _ndb_repr(cls, entity):
"""Converts entity to a readable repr.
Args:
entity: ndb.Model
Returns:
Proto in str.
"""
return str(entity._to_pb())
def _flush_puts(self, items, options):
"""Flush all puts to datastore."""
datastore.Put(items, config=self._create_config(options))
def _flush_deletes(self, items, options):
"""Flush all deletes to datastore."""
datastore.Delete(items, config=self._create_config(options))
def _flush_ndb_puts(self, items, options):
"""Flush all NDB puts to datastore."""
assert ndb is not None
ndb.put_multi(items, config=self._create_config(options))
def _flush_ndb_deletes(self, items, options):
"""Flush all deletes to datastore."""
assert ndb is not None
ndb.delete_multi(items, config=self._create_config(options))
def _create_config(self, options):
"""Creates datastore Config.
Returns:
A datastore_rpc.Configuration instance.
"""
return datastore.CreateConfig(deadline=options["deadline"],
force_writes=self.force_writes)
class _Counters(Pool):
"""Regulates access to counters.
Counters Pool is a str to int map. It is saved as part of ShardState so it
is flushed when ShardState commits to datastore successfully.
"""
def __init__(self, shard_state):
"""Constructor.
Args:
shard_state: current mapreduce shard state as model.ShardState.
"""
self._shard_state = shard_state
def increment(self, counter_name, delta=1):
"""Increment counter value.
Args:
counter_name: name of the counter as string.
delta: increment delta as int.
"""
self._shard_state.counters_map.increment(counter_name, delta)
def flush(self):
"""Flush unsaved counter values."""
pass
class Context(object):
"""MapReduce execution context.
The main purpose of Context is to facilitate IO. User code, input reader,
and output writer code can plug in pools (see Pool class) to Context to
batch operations.
There is a single Context instance associated with each worker thread.
It can be accessed via context.get(). handlers.MapperWorkerHandler creates
this instance before any IO code (input_reader, output_writer, user functions)
is called.
Each Pool decides how to batch and when to flush.
Context and all its pools are flushed by the end of a slice.
Upon error in slice execution, what is flushed is undefined. (See _Counters
for an exception).
Properties:
mapreduce_spec: current mapreduce specification as model.MapreduceSpec.
"""
# Current context instance
_local = threading.local()
def __init__(self, mapreduce_spec, shard_state, task_retry_count=0):
"""Constructor.
Args:
mapreduce_spec: mapreduce specification as model.MapreduceSpec.
shard_state: an instance of model.ShardState. This has to be the same
instance as the one MapperWorkerHandler mutates. All mutations are
flushed to datastore in the end of the slice.
task_retry_count: how many times this task has been retried.
"""
self._shard_state = shard_state
self.mapreduce_spec = mapreduce_spec
self.task_retry_count = task_retry_count
if self.mapreduce_spec:
self.mapreduce_id = self.mapreduce_spec.mapreduce_id
else:
# Only in tests
self.mapreduce_id = None
if shard_state:
self.shard_id = shard_state.get_shard_id()
else:
# Only in tests
self.shard_id = None
# TODO(user): Allow user to specify max entity count for the pool
# as they know how big their entities are.
self._mutation_pool = _MutationPool(mapreduce_spec=mapreduce_spec)
self._counters = _Counters(shard_state)
# TODO(user): Remove this after fixing
# keyhole/dataeng/imagery/feeds/client_lib.py in another CL.
self.counters = self._counters
self._pools = {}
self.register_pool("mutation_pool", self._mutation_pool)
self.register_pool("counters", self.counters)
def flush(self):
"""Flush all information recorded in context."""
for pool in self._pools.values():
pool.flush()
def register_pool(self, key, pool):
"""Register an arbitrary pool to be flushed together with this context.
Args:
key: pool key as string.
pool: a pool instance.
"""
self._pools[key] = pool
def get_pool(self, key):
"""Obtains an instance of registered pool.
Args:
key: pool key as string.
Returns:
an instance of the pool registered earlier, or None.
"""
return self._pools.get(key, None)
@classmethod
def _set(cls, context):
"""Set current context instance.
Args:
context: new context as Context or None.
"""
cls._local._context_instance = context
# This method is intended for user code to access context instance.
# MR framework should still try to take context as an explicit argument
# whenever possible (dependency injection).
def get():
"""Get current context instance.
Returns:
current context as Context.
"""
if not hasattr(Context._local, "_context_instance") :
return None
return Context._local._context_instance
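# Illustrative only: how handler code typically reaches this module. The entity
# argument is assumed to be a db.Model/datastore entity; the counter name is
# made up for this example.
def _example_map(entity):
  ctx = get()
  if ctx is None:
    return
  ctx.counters.increment("entities-seen")
  # Route the write through the registered mutation pool so it is batched and
  # flushed at the end of the slice instead of issuing one RPC per call.
  ctx.get_pool("mutation_pool").put(entity)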
|
|
#########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from nose.plugins.attrib import attr
from manager_rest.test.base_test import LATEST_API_VERSION
from cloudify_rest_client.exceptions import CloudifyClientError
from .test_base import SecurityTestBase
@attr(client_min_version=3, client_max_version=LATEST_API_VERSION)
class ResourcePermissionTests(SecurityTestBase):
NOT_FOUND_MSG = '404: Requested `{0}` with ID `{1}` was not found'
def _upload_blueprint(self, private=False):
blueprint_id = 'blueprint_id'
blueprint_path = os.path.join(
self.get_blueprint_path('mock_blueprint'),
'blueprint.yaml'
)
with self.use_secured_client(username='bob',
password='bob_password'):
self.client.blueprints.upload(
blueprint_path,
blueprint_id,
private_resource=private
)
return blueprint_id
def _create_deployment(self, private=False):
deployment_id = 'deployment_id'
blueprint_id = self._upload_blueprint()
with self.use_secured_client(username='bob',
password='bob_password'):
self.client.deployments.create(blueprint_id,
deployment_id,
private_resource=private)
return deployment_id
def _upload_plugin(self, private=False):
plugin_path = self.create_wheel('psutil', '3.3.0')
with self.use_secured_client(username='bob',
password='bob_password'):
plugin = self.client.plugins.upload(plugin_path,
private_resource=private)
return plugin.id
def _upload_snapshot(self, private=False):
snapshot_path = self.create_wheel('psutil', '3.3.0')
snapshot_id = 'snapshot_id'
with self.use_secured_client(username='bob',
password='bob_password'):
self.client.snapshots.upload(snapshot_path,
snapshot_id,
private_resource=private)
return snapshot_id
def _create_snapshot(self, username, password, private=False):
snapshot_id = 'snapshot_id'
with self.use_secured_client(username=username, password=password):
self.client.snapshots.create(snapshot_id=snapshot_id,
include_metrics=False,
include_credentials=False,
private_resource=private)
return snapshot_id
def test_private_blueprint(self):
blueprint_id = self._upload_blueprint(private=True)
self._test_resource_get_and_list(
resource_name='Blueprint',
get_func_getter=lambda client: client.blueprints.get,
resource_id=blueprint_id,
list_func_getter=lambda client: client.blueprints.list
)
def test_private_deployment(self):
deployment_id = self._create_deployment(private=True)
self._test_resource_get_and_list(
resource_name='Deployment',
get_func_getter=lambda client: client.deployments.get,
resource_id=deployment_id,
list_func_getter=lambda client: client.deployments.list
)
def test_deployment_in_private_blueprint(self):
deployment_id = 'deployment_id'
blueprint_id = self._upload_blueprint(private=True)
with self.use_secured_client(username='bob',
password='bob_password'):
self.client.deployments.create(blueprint_id,
deployment_id)
self._test_resource_get_and_list(
resource_name='Deployment',
get_func_getter=lambda client: client.deployments.get,
resource_id=deployment_id,
list_func_getter=lambda client: client.deployments.list
)
def test_private_plugin(self):
plugin_id = self._upload_plugin(private=True)
self._test_resource_get_and_list(
resource_name='Plugin',
get_func_getter=lambda client: client.plugins.get,
resource_id=plugin_id,
list_func_getter=lambda client: client.plugins.list
)
def test_private_snapshot_upload(self):
snapshot_id = self._upload_snapshot(private=True)
self._test_snapshots_get_and_list(snapshot_id)
def test_private_snapshot_create(self):
        # Only admins are allowed to create snapshots, so bob should fail
self.assertRaises(
CloudifyClientError,
self._create_snapshot,
'bob',
'bob_password'
)
snapshot_id = self._create_snapshot(
'alice',
'alice_password',
private=True
)
self._test_snapshots_get_and_list(snapshot_id)
def test_cant_view_private_blueprint(self):
blueprint_id = self._upload_blueprint(private=True)
self._test_cant_view_private_resource(
resource_id=blueprint_id,
resource_name='Blueprint',
get_func_getter=lambda client: client.blueprints.get
)
def test_cant_view_private_plugin(self):
plugin_id = self._upload_plugin(private=True)
self._test_cant_view_private_resource(
resource_id=plugin_id,
resource_name='Plugin',
get_func_getter=lambda client: client.plugins.get
)
def test_cant_view_private_snapshot(self):
snapshot_id = self._upload_snapshot(private=True)
self._test_cant_view_private_resource(
resource_id=snapshot_id,
resource_name='Snapshot',
get_func_getter=lambda client: client.snapshots.get
)
def test_cant_view_private_deployment(self):
deployment_id = self._create_deployment(private=True)
self._test_cant_view_private_resource(
resource_id=deployment_id,
resource_name='Deployment',
get_func_getter=lambda client: client.deployments.get
)
def _test_snapshots_get_and_list(self, snapshot_id):
self._test_resource_get_and_list(
resource_name='Snapshot',
get_func_getter=lambda client: client.snapshots.get,
resource_id=snapshot_id,
list_func_getter=lambda client: client.snapshots.list
)
def _test_resource_get_and_list(self,
resource_name,
get_func_getter,
resource_id,
list_func_getter):
error_msg = self.NOT_FOUND_MSG.format(resource_name, resource_id)
# A resource uploaded with `private_resource` set to True shouldn't
# be visible to other users
with self.use_secured_client(username='dave',
password='dave_password'):
get_func = get_func_getter(self.client)
list_func = list_func_getter(self.client)
self.assertRaisesRegexp(
CloudifyClientError,
error_msg,
get_func,
resource_id
)
self.assertEqual(len(list_func()), 0)
# But it still should be visible to admins
with self.use_secured_client(username='alice',
password='alice_password'):
get_func = get_func_getter(self.client)
list_func = list_func_getter(self.client)
get_func(resource_id)
self.assertEqual(len(list_func()), 1)
def _test_cant_view_private_resource(self,
resource_id,
resource_name,
get_func_getter):
error_msg = self.NOT_FOUND_MSG.format(resource_name, resource_id)
# A resource uploaded with `private_resource` set to True shouldn't
# be visible to other users (dave)
with self.use_secured_client(username='dave',
password='dave_password'):
get_func = get_func_getter(self.client)
self.assertRaisesRegexp(
CloudifyClientError,
error_msg,
get_func,
resource_id
)
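# Illustrative only: the behaviour exercised by the tests above, seen from a
# plain REST client. The manager host, tenant and constructor arguments are
# assumptions of this sketch and are not used by the test suite itself.
def _example_direct_client_usage():
    from cloudify_rest_client import CloudifyClient
    bob = CloudifyClient(host='manager-host', username='bob',
                         password='bob_password', tenant='default_tenant')
    bob.blueprints.upload('blueprint.yaml', 'bp-1', private_resource=True)
    # For any other non-admin user the blueprint stays invisible: get() raises
    # CloudifyClientError (404) and list() simply omits it; admins still see it.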
|
|
import sys, re, inspect, operator
import logging
from util import emit, name2label, plural, singular, glob_intersection
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import sqlalchemy
from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base
try:
from sqlalchemy.ext.declarative import _deferred_relationship
except ImportError:
#SA 0.5 support
from sqlalchemy.ext.declarative import _deferred_relation as _deferred_relationship
from sqlalchemy.orm import relation, backref, class_mapper, Mapper
try:
#SA 0.5 support
from sqlalchemy.orm import RelationProperty
except ImportError:
RelationProperty = None
import config
import constants
from formatter import _repr_coltype_as
log = logging.getLogger('saac.decl')
log.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
formatter = logging.Formatter("%(name)s - %(levelname)s - %(message)s")
handler.setFormatter(formatter)
log.addHandler(handler)
def by_name(a, b):
if a.name>b.name:
return 1
return -1
def by__name__(a, b):
if a.__name__ > b.__name__:
return 1
return -1
def column_repr(self):
kwarg = []
if self.key != self.name:
kwarg.append( 'key')
if hasattr(self, 'primary_key') and self.primary_key:
self.primary_key = True
kwarg.append( 'primary_key')
if not self.nullable:
kwarg.append( 'nullable')
if self.onupdate:
kwarg.append( 'onupdate')
if self.default:
kwarg.append( 'default')
ks = ', '.join('%s=%r' % (k, getattr(self, k)) for k in kwarg)
name = self.name
    if hasattr(config, 'options') and config.options.generictypes:
coltype = repr(self.type)
elif type(self.type).__module__ == 'sqlalchemy.types':
coltype = repr(self.type)
else:
# Try to 'cast' this column type to a cross-platform type
# from sqlalchemy.types, dropping any database-specific type
# arguments.
for base in type(self.type).__mro__:
if (base.__module__ == 'sqlalchemy.types' and
base.__name__ in sqlalchemy.__all__):
coltype = _repr_coltype_as(self.type, base)
break
# FIXME: if a dialect has a non-standard type that does not
# derive from an ANSI type, there's no choice but to ignore
# generic-types and output the exact type. However, import
# headers have already been output and lack the required
# dialect import.
else:
coltype = repr(self.type)
data = {'name': self.name,
'type': coltype,
'constraints': ', '.join(["ForeignKey('%s')"%cn.target_fullname for cn in self.foreign_keys]),
'args': ks and ks or '',
}
    if data['constraints']:
        data['constraints'] = ', ' + data['constraints']
    if data['args']:
        data['args'] = ', ' + data['args']
return constants.COLUMN % data
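# Illustrative only: for a reflected integer primary-key column named user_id
# that carries a foreign key to users.id, column_repr() emits roughly
#   Column(u'user_id', Integer(), ForeignKey('users.id'), primary_key=True)
# (the exact text depends on constants.COLUMN and on the reflected type).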
class ModelFactory(object):
def __init__(self, config):
self.config = config
self.used_model_names = []
self.used_table_names = []
schema = getattr(self.config, 'schema', None)
self._metadata = MetaData(bind=config.engine)
kw = {}
self.schemas = None
if schema:
if isinstance(schema, (list, tuple)):
self.schemas = schema
else:
self.schemas = (schema, )
for schema in self.schemas:
log.info('Reflecting database... schema:%s'%schema)
self._metadata.reflect(schema=schema)
else:
log.info('Reflecting database...')
self._metadata.reflect()
self.DeclarativeBase = declarative_base(metadata=self._metadata)
def _table_repr(self, table):
s = "Table(u'%s', metadata,\n"%(table.name)
for column in table.c:
s += " %s,\n"%column_repr(column)
if table.schema:
s +=" schema='%s'\n"%table.schema
s+=")"
return s
def __repr__(self):
tables = self.get_many_to_many_tables()
models = self.models
s = StringIO()
engine = self.config.engine
if not isinstance(engine, basestring):
engine = str(engine.url)
s.write(constants.HEADER_DECL%engine)
if 'postgres' in engine:
s.write(constants.PG_IMPORT)
self.used_table_names = []
self.used_model_names = []
for table in tables:
table_name = self.find_new_name(table.name, self.used_table_names)
self.used_table_names.append(table_name)
s.write('%s = %s\n\n'%(table_name, self._table_repr(table)))
for model in models:
s.write(model.__repr__())
s.write("\n\n")
if self.config.example or self.config.interactive:
s.write(constants.EXAMPLE_DECL%(models[0].__name__,models[0].__name__))
if self.config.interactive:
s.write(constants.INTERACTIVE%([model.__name__ for model in models], models[0].__name__))
return s.getvalue()
@property
def tables(self):
return self._metadata.tables.keys()
@property
def models(self):
if hasattr(self, '_models'):
return self._models
self.used_model_names = []
self.used_table_names = []
tables = self.config.options.tables
if tables:
subset, missing, unglobbed = glob_intersection([t.name for t in self.get_non_many_to_many_tables()], tables)
for identifier in missing:
print >>config.err, 'Table "%s" not found.' % identifier
for glob in unglobbed:
print >>config.err, '"%s" matched no tables.' % glob
if not subset:
print >>config.err, "No tables matched!"
sys.exit(1)
do_tables = [t for t in self.get_non_many_to_many_tables() if t.name in subset]
else:
do_tables = self.get_non_many_to_many_tables()
exclude = self.config.options.exclude
if exclude:
do_tables = [t for t in do_tables if not re.search(exclude, t.name)]
models = []
for table in self.get_non_many_to_many_tables():
try:
model = self.create_model(table)
models.append(model)
except Exception, e:
logging.warning("Error creating a model for %s: %s", table, e)
self._models = sorted(models, by__name__)
return self._models
def model_table_lookup(self):
if hasattr(self, '_model_table_lookup'):
return self._model_table_lookup
self._model_table_lookup = dict(((m.__table__.name, m.__name__) for m in self.models))
return self._model_table_lookup
    def find_new_name(self, prefix, used, i=0):
        if i != 0:
            name = "%s%d" % (prefix, i)
        else:
            name = prefix
        if name in used:
            # recurse from the original prefix so collisions become
            # prefix1, prefix2, ... instead of compounding suffixes
            return self.find_new_name(prefix, used, i + 1)
        return name
def create_model(self, table):
#partially borrowed from Jorge Vargas' code
#http://dpaste.org/V6YS/
log.debug('Creating Model from table: %s'%table.name)
model_name = self.find_new_name(singular(name2label(table.name)), self.used_model_names)
self.used_model_names.append(model_name)
is_many_to_many_table = self.is_many_to_many_table(table)
table_name = self.find_new_name(table.name, self.used_table_names)
self.used_table_names.append(table_name)
mtl = self.model_table_lookup
class Temporal(self.DeclarativeBase):
__table__ = table
@classmethod
def _relation_repr(cls, rel):
target = rel.argument
if target and inspect.isfunction(target):
target = target()
if isinstance(target, Mapper):
target = target.class_
target = target.__name__
primaryjoin=''
lookup = mtl()
if rel.primaryjoin is not None:
right_lookup = lookup.get(rel.primaryjoin.right.table.name, '%s.c.'%rel.primaryjoin.right.table.name)
left_lookup = lookup.get(rel.primaryjoin.left.table.name, '%s.c.'%rel.primaryjoin.left.table.name)
primaryjoin = ", primaryjoin='%s.%s==%s.%s'"%(left_lookup,
rel.primaryjoin.left.name,
right_lookup,
rel.primaryjoin.right.name)
secondary = ''
secondaryjoin = ''
if rel.secondary is not None:
secondary = ", secondary=%s"%rel.secondary.name
right_lookup = lookup.get(rel.secondaryjoin.right.table.name, '%s.c.'%rel.secondaryjoin.right.table.name)
left_lookup = lookup.get(rel.secondaryjoin.left.table.name, '%s.c.'%rel.secondaryjoin.left.table.name)
# import ipdb; ipdb.set_trace()
secondaryjoin = ", secondaryjoin='%s.%s==%s.%s'"%(left_lookup,
rel.secondaryjoin.left.name,
right_lookup,
rel.secondaryjoin.right.name)
backref=''
# if rel.backref:
# backref=", backref='%s'"%rel.backref.key
return "%s = relation('%s'%s%s%s%s)"%(rel.key, target, primaryjoin, secondary, secondaryjoin, backref)
@classmethod
def __repr__(cls):
log.debug('repring class with name %s'%cls.__name__)
try:
mapper = class_mapper(cls)
s = ""
s += "class "+model_name+'(DeclarativeBase):\n'
if is_many_to_many_table:
s += " __table__ = %s\n\n"%table_name
else:
s += " __tablename__ = '%s'\n\n"%table_name
if hasattr(cls, '__table_args__'):
s+=" __table_args__ = %s\n\n"%cls.__table_args__
s += " #column definitions\n"
for column in sorted(cls.__table__.c, by_name):
s += " %s = %s\n"%(column.name, column_repr(column))
s += "\n #relation definitions\n"
ess = s
# this is only required in SA 0.5
if RelationProperty:
for prop in mapper.iterate_properties:
                            if isinstance(prop, RelationProperty):
s+=' %s\n'%cls._relation_repr(prop)
return s
except Exception, e:
log.error("Could not generate class for: %s"%cls.__name__)
from traceback import format_exc
log.error(format_exc())
return ''
#hack the class to have the right classname
Temporal.__name__ = model_name
#add in the schema
if self.config.schema:
Temporal.__table_args__ = {'schema':table.schema}
#trick sa's model registry to think the model is the correct name
if model_name != 'Temporal':
Temporal._decl_class_registry[model_name] = Temporal._decl_class_registry['Temporal']
del Temporal._decl_class_registry['Temporal']
#add in single relations
fks = self.get_foreign_keys(table)
for related_table in sorted(fks.keys(), by_name):
columns = fks[related_table]
if len(columns)>1:
continue
column = columns[0]
log.info(' Adding <primary> foreign key for:%s'%related_table.name)
backref_name = plural(table_name)
# import ipdb; ipdb.set_trace()
rel = relation(singular(name2label(related_table.name, related_table.schema)),
primaryjoin=column==list(column.foreign_keys)[0].column)#, backref=backref_name)
setattr(Temporal, related_table.name, _deferred_relationship(Temporal, rel))
#add in many-to-many relations
for join_table in self.get_related_many_to_many_tables(table.name):
primary_column = [c for c in join_table.columns if c.foreign_keys and list(c.foreign_keys)[0].column.table==table][0]
# import ipdb; ipdb.set_trace();
for column in join_table.columns:
if column.foreign_keys:
key = list(column.foreign_keys)[0]
if key.column.table is not table:
related_column = related_table = list(column.foreign_keys)[0].column
related_table = related_column.table
log.info(' Adding <secondary> foreign key(%s) for:%s'%(key, related_table.name))
# import ipdb; ipdb.set_trace()
setattr(Temporal, plural(related_table.name), _deferred_relationship(Temporal,
relation(singular(name2label(related_table.name,
related_table.schema)),
secondary=join_table,
primaryjoin=list(primary_column.foreign_keys)[0].column==primary_column,
secondaryjoin=column==related_column
)))
break;
return Temporal
def get_table(self, name):
"""(name) -> sqlalchemy.schema.Table
get the table definition with the given table name
"""
if self.schemas:
for schema in self.schemas:
if schema and not name.startswith(schema):
new_name = '.'.join((schema, name))
table = self._metadata.tables.get(new_name, None)
if table is not None:
return table
return self._metadata.tables[name]
def get_foreign_keys(self, table):
fks = {}
for column in table.columns:
if len(column.foreign_keys)>0:
fks.setdefault(list(column.foreign_keys)[0].column.table, []).append(column)
return fks
def is_many_to_many_table(self, table):
fks = self.get_foreign_keys(table).values()
if len(fks) >= 2:
if len(fks[0]) == 1 and len(fks[1]) == 1:
return list(fks[0][0].foreign_keys)[0].column.table != list(fks[1][0].foreign_keys)[0].column.table
return False
def is_only_many_to_many_table(self, table):
return len(self.get_foreign_keys(table)) == 2 and len(table.c) == 2
def get_many_to_many_tables(self):
if not hasattr(self, '_many_to_many_tables'):
self._many_to_many_tables = [table for table in self._metadata.tables.values() if self.is_many_to_many_table(table)]
return sorted(self._many_to_many_tables, by_name)
def get_non_many_to_many_tables(self):
tables = [table for table in self._metadata.tables.values() if not(self.is_only_many_to_many_table(table))]
return sorted(tables, by_name)
def get_related_many_to_many_tables(self, table_name):
tables = []
src_table = self.get_table(table_name)
for table in self.get_many_to_many_tables():
for column in table.columns:
if column.foreign_keys:
key = list(column.foreign_keys)[0]
if key.column.table is src_table:
tables.append(table)
break
return sorted(tables, by_name)
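# Illustrative only: a minimal driver for ModelFactory. sqlautocode normally
# populates the config module from command-line options; the attributes set
# below are assumptions of this sketch.
#
#   import sqlalchemy
#   import config
#   config.engine = sqlalchemy.create_engine('sqlite:///example.db')
#   config.schema = None
#   factory = ModelFactory(config)
#   print factory   # emits Table definitions plus declarative model classes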
|
|
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class nshttpparam(base_resource) :
""" Configuration for HTTP parameter resource. """
def __init__(self) :
self._dropinvalreqs = ""
self._markhttp09inval = ""
self._markconnreqinval = ""
self._insnssrvrhdr = ""
self._nssrvrhdr = ""
self._logerrresp = ""
self._conmultiplex = ""
self._maxreusepool = 0
@property
def dropinvalreqs(self) :
ur"""Drop invalid HTTP requests or responses.<br/>Default value: OFF<br/>Possible values = ON, OFF.
"""
try :
return self._dropinvalreqs
except Exception as e:
raise e
@dropinvalreqs.setter
def dropinvalreqs(self, dropinvalreqs) :
ur"""Drop invalid HTTP requests or responses.<br/>Default value: OFF<br/>Possible values = ON, OFF
"""
try :
self._dropinvalreqs = dropinvalreqs
except Exception as e:
raise e
@property
def markhttp09inval(self) :
ur"""Mark HTTP/0.9 requests as invalid.<br/>Default value: OFF<br/>Possible values = ON, OFF.
"""
try :
return self._markhttp09inval
except Exception as e:
raise e
@markhttp09inval.setter
def markhttp09inval(self, markhttp09inval) :
ur"""Mark HTTP/0.9 requests as invalid.<br/>Default value: OFF<br/>Possible values = ON, OFF
"""
try :
self._markhttp09inval = markhttp09inval
except Exception as e:
raise e
@property
def markconnreqinval(self) :
ur"""Mark CONNECT requests as invalid.<br/>Default value: OFF<br/>Possible values = ON, OFF.
"""
try :
return self._markconnreqinval
except Exception as e:
raise e
@markconnreqinval.setter
def markconnreqinval(self, markconnreqinval) :
ur"""Mark CONNECT requests as invalid.<br/>Default value: OFF<br/>Possible values = ON, OFF
"""
try :
self._markconnreqinval = markconnreqinval
except Exception as e:
raise e
@property
def insnssrvrhdr(self) :
ur"""Enable or disable NetScaler server header insertion for NetScaler generated HTTP responses.<br/>Default value: OFF<br/>Possible values = ON, OFF.
"""
try :
return self._insnssrvrhdr
except Exception as e:
raise e
@insnssrvrhdr.setter
def insnssrvrhdr(self, insnssrvrhdr) :
ur"""Enable or disable NetScaler server header insertion for NetScaler generated HTTP responses.<br/>Default value: OFF<br/>Possible values = ON, OFF
"""
try :
self._insnssrvrhdr = insnssrvrhdr
except Exception as e:
raise e
@property
def nssrvrhdr(self) :
ur"""The server header value to be inserted. If no explicit header is specified then NSBUILD.RELEASE is used as default server header.<br/>Minimum length = 1.
"""
try :
return self._nssrvrhdr
except Exception as e:
raise e
@nssrvrhdr.setter
def nssrvrhdr(self, nssrvrhdr) :
ur"""The server header value to be inserted. If no explicit header is specified then NSBUILD.RELEASE is used as default server header.<br/>Minimum length = 1
"""
try :
self._nssrvrhdr = nssrvrhdr
except Exception as e:
raise e
@property
def logerrresp(self) :
ur"""Server header value to be inserted.<br/>Default value: ON<br/>Possible values = ON, OFF.
"""
try :
return self._logerrresp
except Exception as e:
raise e
@logerrresp.setter
def logerrresp(self, logerrresp) :
ur"""Server header value to be inserted.<br/>Default value: ON<br/>Possible values = ON, OFF
"""
try :
self._logerrresp = logerrresp
except Exception as e:
raise e
@property
def conmultiplex(self) :
ur"""Reuse server connections for requests from more than one client connections.<br/>Default value: ENABLED<br/>Possible values = ENABLED, DISABLED.
"""
try :
return self._conmultiplex
except Exception as e:
raise e
@conmultiplex.setter
def conmultiplex(self, conmultiplex) :
ur"""Reuse server connections for requests from more than one client connections.<br/>Default value: ENABLED<br/>Possible values = ENABLED, DISABLED
"""
try :
self._conmultiplex = conmultiplex
except Exception as e:
raise e
@property
def maxreusepool(self) :
ur"""Maximum limit on the number of connections, from the NetScaler to a particular server that are kept in the reuse pool. This setting is helpful for optimal memory utilization and for reducing the idle connections to the server just after the peak time.<br/>Maximum length = 360000.
"""
try :
return self._maxreusepool
except Exception as e:
raise e
@maxreusepool.setter
def maxreusepool(self, maxreusepool) :
ur"""Maximum limit on the number of connections, from the NetScaler to a particular server that are kept in the reuse pool. This setting is helpful for optimal memory utilization and for reducing the idle connections to the server just after the peak time.<br/>Maximum length = 360000
"""
try :
self._maxreusepool = maxreusepool
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(nshttpparam_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.nshttpparam
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
return 0
except Exception as e :
raise e
@classmethod
def update(cls, client, resource) :
ur""" Use this API to update nshttpparam.
"""
try :
if type(resource) is not list :
updateresource = nshttpparam()
updateresource.dropinvalreqs = resource.dropinvalreqs
updateresource.markhttp09inval = resource.markhttp09inval
updateresource.markconnreqinval = resource.markconnreqinval
updateresource.insnssrvrhdr = resource.insnssrvrhdr
updateresource.nssrvrhdr = resource.nssrvrhdr
updateresource.logerrresp = resource.logerrresp
updateresource.conmultiplex = resource.conmultiplex
updateresource.maxreusepool = resource.maxreusepool
return updateresource.update_resource(client)
except Exception as e :
raise e
@classmethod
def unset(cls, client, resource, args) :
ur""" Use this API to unset the properties of nshttpparam resource.
Properties that need to be unset are specified in args array.
"""
try :
if type(resource) is not list :
unsetresource = nshttpparam()
return unsetresource.unset_resource(client, args)
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
ur""" Use this API to fetch all the nshttpparam resources that are configured on netscaler.
"""
try :
if not name :
obj = nshttpparam()
response = obj.get_resources(client, option_)
return response
except Exception as e :
raise e
class Conmultiplex:
ENABLED = "ENABLED"
DISABLED = "DISABLED"
class Markhttp09inval:
ON = "ON"
OFF = "OFF"
class Insnssrvrhdr:
ON = "ON"
OFF = "OFF"
class Markconnreqinval:
ON = "ON"
OFF = "OFF"
class Logerrresp:
ON = "ON"
OFF = "OFF"
class Dropinvalreqs:
ON = "ON"
OFF = "OFF"
class nshttpparam_response(base_response) :
def __init__(self, length=1) :
self.nshttpparam = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.nshttpparam = [nshttpparam() for _ in range(length)]
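# Illustrative only: typical NITRO usage of the resource defined above. The
# management address and credentials are placeholders, nitro_service comes from
# the same SDK, and indexing the first element of get() assumes the usual
# single-element result for this global parameter resource.
def _example_update_nshttpparam():
    from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service
    client = nitro_service("10.0.0.10", "http")
    client.login("nsroot", "nsroot")
    param = nshttpparam.get(client)[0]
    param.dropinvalreqs = nshttpparam.Dropinvalreqs.ON
    nshttpparam.update(client, param)
    client.logout()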
|
|
import tensorflow as tf
from six.moves import cPickle
# Loading net skeleton with parameters name and shapes.
with open("./util/net_skeleton.ckpt", "rb") as f:
net_skeleton = cPickle.load(f)
# The DeepLab-LargeFOV model can be represented as follows:
## input -> [conv-relu](dilation=1, channels=64) x 2 -> [max_pool](stride=2)
## -> [conv-relu](dilation=1, channels=128) x 2 -> [max_pool](stride=2)
## -> [conv-relu](dilation=1, channels=256) x 3 -> [max_pool](stride=2)
## -> [conv-relu](dilation=1, channels=512) x 3 -> [max_pool](stride=1)
## -> [conv-relu](dilation=2, channels=512) x 3 -> [max_pool](stride=1) -> [avg_pool](stride=1)
## -> [conv-relu](dilation=12, channels=1024) -> [dropout]
## -> [conv-relu](dilation=1, channels=1024) -> [dropout]
## -> [conv-relu](dilation=1, channels=21) -> [pixel-wise softmax loss].
num_layers = [2, 2, 3, 3, 3, 1, 1, 1]
dilations = [[1, 1],
[1, 1],
[1, 1, 1],
[1, 1, 1],
[2, 2, 2],
[12],
[1],
[1]]
n_classes = 21
# All convolutional and pooling operations are applied using kernels of size 3x3;
# padding is added so that the output is of the same size as the input.
ks = 3
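# Illustrative only: a consistency check (not run at import time) relating the
# block description above to the pretrained skeleton. Every convolutional layer
# listed in `dilations`/`num_layers` consumes one weight and one bias entry
# from `net_skeleton`, in the order _create_network walks them.
def _example_check_skeleton():
    n_conv_layers = sum(len(block) for block in dilations)  # 16 layers in total
    assert sum(num_layers) == n_conv_layers
    assert len(net_skeleton) == 2 * n_conv_layers, \
        "expected one weight and one bias entry per convolutional layer"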
def create_variable(name, shape):
"""Create a convolution filter variable of the given name and shape,
and initialise it using Xavier initialisation
(http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf).
"""
initialiser = tf.contrib.layers.xavier_initializer_conv2d(dtype=tf.float32)
variable = tf.Variable(initialiser(shape=shape), name=name)
return variable
def create_bias_variable(name, shape):
"""Create a bias variable of the given name and shape,
and initialise it to zero.
"""
initialiser = tf.constant_initializer(value=0.0, dtype=tf.float32)
variable = tf.Variable(initialiser(shape=shape), name=name)
return variable
class DeepLabLFOVModel(object):
"""DeepLab-LargeFOV model with atrous convolution and bilinear upsampling.
    This class implements a multi-layer convolutional neural network for the semantic image segmentation task.
This is the same as the model described in this paper: https://arxiv.org/abs/1412.7062 - please look
there for details.
"""
def __init__(self, weights_path=None):
"""Create the model.
Args:
          weights_path: the path to the ckpt file with dictionary of weights from .caffemodel.
"""
self.variables = self._create_variables(weights_path)
def _create_variables(self, weights_path):
"""Create all variables used by the network.
        This allows them to be shared between multiple calls
        to the loss function.
Args:
weights_path: the path to the ckpt file with dictionary of weights from .caffemodel.
If none, initialise all variables randomly.
Returns:
A dictionary with all variables.
"""
var = list()
index = 0
if weights_path is not None:
with open(weights_path, "rb") as f:
weights = cPickle.load(f) # Load pre-trained weights.
for name, shape in net_skeleton:
var.append(tf.Variable(weights[name],
name=name))
del weights
else:
            # Initialise all weights randomly with the Xavier scheme
            # and all biases to zero.
for name, shape in net_skeleton:
if "/w" in name: # Weight filter.
w = create_variable(name, list(shape))
var.append(w)
else:
b = create_bias_variable(name, list(shape))
var.append(b)
return var
def _create_network(self, input_batch, keep_prob):
"""Construct DeepLab-LargeFOV network.
Args:
input_batch: batch of pre-processed images.
keep_prob: probability of keeping neurons intact.
Returns:
A downsampled segmentation mask.
"""
current = input_batch
v_idx = 0 # Index variable.
# Last block is the classification layer.
for b_idx in xrange(len(dilations) - 1):
for l_idx, dilation in enumerate(dilations[b_idx]):
w = self.variables[v_idx * 2]
b = self.variables[v_idx * 2 + 1]
if dilation == 1:
conv = tf.nn.conv2d(current, w, strides=[1, 1, 1, 1], padding='SAME')
else:
conv = tf.nn.atrous_conv2d(current, w, dilation, padding='SAME')
current = tf.nn.relu(tf.nn.bias_add(conv, b))
v_idx += 1
# Optional pooling and dropout after each block.
if b_idx < 3:
current = tf.nn.max_pool(current,
ksize=[1, ks, ks, 1],
strides=[1, 2, 2, 1],
padding='SAME')
elif b_idx == 3:
current = tf.nn.max_pool(current,
ksize=[1, ks, ks, 1],
strides=[1, 1, 1, 1],
padding='SAME')
elif b_idx == 4:
current = tf.nn.max_pool(current,
ksize=[1, ks, ks, 1],
strides=[1, 1, 1, 1],
padding='SAME')
current = tf.nn.avg_pool(current,
ksize=[1, ks, ks, 1],
strides=[1, 1, 1, 1],
padding='SAME')
elif b_idx <= 6:
current = tf.nn.dropout(current, keep_prob=keep_prob)
# Classification layer; no ReLU.
w = self.variables[v_idx * 2]
b = self.variables[v_idx * 2 + 1]
conv = tf.nn.conv2d(current, w, strides=[1, 1, 1, 1], padding='SAME')
current = tf.nn.bias_add(conv, b)
return current
def prepare_label(self, input_batch, new_size):
"""Resize masks and perform one-hot encoding.
Args:
input_batch: input tensor of shape [batch_size H W 1].
new_size: a tensor with new height and width.
Returns:
Outputs a tensor of shape [batch_size h w 21]
with last dimension comprised of 0's and 1's only.
"""
with tf.name_scope('label_encode'):
input_batch = tf.image.resize_nearest_neighbor(input_batch, new_size) # As labels are integer numbers, need to use NN interp.
input_batch = tf.squeeze(input_batch, squeeze_dims=[3]) # Reducing the channel dimension.
input_batch = tf.one_hot(input_batch, depth=21)
return input_batch
def preds(self, input_batch):
"""Create the network and run inference on the input batch.
Args:
input_batch: batch of pre-processed images.
Returns:
Argmax over the predictions of the network of the same shape as the input.
"""
raw_output = self._create_network(tf.cast(input_batch, tf.float32), keep_prob=tf.constant(1.0))
raw_output = tf.image.resize_bilinear(raw_output, tf.shape(input_batch)[1:3,])
raw_output = tf.argmax(raw_output, dimension=3)
raw_output = tf.expand_dims(raw_output, dim=3) # Create 4D-tensor.
return tf.cast(raw_output, tf.uint8)
def loss(self, img_batch, label_batch):
"""Create the network, run inference on the input batch and compute loss.
Args:
          img_batch: batch of pre-processed images.
          label_batch: batch of corresponding ground-truth label masks.
Returns:
Pixel-wise softmax loss.
"""
raw_output = self._create_network(tf.cast(img_batch, tf.float32), keep_prob=tf.constant(0.5))
prediction = tf.reshape(raw_output, [-1, n_classes])
# Need to resize labels and convert using one-hot encoding.
label_batch = self.prepare_label(label_batch, tf.pack(raw_output.get_shape()[1:3]))
gt = tf.reshape(label_batch, [-1, n_classes])
# Pixel-wise softmax loss.
loss = tf.nn.softmax_cross_entropy_with_logits(prediction, gt)
reduced_loss = tf.reduce_mean(loss)
return reduced_loss
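# Illustrative only: wiring the model into a minimal graph. The placeholder
# shapes and the learning rate are assumptions of this sketch; in the original
# project the batches come from an image reader queue instead of placeholders.
def _example_build_graph():
    image_batch = tf.placeholder(tf.float32, shape=[1, 321, 321, 3])
    label_batch = tf.placeholder(tf.uint8, shape=[1, 321, 321, 1])
    net = DeepLabLFOVModel()  # random (Xavier) initialisation, no .caffemodel
    loss = net.loss(image_batch, label_batch)
    train_op = tf.train.AdamOptimizer(learning_rate=1e-4).minimize(loss)
    preds = net.preds(image_batch)
    return train_op, preds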
|
|
"""
@package mi.dataset.driver.PARAD_K.STC_IMODEM.test.test_driver
@file marine-integrations/mi/dataset/driver/PARAD_K/STC_IMODEM/driver.py
@author Mike Nicoletti
@brief Test cases for PARAD_K_STC_IMODEM driver
USAGE:
Make tests verbose and provide stdout
* From the IDK
$ bin/dsa/test_driver
$ bin/dsa/test_driver -i [-t testname]
$ bin/dsa/test_driver -q [-t testname]
"""
__author__ = 'Mike Nicoletti, Steve Myerson (recovered)'
__license__ = 'Apache 2.0'
import unittest
from nose.plugins.attrib import attr
from mock import Mock
from mi.core.log import get_logger ; log = get_logger()
from mi.idk.exceptions import SampleTimeout
from mi.idk.dataset.unit_test import DataSetTestCase
from mi.idk.dataset.unit_test import DataSetIntegrationTestCase
from mi.idk.dataset.unit_test import DataSetQualificationTestCase
from mi.dataset.dataset_driver import DataSourceConfigKey, DataSetDriverConfigKeys
from mi.dataset.driver.PARAD_K.STC_IMODEM.driver import \
PARAD_K_STC_IMODEM_DataSetDriver, \
DataTypeKey
from mi.dataset.parser.parad_k_stc_imodem import \
Parad_k_stc_imodemDataParticle, \
Parad_k_stc_imodemRecoveredDataParticle
from pyon.agent.agent import ResourceAgentState
from interface.objects import ResourceAgentErrorEvent
from mi.dataset.dataset_driver import DriverParameter
DIR_REC = '/tmp/dsatest_rec'
DIR_TEL = '/tmp/dsatest_tel'
FILE1_TEL = 'E0000001.DAT'
FILE2_TEL = 'E0000002.DAT'
FILE3_TEL = 'E0000003.DAT'
FILE1_REC = 'E0000011.DAT'
FILE2_REC = 'E0000012.DAT'
FILE3_REC = 'E0000013.DAT'
# Fill in driver details
DataSetTestCase.initialize(
driver_module='mi.dataset.driver.PARAD_K.STC_IMODEM.driver',
driver_class='PARAD_K_STC_IMODEM_DataSetDriver',
agent_resource_id = '123xyz',
agent_name = 'Agent007',
agent_packet_config = PARAD_K_STC_IMODEM_DataSetDriver.stream_config(),
startup_config = {
DataSourceConfigKey.RESOURCE_ID: 'parad_k_stc_imodem',
DataSourceConfigKey.HARVESTER:
{
DataTypeKey.PARAD_K_STC:
{
DataSetDriverConfigKeys.DIRECTORY: DIR_TEL,
DataSetDriverConfigKeys.PATTERN: 'E*.DAT',
DataSetDriverConfigKeys.FREQUENCY: 1,
},
DataTypeKey.PARAD_K_STC_RECOVERED:
{
DataSetDriverConfigKeys.DIRECTORY: DIR_REC,
DataSetDriverConfigKeys.PATTERN: 'E*.DAT',
DataSetDriverConfigKeys.FREQUENCY: 1,
}
},
DataSourceConfigKey.PARSER:
{
DataTypeKey.PARAD_K_STC: {},
DataTypeKey.PARAD_K_STC_RECOVERED: {}
}
}
)
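# The startup_config above wires two independent harvester/parser pairs: the
# telemetered DataTypeKey watches DIR_TEL and the recovered DataTypeKey watches
# DIR_REC, both matching 'E*.DAT' with a polling frequency of 1. Supporting a
# further stream would mean adding a matching entry to both the HARVESTER and
# PARSER sections.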
REC_SAMPLE_STREAM = 'parad_k__stc_imodem_instrument_recovered'
TEL_SAMPLE_STREAM = 'parad_k__stc_imodem_instrument'
###############################################################################
# INTEGRATION TESTS #
# Device specific integration tests are for #
# testing device specific capabilities #
###############################################################################
@attr('INT', group='mi')
class IntegrationTest(DataSetIntegrationTestCase):
def test_get(self):
"""
Test that we can get data from files. Verify that the driver
sampling can be started and stopped
"""
log.info("================ START INTEG TEST GET =====================")
key_rec = DataTypeKey.PARAD_K_STC_RECOVERED
key_tel = DataTypeKey.PARAD_K_STC
particle_rec = Parad_k_stc_imodemRecoveredDataParticle
particle_tel = Parad_k_stc_imodemDataParticle
# Start sampling
self.clear_async_data()
self.driver.start_sampling()
self.clear_async_data()
self.create_sample_data_set_dir('first.DAT', DIR_TEL, FILE1_TEL)
self.assert_data(particle_tel, 'first.result.yml', count=1, timeout=10)
self.assert_file_ingested(FILE1_TEL, key_tel)
self.create_sample_data_set_dir('first.DAT', DIR_REC, FILE1_REC)
self.assert_data(particle_rec, 'rec_first.result.yml', count=1, timeout=10)
self.assert_file_ingested(FILE1_REC, key_rec)
self.create_sample_data_set_dir('second.DAT', DIR_REC, FILE2_REC)
self.assert_data(particle_rec, 'rec_second.result.yml', count=4, timeout=10)
self.create_sample_data_set_dir('second.DAT', DIR_TEL, FILE2_TEL)
self.assert_data(particle_tel, 'second.result.yml', count=4, timeout=10)
self.create_sample_data_set_dir('E0000303.DAT', DIR_TEL, FILE3_TEL)
self.assert_data(particle_tel, count=32, timeout=10)
log.info("================ END INTEG TEST GET =====================")
def test_stop_resume(self):
"""
Test the ability to stop and restart the process
"""
path_1 = self.create_sample_data_set_dir('first.DAT', DIR_TEL, FILE1_TEL)
path_2 = self.create_sample_data_set_dir('second.DAT', DIR_TEL, FILE2_TEL)
rec_path_1 = self.create_sample_data_set_dir('first.DAT', DIR_REC, FILE1_REC)
rec_path_2 = self.create_sample_data_set_dir('second.DAT', DIR_REC, FILE2_REC)
# Create and store the new driver state
state = {
DataTypeKey.PARAD_K_STC:
{FILE1_TEL: self.get_file_state(path_1, True, 50),
FILE2_TEL: self.get_file_state(path_2, False, 76)},
DataTypeKey.PARAD_K_STC_RECOVERED:
{FILE1_REC: self.get_file_state(rec_path_1, True, 50),
FILE2_REC: self.get_file_state(rec_path_2, False, 76)}
}
self.driver = self._get_driver_object(memento=state)
# create some data to parse
self.clear_async_data()
self.driver.start_sampling()
# verify data is produced
self.assert_data(Parad_k_stc_imodemRecoveredDataParticle,
'rec_partial_second.result.yml', count=2, timeout=10)
self.assert_data(Parad_k_stc_imodemDataParticle,
'partial_second.result.yml', count=2, timeout=10)
def test_stop_start_ingest(self):
"""
Test the ability to stop and restart sampling, and ingesting files in the correct order
"""
log.info("========= START INTEG TEST STOP START INGEST ==============")
key_rec = DataTypeKey.PARAD_K_STC_RECOVERED
key_tel = DataTypeKey.PARAD_K_STC
particle_rec = Parad_k_stc_imodemRecoveredDataParticle
particle_tel = Parad_k_stc_imodemDataParticle
# create some data to parse
self.clear_async_data()
self.driver.start_sampling()
self.create_sample_data_set_dir('first.DAT', DIR_REC, FILE1_REC)
self.create_sample_data_set_dir('second.DAT', DIR_REC, FILE2_REC)
self.create_sample_data_set_dir('first.DAT', DIR_TEL, FILE1_TEL)
self.create_sample_data_set_dir('second.DAT', DIR_TEL, FILE2_TEL)
log.info("========= READ TELEMETERED FILE 1 ==============")
self.assert_data(particle_tel, 'first.result.yml', count=1, timeout=10)
self.assert_file_ingested(FILE1_TEL, key_tel)
self.assert_file_not_ingested(FILE2_TEL, key_tel)
log.info("========= READ RECOVERED FILE 1 ==============")
self.assert_data(particle_rec, 'rec_first.result.yml', count=1, timeout=10)
self.assert_file_ingested(FILE1_REC, key_rec)
self.assert_file_not_ingested(FILE2_REC, key_rec)
log.info("========= STOP AND RESTART SAMPLING ==============")
self.driver.stop_sampling()
self.driver.start_sampling()
log.info("========= READ RECOVERED FILE 2 ==============")
self.assert_data(particle_rec, 'rec_second.result.yml', count=4, timeout=10)
self.assert_file_ingested(FILE2_REC, key_rec)
log.info("========= READ TELEMETERED FILE 2 ==============")
self.assert_data(particle_tel, 'second.result.yml', count=4, timeout=10)
self.assert_file_ingested(FILE2_TEL, key_tel)
log.info("========= END INTEG TEST STOP START INGEST ==============")
def test_sample_exception(self):
"""
test that an empty file generates a sample exception
"""
self.clear_async_data()
filename = 'FOO'
self.create_sample_data_set_dir(filename, DIR_TEL, 'E1234567.DAT')
# Start sampling and watch for an exception
self.driver.start_sampling()
# an event catches the sample exception
self.assert_event('ResourceAgentErrorEvent')
###############################################################################
# QUALIFICATION TESTS #
# Device specific qualification tests are for #
# testing device specific capabilities #
###############################################################################
@attr('QUAL', group='mi')
class QualificationTest(DataSetQualificationTestCase):
def test_publish_path(self):
"""
Setup an agent/driver/harvester/parser and verify that data is
published out the agent
"""
self.create_sample_data_set_dir('second.DAT', DIR_TEL, FILE1_TEL)
self.create_sample_data_set_dir('second.DAT', DIR_REC, FILE1_REC)
self.assert_initialize(final_state=ResourceAgentState.COMMAND)
# NOTE: If the processing is not slowed down here, the engineering samples are
# returned in the wrong order
self.dataset_agent_client.set_resource({DriverParameter.RECORDS_PER_SECOND: 1})
self.assert_start_sampling()
# Verify we get 4 samples from each file
try:
result = self.data_subscribers.get_samples(TEL_SAMPLE_STREAM, 4)
log.debug("RESULT: %s", result)
# Verify values
self.assert_data_values(result, 'second.result.yml')
result = self.data_subscribers.get_samples(REC_SAMPLE_STREAM, 4)
log.debug("RECOVERED RESULT: %s", result)
# Verify values
self.assert_data_values(result, 'rec_second.result.yml')
except Exception as e:
log.error("Exception trapped: %s", e)
self.fail("Sample timeout.")
def test_large_import(self):
"""
Test importing a large number of samples from the file at once
"""
self.create_sample_data_set_dir('E0000303.DAT', DIR_TEL, 'E0000303.DAT')
self.create_sample_data_set_dir('E0000427.DAT', DIR_TEL, 'E0000427.DAT')
self.create_sample_data_set_dir('E0000303.DAT', DIR_REC, 'E0001303.DAT')
self.create_sample_data_set_dir('E0000427.DAT', DIR_REC, 'E0001427.DAT')
self.assert_initialize()
# get results for each of the data particle streams
        self.get_samples(TEL_SAMPLE_STREAM, 64, 40)
        self.get_samples(REC_SAMPLE_STREAM, 64, 40)
def test_for_nan(self):
"""
Test to verify that a Sample Exception occurs if the input file contains
a NaN value for the parad data.
"""
log.info("========== START QUAL TEST NAN INPUT TELEMETERED ==========")
self.event_subscribers.clear_events()
self.assert_initialize()
self.create_sample_data_set_dir('NaN.DAT', DIR_TEL, FILE3_TEL)
log.info("========== CHECK FOR EXCEPTION TELEMETERED ==========")
self.assert_event_received(ResourceAgentErrorEvent, 10)
self.assert_state_change(ResourceAgentState.STREAMING, 10)
log.info("========== END QUAL TEST NAN INPUT TELEMETERED ==========")
def test_for_nan_recovered(self):
"""
Test to verify that a Sample Exception occurs if the input file contains
a NaN value for the parad data.
"""
log.info("========== START QUAL TEST NAN INPUT RECOVERED ==========")
self.event_subscribers.clear_events()
self.assert_initialize()
self.create_sample_data_set_dir('NaN.DAT', DIR_REC, FILE2_REC)
log.info("========== CHECK FOR EXCEPTION RECOVERED ==========")
self.assert_event_received(ResourceAgentErrorEvent, 10)
self.assert_state_change(ResourceAgentState.STREAMING, 10)
log.info("========== END QUAL TEST NAN INPUT RECOVERED ==========")
def test_status_in_middle(self):
"""
This file has status particles in the middle and at the end
"""
        self.create_sample_data_set_dir('E0000039.DAT', DIR_TEL, 'E0000039.DAT')
        self.create_sample_data_set_dir('E0000039.DAT', DIR_REC, 'E0000139.DAT')
self.assert_initialize()
# get results for each of the data particle streams
        result2 = self.get_samples(TEL_SAMPLE_STREAM, 53, 40)
        result = self.get_samples(REC_SAMPLE_STREAM, 53, 40)
def test_stop_start(self):
"""
        Test the agent's ability to start data flowing, stop, then restart
at the correct spot.
"""
log.info("CONFIG: %s", self._agent_config())
self.create_sample_data_set_dir('first.DAT', DIR_TEL, FILE1_TEL)
self.create_sample_data_set_dir('first.DAT', DIR_REC, FILE1_REC)
self.assert_initialize(final_state=ResourceAgentState.COMMAND)
# Slow down processing to 1 per second to give us time to stop
self.dataset_agent_client.set_resource({DriverParameter.RECORDS_PER_SECOND: 1})
self.assert_start_sampling()
# Verify we get one sample
try:
# Read the first file and verify the data
result = self.get_samples(TEL_SAMPLE_STREAM)
log.debug("RESULT: %s", result)
# Verify values
self.assert_data_values(result, 'first.result.yml')
self.assert_sample_queue_size(TEL_SAMPLE_STREAM, 0)
# Read the first recovered file and verify the data
rec_result = self.get_samples(REC_SAMPLE_STREAM)
log.debug("REC RESULT: %s", rec_result)
# Verify values for recovered data
self.assert_data_values(rec_result, 'rec_first.result.yml')
self.assert_sample_queue_size(REC_SAMPLE_STREAM, 0)
self.create_sample_data_set_dir('second.DAT', DIR_TEL, FILE2_TEL)
self.create_sample_data_set_dir('second.DAT', DIR_REC, FILE2_REC)
# Now read the first two records of the second file
result = self.get_samples(TEL_SAMPLE_STREAM, 2)
log.debug("got result 1 %s", result)
# Now read the first two records of the second recovered file then stop
rec_result = self.get_samples(REC_SAMPLE_STREAM, 2)
log.debug("got rec result 1 %s", rec_result)
self.assert_stop_sampling()
self.assert_sample_queue_size(TEL_SAMPLE_STREAM, 0)
self.assert_sample_queue_size(REC_SAMPLE_STREAM, 0)
            # Restart sampling and ensure we get the last 2 records of the file
self.assert_start_sampling()
result2 = self.get_samples(TEL_SAMPLE_STREAM, 2)
log.debug("got result 2 %s", result2)
result.extend(result2)
self.assert_data_values(result, 'second.result.yml')
self.assert_sample_queue_size(TEL_SAMPLE_STREAM, 0)
            # Ensure we get the last 2 records of the recovered file
rec_result2 = self.get_samples(REC_SAMPLE_STREAM, 2)
log.debug("got rec result 2 %s", rec_result2)
rec_result.extend(rec_result2)
self.assert_data_values(rec_result, 'rec_second.result.yml')
self.assert_sample_queue_size(REC_SAMPLE_STREAM, 0)
except SampleTimeout as e:
log.error("Exception trapped: %s", e, exc_info=True)
self.fail("Sample timeout.")
def test_shutdown_restart(self):
"""
        Test the agent's ability to start, completely shut down, then restart
at the correct spot.
"""
log.info("CONFIG: %s", self._agent_config())
self.create_sample_data_set_dir('first.DAT', DIR_TEL, FILE1_TEL)
self.create_sample_data_set_dir('first.DAT', DIR_REC, FILE1_REC)
self.assert_initialize(final_state=ResourceAgentState.COMMAND)
# Slow down processing to 1 per second to give us time to stop
self.dataset_agent_client.set_resource({DriverParameter.RECORDS_PER_SECOND: 1})
self.assert_start_sampling()
# Verify we get one sample
try:
# Read the first file and verify the data
result = self.get_samples(TEL_SAMPLE_STREAM)
log.debug("RESULT: %s", result)
# Verify values
self.assert_data_values(result, 'first.result.yml')
self.assert_sample_queue_size(TEL_SAMPLE_STREAM, 0)
# Read the first recovered file and verify the data
rec_result = self.get_samples(REC_SAMPLE_STREAM)
log.debug("REC RESULT: %s", rec_result)
# Verify values
self.assert_data_values(rec_result, 'rec_first.result.yml')
self.assert_sample_queue_size(REC_SAMPLE_STREAM, 0)
self.create_sample_data_set_dir('second.DAT', DIR_TEL, FILE2_TEL)
self.create_sample_data_set_dir('second.DAT', DIR_REC, FILE2_REC)
# Now read the first two records of the second file
result = self.get_samples(TEL_SAMPLE_STREAM, 2)
log.debug("got result 1 %s", result)
# Now read the first two records of the second recovered file then stop
rec_result = self.get_samples(REC_SAMPLE_STREAM, 2)
log.debug("got rec result 1 %s", rec_result)
self.assert_stop_sampling()
self.assert_sample_queue_size(TEL_SAMPLE_STREAM, 0)
self.assert_sample_queue_size(REC_SAMPLE_STREAM, 0)
# stop the agent
self.stop_dataset_agent_client()
# re-start the agent
self.init_dataset_agent_client()
#re-initialize
self.assert_initialize(final_state=ResourceAgentState.COMMAND)
# Restart sampling and ensure we get the last 2 records of the files
self.assert_start_sampling()
result2 = self.get_samples(TEL_SAMPLE_STREAM, 2)
log.debug("got result 2 %s", result2)
result.extend(result2)
self.assert_data_values(result, 'second.result.yml')
self.assert_sample_queue_size(TEL_SAMPLE_STREAM, 0)
rec_result2 = self.get_samples(REC_SAMPLE_STREAM, 2)
log.debug("got rec result 2 %s", result2)
rec_result.extend(rec_result2)
self.assert_data_values(rec_result, 'rec_second.result.yml')
self.assert_sample_queue_size(REC_SAMPLE_STREAM, 0)
except SampleTimeout as e:
log.error("Exception trapped: %s", e, exc_info=True)
self.fail("Sample timeout.")
def test_parser_exception(self):
"""
Test an exception is raised after the driver is started during
record parsing.
"""
self.create_sample_data_set_dir('bad.DAT', DIR_TEL, FILE1_TEL)
self.create_sample_data_set_dir('first.DAT', DIR_TEL, FILE2_TEL)
self.assert_initialize()
self.event_subscribers.clear_events()
result = self.get_samples(TEL_SAMPLE_STREAM, 1)
self.assert_data_values(result, 'first.result.yml')
self.assert_sample_queue_size(TEL_SAMPLE_STREAM, 0)
# Verify an event was raised and we are in our retry state
self.assert_event_received(ResourceAgentErrorEvent, 10)
self.assert_state_change(ResourceAgentState.STREAMING, 10)
def test_parser_exception_recovered(self):
"""
Test an exception is raised after the driver is started during
record parsing for recovered data.
"""
self.create_sample_data_set_dir('bad.DAT', DIR_REC, FILE1_REC)
self.create_sample_data_set_dir('first.DAT', DIR_REC, FILE2_REC)
self.assert_initialize()
self.event_subscribers.clear_events()
result = self.get_samples(REC_SAMPLE_STREAM, 1)
self.assert_data_values(result, 'rec_first.result.yml')
        self.assert_sample_queue_size(REC_SAMPLE_STREAM, 0)
# Verify an event was raised and we are in our retry state
self.assert_event_received(ResourceAgentErrorEvent, 10)
self.assert_state_change(ResourceAgentState.STREAMING, 10)
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Must satisfy the signature
# [t,X,D,P] = sim_function(T,X0,D0,P0,I0);
import numpy as np
from scipy.integrate import ode
import matplotlib.pyplot as plt
import utils as U
import copyreg as cr
import types
PLOT = False
def _pickle_method(m):
    """_pickle_method
    Required for successful pickling of sim()
    """
    # copyreg is the Python 3 module name, so use the Python 3 bound-method
    # attributes (__self__/__func__); unbound methods (the old "im_self is
    # None" case) no longer exist on Python 3.
    return getattr, (m.__self__, m.__func__.__name__)
cr.pickle(types.MethodType, _pickle_method)
# As it is created only once, all methods should be static
# methods.
class SIM(object):
def __init__(self, plt, pvt_init_data):
property_checker = True
# atol = 1e-10
        # rtol = 1e-5
        rtol = 1e-6
if property_checker is not None:
max_step = 1e-2
else:
max_step = 0.0
nsteps = 1000
# tt,YY,dummy_D,dummy_P
        # The order of these calls is strict... do not change
# (1):set_integrator -> (2):set_solout -> (3):set_initial_value
self.solver = ode(dyn).set_integrator('dopri5', rtol=rtol, max_step=max_step, nsteps=nsteps) # (1)
@U.memoize2disk(U.memoize_hash_method)
def sim(self, TT, X0, D, P, I, property_checker):
property_violated_flag = False
num_dim_x = len(X0)
plot_data = ([np.empty(0, dtype=float), np.empty((0, num_dim_x), dtype=float)]
if PLOT else None)
Ti = TT[0]
Tf = TT[1]
T = Tf - Ti
#if property_checker is not None:
#violating_state = [()]
solver = self.solver
solver.set_solout(solout_fun(property_checker, plot_data)) # (2)
solver.set_initial_value(X0, t=0.0)
solver.set_f_params(U)
X_ = solver.integrate(T)
# Y = C*x + D*u
#if property_checker is not None:
if property_checker.check(Tf, X_):
property_violated_flag = True
dummy_D = np.zeros(D.shape)
dummy_P = np.zeros(P.shape)
ret_t = Tf
ret_X = X_
# ret_Y = Y
ret_D = dummy_D
ret_P = dummy_P
#plt.plot(plot_data[0] + Ti, plot_data[1][:, 0])
if PLOT:
plt.plot(plot_data[1][:, 0], plot_data[1][:, 1], 'b-', linewidth=0.5)
##plt.plot(plot_data[0] + Ti, np.tile(U, plot_data[0].shape))
return (ret_t, ret_X, ret_D, ret_P), property_violated_flag
def dyn_non_opt(t, X, u):
x1 = X[0]
x2 = X[1]
y1 = x2
y2 = 5.0 * (1 - x1 ** 2) * x2 - x1
return np.array([y1, y2])
def dyn(t, X, u):
# Bad things happen when you modify the passed in X.
# So, make a copy!
X = X.copy()
X[0], X[1] = (X[1], 5.0 * (1 - X[0] ** 2) * X[1] - X[0])
return X
def solout_fun(property_checker, plot_data):
def solout(t, Y):
if PLOT:
plot_data[0] = np.concatenate((plot_data[0], np.array([t])))
plot_data[1] = np.concatenate((plot_data[1], np.array([Y])))
# print Y
# print t, Y
if property_checker.check(t, Y):
#violating_state[0] = (np.copy(t), np.copy(Y))
# print 'violation found:', violating_state[0]
# return -1 to stop integration
return -1
else:
return 0
return solout
def sim_unfinished_everystep(T, XX, D, P, U, I):
sol = []
# atol = 1e-10
rtol = 1e-6
# set rtol and force a maximum of 1 step per call...
solver = ode(dyn).set_integrator('dopri5', rtol=rtol, nsteps=1)
solver.set_initial_value(XX, 0.0)
while solver.t < T:
solver.integrate(T, step=True)
sol.append([solver.t, solver.y])
dummy_D = np.zeros(D.shape)
dummy_P = np.zeros(P.shape)
ret_t = T
    ret_X = solver.y  # final state from the stepwise integration
ret_D = dummy_D
ret_P = dummy_P
return (ret_t, ret_X, ret_D, ret_P)
# def dyn(t, X):
# Y(1) = X(2);
# Y(2) = 5 * (1 - X(1)^2) * X(2) - X(1);
# return Y
# import numpy as np
# from scipy.integrate import ode
# import matplotlib.pyplot as plt
# import warnings
#
#
# def logistic(t, y, r):
# return r * y * (1.0 - y)
#
# r = .01
# t0 = 0
# y0 = 1e-5
# t1 = 5000.0
#
##backend = 'vode'
# backend = 'dopri5'
##backend = 'dop853'
#
# solver = ode(logistic).set_integrator(backend, nsteps=1)
#
# solver.set_initial_value(y0, t0).set_f_params(r)
## suppress Fortran-printed warning
# solver._integrator.iwork[2] = -1
#
# sol = []
# warnings.filterwarnings("ignore", category=UserWarning)
# while solver.t < t1:
# solver.integrate(t1, step=True)
# sol.append([solver.t, solver.y])
# warnings.resetwarnings()
# sol = np.array(sol)
#
# plt.plot(sol[:,0], sol[:,1], 'b.-')
# plt.show()
# 'dopri5'
#
# This is an explicit runge-kutta method of order (4)5 due to Dormand & Prince
# (with stepsize control and dense output). Authors:
# E. Hairer and G. Wanner Universite de Geneve, Dept. de Mathematiques CH-1211
# Geneve 24, Switzerland e-mail: [email protected],
# [email protected] This code is described in [HNW93].
# This integrator accepts the following parameters in set_integrator() method
# of the ode class:
#   atol : float or sequence  absolute tolerance for solution
#   rtol : float or sequence  relative tolerance for solution
#   nsteps : int  Maximum number of (internally defined) steps allowed during
#                 one call to the solver.
#   first_step : float
#   max_step : float
#   safety : float  Safety factor on new step selection (default 0.9)
#   ifactor : float
#   dfactor : float  Maximum factor to increase/decrease step size by in one step
#   beta : float  Beta parameter for stabilised step size control.
#   verbosity : int  Switch for printing messages (< 0 for no messages).
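def _example_dopri5_solver():
    # Illustrative sketch only (not part of the original script): shows how the
    # integrator parameters documented above could be passed to set_integrator().
    # The numeric values are arbitrary examples, not recommendations.
    return ode(dyn).set_integrator(
        'dopri5', atol=1e-10, rtol=1e-6, nsteps=1000,
        max_step=1e-2, safety=0.9, verbosity=-1)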
|
|
#------------------------------------------------------------------
#
# ElectroPi EXPLORER
#
# - A way for folks to turn WAVs into control codes.
#
# To use this script, record a ~0.5 second sample of your remote's
# control code at 22050 Hz in your DAW of choice. Then, export the
# recording as a mono, 16-bit WAV file with a 22050 Hz sample rate.
# Then, include the exported file as an argument to this script:
#
# sudo python decode.py recording.wav
#
#------------------------------------------------------------------
# Import modules
from __future__ import division
import sys
import wave, struct
import os
import time
rows, columns = os.popen('stty size', 'r').read().split() # Get console width for pretty output!
os.system("clear") # Clear the terminal
# Check if an input file was specified, die if it was not.
try:
inFile = sys.argv[1]
except:
print "You must specify an input file as an argument. (e.g. 'sudo python decode.py infile.wav')"
sys.exit(0)
# Function for progress bar printing
def drawProgressBar(percent, barLen=20, num=" ", sep=" ", den=" ", units=" "):
sys.stdout.write("\r")
progress = ""
for i in range(barLen):
if i < int(barLen * percent):
progress += "="
else:
progress += " "
sys.stdout.write("[%s] %.2f%% %s%s%s %s" % (progress, percent * 100,num,sep,den,units))
sys.stdout.flush()
# Function to get the median of a number set
def median(numbers):
    ordered = sorted(numbers)
    return (ordered[int(round((len(numbers) - 1) / 2.0))] + ordered[(len(numbers) - 1) // 2]) / 2.0
# Find the lowest and highest number of a set
def lowHigh(numbers):
    lowest = 100000
    highest = 0
    for item in numbers:
        if item > highest:
            highest = item
        if item < lowest:
            lowest = item
    return [lowest, highest]
# Passes data to drawProgressBar()
def printPercent(soFar,total):
percent = soFar/total
drawProgressBar(percent,50,soFar,"/",total,"samples")
start_time = time.time() # GO!
print "\n--------------------"
print "ElectroPi RF Decoder"
print "by Connor Nishijima"
print "--------------------\n"
print "Reading WAV file..."
waveFile = wave.open(inFile, 'r') # Open WAV file
inArray = []
# Print cool stats
print inFile,"opened."
print "Sample rate:",waveFile.getframerate()
print "Number of channels:",waveFile.getnchannels()
print "Total samples:",waveFile.getnframes()
# Check if the WAV has the proper format
if waveFile.getnchannels() != 1 or waveFile.getnframes() > 200000:
print "XYou must supply a mono .WAV file with a sample rate of 22050 Hz/44100Hz, no more than 5 seconds in length."
sys.exit(0)
sampleRateOK = False
if waveFile.getframerate() == 22050:
sampleRateOK = True
if waveFile.getframerate() == 44100:
sampleRateOK = True
if sampleRateOK == False:
print "You must supply a mono .WAV file with a sample rate of 22050 Hz/44100Hz, no more than 5 seconds in length."
sys.exit(0)
length = waveFile.getnframes()
print "File is",length,"samples /",(length/waveFile.getframerate()),"seconds long.\n"
is44KHz = False
if waveFile.getframerate() == 44100:
is44KHz = True
# Warn stupid people if they can't follow directions
if waveFile.getnframes() > 12000 and is44KHz == False:
print "\n*****\nWARNING: Supplying a clip longer than 0.5 seconds is usually redundant, and takes much longer to process.\nYour file is",(waveFile.getnframes()/waveFile.getframerate()),"seconds long.\n*****\n"
if waveFile.getnframes() > 23000 and is44KHz == True:
print "\n*****\nWARNING: Supplying a clip longer than 0.5 seconds is usually redundant, and takes much longer to process.\nYour file is",(waveFile.getnframes()/waveFile.getframerate()),"seconds long.\n*****\n"
print "Building binary array..."
# Read through every sample of the file. If the sample is above zero, that's HIGH. If it's negative, that's LOW.
# These binary values populate in inArray[].
i = 0
while i < length:
waveData = waveFile.readframes(1)
data = struct.unpack("<h", waveData)
if int(data[0]) > 0:
inArray.append("1")
if str(i)[-2:] == "00":
printPercent(i,length)
else:
inArray.append("0")
if str(i)[-2:] == "00":
printPercent(i,length)
if is44KHz == False:
i += 1
else:
i += 2
printPercent(length,length)
countArray = []
count = 0
i = 0
bit = "X"
lastBit = "X"
print "\nGetting pulse lengths..."
# Read through inArray and get the length in samples of each HIGH and LOW.
# These counts populate in countArray[].
while i < len(inArray):
bit = inArray[i]
if bit != lastBit:
lastBit = bit
countArray.append(count)
count = 0
else:
count += 1
if str(i)[-2:] == "00":
printPercent(i,len(inArray))
if is44KHz == False:
i += 1
else:
i += 2
printPercent(len(inArray),len(inArray))
print "\n\nPulse lengths:"
print countArray
lh = lowHigh(countArray) # Get the lowest and highest value in countArray[]
med = median(lh) # Median between the two
space = lh[1] # Code spacing in samples
outArray = []
totalSamples = 0
keep = 0
done = 0
i = 0
# Trim the pulse data down to a single code, using the long spaces to find its boundaries
while i < len(countArray):
length = countArray[i]
if length > med:
if keep == 1 and done == 0:
keep = 0
done = 1
if keep == 0 and done == 0:
keep = 1
else:
if done == 0 and keep == 1:
outArray.append(length)
i += 1
outArray.append(space)
print "\nCode pulse lengths:"
print outArray
bit = 0
outString = ""
first = 1
# Convert to final output string of binary
for item in outArray:
if bit == 0:
if first == 1:
first = 0
else:
outString = outString + ("0")
bit = 1
outString = outString + ("1" * item)
elif bit == 1:
if first == 1:
first = 0
else:
outString = outString + ("1")
bit = 0
outString = outString + ("0" * item)
print "\n\nCODE FOUND: " + ("="*(int(columns)-12))
print ("="*int(columns)) + "\n"
print outString
print "\n"+("="*int(columns))
print ("="*int(columns)) + "\n"
# Done!
elapsed_time = time.time() - start_time
elapsed_time = ("%.2f" % round(elapsed_time,2))
print "\nDecoder finished in",str(elapsed_time),"seconds! Your code is above. Each bit repesents a state of LOW or HIGH (0 or 1) for a period of ~45uS - or 1 sample at 22050 Hz.\n"
|
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generic simple functions used for python based templage generation."""
import re
import sys
import traceback
import default
import yaml
RFC1035_RE = re.compile(r'^[a-z][-a-z0-9]{1,61}[a-z0-9]{1}$')
class Error(Exception):
"""Common exception wrapper for template exceptions."""
pass
def AddDiskResourcesIfNeeded(context):
"""Checks context if disk resources need to be added."""
if default.DISK_RESOURCES in context.properties:
return context.properties[default.DISK_RESOURCES]
else:
return []
def AutoName(base, resource, *args):
"""Helper method to generate names automatically based on default."""
auto_name = '%s-%s' % (base, '-'.join(list(args) + [default.AKA[resource]]))
if not RFC1035_RE.match(auto_name):
raise Error('"%s" name for type %s does not match RFC1035 regex (%s)' %
(auto_name, resource, RFC1035_RE.pattern))
return auto_name
def AutoRef(base, resource, *args):
"""Helper method that builds a reference for an auto-named resource."""
return Ref(AutoName(base, resource, *args))
def OrderedItems(dict_obj):
"""Convenient method to yield sorted iteritems of a dictionary."""
keys = list(dict_obj.keys())
keys.sort()
for k in keys:
yield (k, dict_obj[k])
def ShortenZoneName(zone):
"""Given a string that looks like a zone name, creates a shorter version."""
geo, coord, number, letter = re.findall(r'(\w+)-(\w+)(\d)-(\w)', zone)[0]
geo = geo.lower() if len(geo) == 2 else default.LOC[geo.lower()]
coord = default.LOC[coord.lower()]
number = str(number)
letter = letter.lower()
return geo + '-' + coord + number + letter
def ZoneToRegion(zone):
"""Derives the region from a zone name."""
parts = zone.split('-')
if len(parts) != 3:
raise Error('Cannot derive region from zone "%s"' % zone)
return '-'.join(parts[:2])
def FormatException(message):
"""Adds more information to the exception."""
message = ('Exception Type: %s\n'
'Details: %s\n'
'Message: %s\n') % (sys.exc_info()[0], traceback.format_exc(), message)
return message
def Ref(name):
return '$(ref.%s.selfLink)' % name
def RefGroup(name):
return '$(ref.%s.instanceGroup)' % name
def GlobalComputeLink(project, collection, name):
return ''.join([default.COMPUTE_URL_BASE, 'projects/', project, '/global/',
collection, '/', name])
def LocalComputeLink(project, zone, key, value):
return ''.join([default.COMPUTE_URL_BASE, 'projects/', project, '/zones/',
zone, '/', key, '/', value])
def ReadContext(context, prop_key):
return (context.env['project'], context.properties.get('zone', None),
context.properties[prop_key])
def MakeLocalComputeLink(context, key):
project, zone, value = ReadContext(context, key)
if IsComputeLink(value):
return value
else:
return LocalComputeLink(project, zone, key + 's', value)
def MakeGlobalComputeLink(context, key):
project, _, value = ReadContext(context, key)
if IsComputeLink(value):
return value
else:
return GlobalComputeLink(project, key + 's', value)
def MakeSubnetworkComputeLink(context, key):
project, zone, value = ReadContext(context, key)
region = ZoneToRegion(zone)
return ''.join([default.COMPUTE_URL_BASE, 'projects/', project, '/regions/',
region, '/subnetworks/', value])
def MakeFQHN(context, name):
return '%s.c.%s.internal' % (name, context.env['project'])
# TODO(victorg): Consider moving this method to a different file
def MakeC2DImageLink(name, dev_mode=False):
if IsGlobalProjectShortcut(name) or name.startswith('http'):
return name
else:
if dev_mode:
return 'global/images/%s' % name
else:
return GlobalComputeLink(default.C2D_IMAGES, 'images', name)
def IsGlobalProjectShortcut(name):
return name.startswith('projects/') or name.startswith('global/')
def IsComputeLink(name):
return (name.startswith(default.COMPUTE_URL_BASE) or
name.startswith(default.REFERENCE_PREFIX))
def GetNamesAndTypes(resources_dict):
return [(d['name'], d['type']) for d in resources_dict]
def SummarizeResources(res_dict):
"""Summarizes the name of resources per resource type."""
result = {}
for res in res_dict:
result.setdefault(res['type'], []).append(res['name'])
return result
def ListPropertyValuesOfType(res_dict, prop, res_type):
"""Lists all the values for a property of a certain type."""
return [r['properties'][prop] for r in res_dict if r['type'] == res_type]
def MakeResource(resource_list, output_list=None):
"""Wrapper for a DM template basic spec."""
content = {'resources': resource_list}
if output_list:
content['outputs'] = output_list
return yaml.dump(content)
def TakeZoneOut(properties):
"""Given a properties dictionary, removes the zone specific information."""
def _CleanZoneUrl(value):
value = value.split('/')[-1] if IsComputeLink(value) else value
return value
for name in default.VM_ZONE_PROPERTIES:
if name in properties:
properties[name] = _CleanZoneUrl(properties[name])
if default.ZONE in properties:
properties.pop(default.ZONE)
if default.BOOTDISK in properties:
properties[default.BOOTDISK] = _CleanZoneUrl(properties[default.BOOTDISK])
if default.DISKS in properties:
for disk in properties[default.DISKS]:
# Don't touch references to other disks
if default.DISK_SOURCE in disk:
continue
if default.INITIALIZEP in disk:
disk_init = disk[default.INITIALIZEP]
if default.DISKTYPE in disk_init:
disk_init[default.DISKTYPE] = _CleanZoneUrl(disk_init[default.DISKTYPE])
def GenerateEmbeddableYaml(yaml_string):
# Because YAML is a space delimited format, we need to be careful about
# embedding one YAML document in another. This function takes in a string in
# YAML format and produces an equivalent YAML representation that can be
# inserted into arbitrary points of another YAML document. It does so by
# printing the YAML string in a single line format. Consistent ordering of
# the string is also guaranteed by using yaml.dump.
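  # Illustrative example (not in the original):
  #   GenerateEmbeddableYaml("a:\n  b: 1") -> "{a: {b: 1}}\n"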
yaml_object = yaml.load(yaml_string)
dumped_yaml = yaml.dump(yaml_object, default_flow_style=True)
return dumped_yaml
def FormatErrorsDec(func):
"""Decorator to format exceptions if they get raised."""
def FormatErrorsWrap(context):
try:
return func(context)
except Exception as e:
raise Error(FormatException(e.message))
return FormatErrorsWrap
|
|
# Copyright 2016-present Facebook. All Rights Reserved.
#
# revmap: trivial hg hash - linelog rev bidirectional map
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
import bisect
import io
import os
import struct
from mercurial.node import hex
from mercurial.pycompat import open
from mercurial import (
error as hgerror,
pycompat,
)
from . import error
# the revmap file format is straightforward:
#
# 8 bytes: header
# 1 byte : flag for linelog revision 1
# ? bytes: (optional) '\0'-terminated path string
# only exists if (flag & renameflag) != 0
# 20 bytes: hg hash for linelog revision 1
# 1 byte : flag for linelog revision 2
# ? bytes: (optional) '\0'-terminated path string
# 20 bytes: hg hash for linelog revision 2
# ....
#
# the implementation is kinda stupid: __init__ loads the whole revmap.
# no laziness. benchmark shows loading 10000 revisions is about 0.015
# seconds, which looks enough for our use-case. if this implementation
# becomes a bottleneck, we can change it to lazily read the file
# from the end.
# whether the changeset is in the side branch. i.e. not in the linear main
# branch but only got referenced by lines in merge changesets.
sidebranchflag = 1
# whether the changeset changes the file path (ie. is a rename)
renameflag = 2
# len(mercurial.node.nullid)
_hshlen = 20
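# Illustrative example (not part of the original module): with the format
# described above, a single record for a renamed revision is laid out roughly as
#
#   struct.pack(b'B', renameflag) + b'new/path\0' + hsh
#
# where hsh is the 20-byte binary node; a plain revision omits the path
# string, and sidebranchflag can be OR'ed into the flag byte.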
class revmap(object):
"""trivial hg bin hash - linelog rev bidirectional map
also stores a flag (uint8) for each revision, and track renames.
"""
HEADER = b'REVMAP1\0'
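    # Illustrative usage (not part of the original module), assuming `node`
    # is a 20-byte binary changeset hash:
    #
    #   rm = revmap()                  # in-memory map (no backing file)
    #   rev = rm.append(node)
    #   assert rm.rev2hsh(rev) == node
    #   assert rm.hsh2rev(node) == rev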
def __init__(self, path=None):
"""create or load the revmap, optionally associate to a file
if path is None, the revmap is entirely in-memory. the caller is
responsible for locking. concurrent writes to a same file is unsafe.
the caller needs to make sure one file is associated to at most one
revmap object at a time."""
self.path = path
self._rev2hsh = [None]
self._rev2flag = [None]
self._hsh2rev = {}
# since rename does not happen frequently, do not store path for every
# revision. self._renamerevs can be used for bisecting.
self._renamerevs = [0]
self._renamepaths = [b'']
self._lastmaxrev = -1
if path:
if os.path.exists(path):
self._load()
else:
# write the header so "append" can do incremental updates
self.flush()
def copyfrom(self, rhs):
"""copy the map data from another revmap. do not affect self.path"""
self._rev2hsh = rhs._rev2hsh[:]
self._rev2flag = rhs._rev2flag[:]
self._hsh2rev = rhs._hsh2rev.copy()
self._renamerevs = rhs._renamerevs[:]
self._renamepaths = rhs._renamepaths[:]
self._lastmaxrev = -1
@property
def maxrev(self):
"""return max linelog revision number"""
return len(self._rev2hsh) - 1
def append(self, hsh, sidebranch=False, path=None, flush=False):
"""add a binary hg hash and return the mapped linelog revision.
if flush is True, incrementally update the file.
"""
if hsh in self._hsh2rev:
raise error.CorruptedFileError(
b'%r is in revmap already' % hex(hsh)
)
if len(hsh) != _hshlen:
raise hgerror.ProgrammingError(
b'hsh must be %d-char long' % _hshlen
)
idx = len(self._rev2hsh)
flag = 0
if sidebranch:
flag |= sidebranchflag
if path is not None and path != self._renamepaths[-1]:
flag |= renameflag
self._renamerevs.append(idx)
self._renamepaths.append(path)
self._rev2hsh.append(hsh)
self._rev2flag.append(flag)
self._hsh2rev[hsh] = idx
if flush:
self.flush()
return idx
def rev2hsh(self, rev):
"""convert linelog revision to hg hash. return None if not found."""
if rev > self.maxrev or rev < 0:
return None
return self._rev2hsh[rev]
def rev2flag(self, rev):
"""get the flag (uint8) for a given linelog revision.
return None if revision does not exist.
"""
if rev > self.maxrev or rev < 0:
return None
return self._rev2flag[rev]
def rev2path(self, rev):
"""get the path for a given linelog revision.
return None if revision does not exist.
"""
if rev > self.maxrev or rev < 0:
return None
idx = bisect.bisect_right(self._renamerevs, rev) - 1
return self._renamepaths[idx]
def hsh2rev(self, hsh):
"""convert hg hash to linelog revision. return None if not found."""
return self._hsh2rev.get(hsh)
def clear(self, flush=False):
"""make the map empty. if flush is True, write to disk"""
# rev 0 is reserved, real rev starts from 1
self._rev2hsh = [None]
self._rev2flag = [None]
self._hsh2rev = {}
        self._renamerevs = [0]
        self._renamepaths = [b'']
self._lastmaxrev = -1
if flush:
self.flush()
def flush(self):
"""write the state down to the file"""
if not self.path:
return
if self._lastmaxrev == -1: # write the entire file
with open(self.path, b'wb') as f:
f.write(self.HEADER)
for i in pycompat.xrange(1, len(self._rev2hsh)):
self._writerev(i, f)
else: # append incrementally
with open(self.path, b'ab') as f:
for i in pycompat.xrange(
self._lastmaxrev + 1, len(self._rev2hsh)
):
self._writerev(i, f)
self._lastmaxrev = self.maxrev
def _load(self):
"""load state from file"""
if not self.path:
return
# use local variables in a loop. CPython uses LOAD_FAST for them,
# which is faster than both LOAD_CONST and LOAD_GLOBAL.
flaglen = 1
hshlen = _hshlen
with open(self.path, b'rb') as f:
if f.read(len(self.HEADER)) != self.HEADER:
raise error.CorruptedFileError()
self.clear(flush=False)
while True:
buf = f.read(flaglen)
if not buf:
break
flag = ord(buf)
rev = len(self._rev2hsh)
if flag & renameflag:
path = self._readcstr(f)
self._renamerevs.append(rev)
self._renamepaths.append(path)
hsh = f.read(hshlen)
if len(hsh) != hshlen:
raise error.CorruptedFileError()
self._hsh2rev[hsh] = rev
self._rev2flag.append(flag)
self._rev2hsh.append(hsh)
self._lastmaxrev = self.maxrev
def _writerev(self, rev, f):
"""append a revision data to file"""
flag = self._rev2flag[rev]
hsh = self._rev2hsh[rev]
f.write(struct.pack(b'B', flag))
if flag & renameflag:
path = self.rev2path(rev)
if path is None:
raise error.CorruptedFileError(b'cannot find path for %s' % rev)
f.write(path + b'\0')
f.write(hsh)
@staticmethod
def _readcstr(f):
"""read a C-language-like '\0'-terminated string"""
buf = b''
while True:
ch = f.read(1)
if not ch: # unexpected eof
raise error.CorruptedFileError()
if ch == b'\0':
break
buf += ch
return buf
def __contains__(self, f):
"""(fctx or (node, path)) -> bool.
test if (node, path) is in the map, and is not in a side branch.
f can be either a tuple of (node, path), or a fctx.
"""
if isinstance(f, tuple): # f: (node, path)
hsh, path = f
else: # f: fctx
hsh, path = f.node(), f.path()
rev = self.hsh2rev(hsh)
if rev is None:
return False
if path is not None and path != self.rev2path(rev):
return False
return (self.rev2flag(rev) & sidebranchflag) == 0
def getlastnode(path):
"""return the last hash in a revmap, without loading its full content.
this is equivalent to `m = revmap(path); m.rev2hsh(m.maxrev)`, but faster.
"""
hsh = None
try:
with open(path, b'rb') as f:
f.seek(-_hshlen, io.SEEK_END)
if f.tell() > len(revmap.HEADER):
hsh = f.read(_hshlen)
except IOError:
pass
return hsh
|
|
"""The tests for the Logger component."""
from collections import defaultdict
import logging
from unittest.mock import Mock, patch
import pytest
from homeassistant.components import logger
from homeassistant.components.logger import LOGSEVERITY
from homeassistant.setup import async_setup_component
HASS_NS = "unused.homeassistant"
COMPONENTS_NS = f"{HASS_NS}.components"
ZONE_NS = f"{COMPONENTS_NS}.zone"
GROUP_NS = f"{COMPONENTS_NS}.group"
CONFIGED_NS = "otherlibx"
UNCONFIG_NS = "unconfigurednamespace"
@pytest.fixture(autouse=True)
def restore_logging_class():
"""Restore logging class."""
klass = logging.getLoggerClass()
yield
logging.setLoggerClass(klass)
async def test_log_filtering(hass, caplog):
"""Test logging filters."""
assert await async_setup_component(
hass,
"logger",
{
"logger": {
"default": "warning",
"logs": {
"test.filter": "info",
},
"filters": {
"test.filter": [
"doesntmatchanything",
".*shouldfilterall.*",
"^filterthis:.*",
],
"test.other_filter": [".*otherfilterer"],
},
}
},
)
await hass.async_block_till_done()
filter_logger = logging.getLogger("test.filter")
def msg_test(logger, result, message, *args):
logger.error(message, *args)
formatted_message = message % args
assert (formatted_message in caplog.text) == result
caplog.clear()
msg_test(
filter_logger, False, "this line containing shouldfilterall should be filtered"
)
msg_test(filter_logger, True, "this line should not be filtered filterthis:")
msg_test(filter_logger, False, "filterthis: should be filtered")
msg_test(filter_logger, False, "format string shouldfilter%s", "all")
msg_test(filter_logger, True, "format string shouldfilter%s", "not")
# Filtering should work even if log level is modified
await hass.services.async_call(
"logger",
"set_level",
{"test.filter": "warning"},
blocking=True,
)
assert filter_logger.getEffectiveLevel() == logging.WARNING
msg_test(
filter_logger,
False,
"this line containing shouldfilterall should still be filtered",
)
    # Filtering should be scoped to each logger
msg_test(
filter_logger, True, "this line containing otherfilterer should not be filtered"
)
msg_test(
logging.getLogger("test.other_filter"),
False,
"this line containing otherfilterer SHOULD be filtered",
)
async def test_setting_level(hass):
"""Test we set log levels."""
mocks = defaultdict(Mock)
with patch("logging.getLogger", mocks.__getitem__):
assert await async_setup_component(
hass,
"logger",
{
"logger": {
"default": "warning",
"logs": {
"test": "info",
"test.child": "debug",
"test.child.child": "warning",
},
}
},
)
await hass.async_block_till_done()
assert len(mocks) == 4
assert len(mocks[""].orig_setLevel.mock_calls) == 1
assert mocks[""].orig_setLevel.mock_calls[0][1][0] == LOGSEVERITY["WARNING"]
assert len(mocks["test"].orig_setLevel.mock_calls) == 1
assert mocks["test"].orig_setLevel.mock_calls[0][1][0] == LOGSEVERITY["INFO"]
assert len(mocks["test.child"].orig_setLevel.mock_calls) == 1
assert mocks["test.child"].orig_setLevel.mock_calls[0][1][0] == LOGSEVERITY["DEBUG"]
assert len(mocks["test.child.child"].orig_setLevel.mock_calls) == 1
assert (
mocks["test.child.child"].orig_setLevel.mock_calls[0][1][0]
== LOGSEVERITY["WARNING"]
)
# Test set default level
with patch("logging.getLogger", mocks.__getitem__):
await hass.services.async_call(
"logger", "set_default_level", {"level": "fatal"}, blocking=True
)
assert len(mocks[""].orig_setLevel.mock_calls) == 2
assert mocks[""].orig_setLevel.mock_calls[1][1][0] == LOGSEVERITY["FATAL"]
# Test update other loggers
with patch("logging.getLogger", mocks.__getitem__):
await hass.services.async_call(
"logger",
"set_level",
{"test.child": "info", "new_logger": "notset"},
blocking=True,
)
assert len(mocks) == 5
assert len(mocks["test.child"].orig_setLevel.mock_calls) == 2
assert mocks["test.child"].orig_setLevel.mock_calls[1][1][0] == LOGSEVERITY["INFO"]
assert len(mocks["new_logger"].orig_setLevel.mock_calls) == 1
assert (
mocks["new_logger"].orig_setLevel.mock_calls[0][1][0] == LOGSEVERITY["NOTSET"]
)
async def test_can_set_level(hass):
"""Test logger propagation."""
assert await async_setup_component(
hass,
"logger",
{
"logger": {
"logs": {
CONFIGED_NS: "warning",
f"{CONFIGED_NS}.info": "info",
f"{CONFIGED_NS}.debug": "debug",
HASS_NS: "warning",
COMPONENTS_NS: "info",
ZONE_NS: "debug",
GROUP_NS: "info",
},
}
},
)
assert logging.getLogger(UNCONFIG_NS).level == logging.NOTSET
assert logging.getLogger(UNCONFIG_NS).isEnabledFor(logging.CRITICAL) is True
assert (
logging.getLogger(f"{UNCONFIG_NS}.any").isEnabledFor(logging.CRITICAL) is True
)
assert (
logging.getLogger(f"{UNCONFIG_NS}.any.any").isEnabledFor(logging.CRITICAL)
is True
)
assert logging.getLogger(CONFIGED_NS).isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(CONFIGED_NS).isEnabledFor(logging.WARNING) is True
assert logging.getLogger(f"{CONFIGED_NS}.any").isEnabledFor(logging.WARNING) is True
assert (
logging.getLogger(f"{CONFIGED_NS}.any.any").isEnabledFor(logging.WARNING)
is True
)
assert logging.getLogger(f"{CONFIGED_NS}.info").isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(f"{CONFIGED_NS}.info").isEnabledFor(logging.INFO) is True
assert (
logging.getLogger(f"{CONFIGED_NS}.info.any").isEnabledFor(logging.DEBUG)
is False
)
assert (
logging.getLogger(f"{CONFIGED_NS}.info.any").isEnabledFor(logging.INFO) is True
)
assert logging.getLogger(f"{CONFIGED_NS}.debug").isEnabledFor(logging.DEBUG) is True
assert (
logging.getLogger(f"{CONFIGED_NS}.debug.any").isEnabledFor(logging.DEBUG)
is True
)
assert logging.getLogger(HASS_NS).isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(HASS_NS).isEnabledFor(logging.WARNING) is True
assert logging.getLogger(COMPONENTS_NS).isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(COMPONENTS_NS).isEnabledFor(logging.WARNING) is True
assert logging.getLogger(COMPONENTS_NS).isEnabledFor(logging.INFO) is True
assert logging.getLogger(GROUP_NS).isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(GROUP_NS).isEnabledFor(logging.WARNING) is True
assert logging.getLogger(GROUP_NS).isEnabledFor(logging.INFO) is True
assert logging.getLogger(f"{GROUP_NS}.any").isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(f"{GROUP_NS}.any").isEnabledFor(logging.WARNING) is True
assert logging.getLogger(f"{GROUP_NS}.any").isEnabledFor(logging.INFO) is True
assert logging.getLogger(ZONE_NS).isEnabledFor(logging.DEBUG) is True
assert logging.getLogger(f"{ZONE_NS}.any").isEnabledFor(logging.DEBUG) is True
await hass.services.async_call(
logger.DOMAIN, "set_level", {f"{UNCONFIG_NS}.any": "debug"}, blocking=True
)
assert logging.getLogger(UNCONFIG_NS).level == logging.NOTSET
assert logging.getLogger(f"{UNCONFIG_NS}.any").level == logging.DEBUG
assert logging.getLogger(UNCONFIG_NS).level == logging.NOTSET
await hass.services.async_call(
logger.DOMAIN, "set_default_level", {"level": "debug"}, blocking=True
)
assert logging.getLogger(UNCONFIG_NS).isEnabledFor(logging.DEBUG) is True
assert logging.getLogger(f"{UNCONFIG_NS}.any").isEnabledFor(logging.DEBUG) is True
assert (
logging.getLogger(f"{UNCONFIG_NS}.any.any").isEnabledFor(logging.DEBUG) is True
)
assert logging.getLogger("").isEnabledFor(logging.DEBUG) is True
assert logging.getLogger(COMPONENTS_NS).isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(GROUP_NS).isEnabledFor(logging.DEBUG) is False
logging.getLogger(CONFIGED_NS).setLevel(logging.INFO)
assert logging.getLogger(CONFIGED_NS).level == logging.WARNING
logging.getLogger("").setLevel(logging.NOTSET)
|
|
# Copyright (C) 2007-2012 Michael Foord & the mock team
# E-mail: fuzzyman AT voidspace DOT org DOT uk
# http://www.voidspace.org.uk/python/mock/
from tests.support import (
callable, unittest2, inPy3k, is_instance, next
)
import copy
import pickle
import sys
import mock
from mock import (
call, DEFAULT, patch, sentinel,
MagicMock, Mock, NonCallableMock,
NonCallableMagicMock, _CallList,
create_autospec
)
try:
unicode
except NameError:
unicode = str
class Iter(object):
def __init__(self):
self.thing = iter(['this', 'is', 'an', 'iter'])
def __iter__(self):
return self
def next(self):
return next(self.thing)
__next__ = next
class Subclass(MagicMock):
pass
class Thing(object):
attribute = 6
foo = 'bar'
class MockTest(unittest2.TestCase):
def test_all(self):
# if __all__ is badly defined then import * will raise an error
# We have to exec it because you can't import * inside a method
# in Python 3
exec("from mock import *")
def test_constructor(self):
mock = Mock()
self.assertFalse(mock.called, "called not initialised correctly")
self.assertEqual(mock.call_count, 0,
"call_count not initialised correctly")
self.assertTrue(is_instance(mock.return_value, Mock),
"return_value not initialised correctly")
self.assertEqual(mock.call_args, None,
"call_args not initialised correctly")
self.assertEqual(mock.call_args_list, [],
"call_args_list not initialised correctly")
self.assertEqual(mock.method_calls, [],
"method_calls not initialised correctly")
# Can't use hasattr for this test as it always returns True on a mock
self.assertFalse('_items' in mock.__dict__,
"default mock should not have '_items' attribute")
self.assertIsNone(mock._mock_parent,
"parent not initialised correctly")
self.assertIsNone(mock._mock_methods,
"methods not initialised correctly")
self.assertEqual(mock._mock_children, {},
"children not initialised incorrectly")
def test_unicode_not_broken(self):
# This used to raise an exception with Python 2.5 and Mock 0.4
unicode(Mock())
def test_return_value_in_constructor(self):
mock = Mock(return_value=None)
self.assertIsNone(mock.return_value,
"return value in constructor not honoured")
def test_repr(self):
mock = Mock(name='foo')
self.assertIn('foo', repr(mock))
self.assertIn("'%s'" % id(mock), repr(mock))
mocks = [(Mock(), 'mock'), (Mock(name='bar'), 'bar')]
for mock, name in mocks:
self.assertIn('%s.bar' % name, repr(mock.bar))
self.assertIn('%s.foo()' % name, repr(mock.foo()))
self.assertIn('%s.foo().bing' % name, repr(mock.foo().bing))
self.assertIn('%s()' % name, repr(mock()))
self.assertIn('%s()()' % name, repr(mock()()))
self.assertIn('%s()().foo.bar.baz().bing' % name,
repr(mock()().foo.bar.baz().bing))
def test_repr_with_spec(self):
class X(object):
pass
mock = Mock(spec=X)
self.assertIn(" spec='X' ", repr(mock))
mock = Mock(spec=X())
self.assertIn(" spec='X' ", repr(mock))
mock = Mock(spec_set=X)
self.assertIn(" spec_set='X' ", repr(mock))
mock = Mock(spec_set=X())
self.assertIn(" spec_set='X' ", repr(mock))
mock = Mock(spec=X, name='foo')
self.assertIn(" spec='X' ", repr(mock))
self.assertIn(" name='foo' ", repr(mock))
mock = Mock(name='foo')
self.assertNotIn("spec", repr(mock))
mock = Mock()
self.assertNotIn("spec", repr(mock))
mock = Mock(spec=['foo'])
self.assertNotIn("spec", repr(mock))
def test_side_effect(self):
mock = Mock()
def effect(*args, **kwargs):
raise SystemError('kablooie')
mock.side_effect = effect
self.assertRaises(SystemError, mock, 1, 2, fish=3)
mock.assert_called_with(1, 2, fish=3)
results = [1, 2, 3]
def effect():
return results.pop()
mock.side_effect = effect
self.assertEqual([mock(), mock(), mock()], [3, 2, 1],
"side effect not used correctly")
mock = Mock(side_effect=sentinel.SideEffect)
self.assertEqual(mock.side_effect, sentinel.SideEffect,
"side effect in constructor not used")
def side_effect():
return DEFAULT
mock = Mock(side_effect=side_effect, return_value=sentinel.RETURN)
self.assertEqual(mock(), sentinel.RETURN)
@unittest2.skipUnless('java' in sys.platform,
'This test only applies to Jython')
def test_java_exception_side_effect(self):
import java
mock = Mock(side_effect=java.lang.RuntimeException("Boom!"))
# can't use assertRaises with java exceptions
try:
mock(1, 2, fish=3)
except java.lang.RuntimeException:
pass
else:
self.fail('java exception not raised')
        mock.assert_called_with(1, 2, fish=3)
def test_reset_mock(self):
parent = Mock()
spec = ["something"]
mock = Mock(name="child", parent=parent, spec=spec)
mock(sentinel.Something, something=sentinel.SomethingElse)
something = mock.something
mock.something()
mock.side_effect = sentinel.SideEffect
return_value = mock.return_value
return_value()
mock.reset_mock()
self.assertEqual(mock._mock_name, "child",
"name incorrectly reset")
self.assertEqual(mock._mock_parent, parent,
"parent incorrectly reset")
self.assertEqual(mock._mock_methods, spec,
"methods incorrectly reset")
self.assertFalse(mock.called, "called not reset")
self.assertEqual(mock.call_count, 0, "call_count not reset")
self.assertEqual(mock.call_args, None, "call_args not reset")
self.assertEqual(mock.call_args_list, [], "call_args_list not reset")
self.assertEqual(mock.method_calls, [],
"method_calls not initialised correctly: %r != %r" %
(mock.method_calls, []))
self.assertEqual(mock.mock_calls, [])
self.assertEqual(mock.side_effect, sentinel.SideEffect,
"side_effect incorrectly reset")
self.assertEqual(mock.return_value, return_value,
"return_value incorrectly reset")
self.assertFalse(return_value.called, "return value mock not reset")
self.assertEqual(mock._mock_children, {'something': something},
"children reset incorrectly")
self.assertEqual(mock.something, something,
"children incorrectly cleared")
self.assertFalse(mock.something.called, "child not reset")
def test_reset_mock_recursion(self):
mock = Mock()
mock.return_value = mock
# used to cause recursion
mock.reset_mock()
def test_call(self):
mock = Mock()
self.assertTrue(is_instance(mock.return_value, Mock),
"Default return_value should be a Mock")
result = mock()
self.assertEqual(mock(), result,
"different result from consecutive calls")
mock.reset_mock()
ret_val = mock(sentinel.Arg)
self.assertTrue(mock.called, "called not set")
        self.assertEqual(mock.call_count, 1, "call_count incorrect")
self.assertEqual(mock.call_args, ((sentinel.Arg,), {}),
"call_args not set")
self.assertEqual(mock.call_args_list, [((sentinel.Arg,), {})],
"call_args_list not initialised correctly")
mock.return_value = sentinel.ReturnValue
ret_val = mock(sentinel.Arg, key=sentinel.KeyArg)
self.assertEqual(ret_val, sentinel.ReturnValue,
"incorrect return value")
self.assertEqual(mock.call_count, 2, "call_count incorrect")
self.assertEqual(mock.call_args,
((sentinel.Arg,), {'key': sentinel.KeyArg}),
"call_args not set")
self.assertEqual(mock.call_args_list, [
((sentinel.Arg,), {}),
((sentinel.Arg,), {'key': sentinel.KeyArg})
],
"call_args_list not set")
def test_call_args_comparison(self):
mock = Mock()
mock()
mock(sentinel.Arg)
mock(kw=sentinel.Kwarg)
mock(sentinel.Arg, kw=sentinel.Kwarg)
self.assertEqual(mock.call_args_list, [
(),
((sentinel.Arg,),),
({"kw": sentinel.Kwarg},),
((sentinel.Arg,), {"kw": sentinel.Kwarg})
])
self.assertEqual(mock.call_args,
((sentinel.Arg,), {"kw": sentinel.Kwarg}))
def test_assert_called_with(self):
mock = Mock()
mock()
# Will raise an exception if it fails
mock.assert_called_with()
self.assertRaises(AssertionError, mock.assert_called_with, 1)
mock.reset_mock()
self.assertRaises(AssertionError, mock.assert_called_with)
mock(1, 2, 3, a='fish', b='nothing')
mock.assert_called_with(1, 2, 3, a='fish', b='nothing')
def test_assert_called_once_with(self):
mock = Mock()
mock()
# Will raise an exception if it fails
mock.assert_called_once_with()
mock()
self.assertRaises(AssertionError, mock.assert_called_once_with)
mock.reset_mock()
self.assertRaises(AssertionError, mock.assert_called_once_with)
mock('foo', 'bar', baz=2)
mock.assert_called_once_with('foo', 'bar', baz=2)
mock.reset_mock()
mock('foo', 'bar', baz=2)
self.assertRaises(
AssertionError,
lambda: mock.assert_called_once_with('bob', 'bar', baz=2)
)
def test_attribute_access_returns_mocks(self):
mock = Mock()
something = mock.something
self.assertTrue(is_instance(something, Mock), "attribute isn't a mock")
self.assertEqual(mock.something, something,
"different attributes returned for same name")
# Usage example
mock = Mock()
mock.something.return_value = 3
self.assertEqual(mock.something(), 3, "method returned wrong value")
self.assertTrue(mock.something.called,
"method didn't record being called")
def test_attributes_have_name_and_parent_set(self):
mock = Mock()
something = mock.something
self.assertEqual(something._mock_name, "something",
"attribute name not set correctly")
self.assertEqual(something._mock_parent, mock,
"attribute parent not set correctly")
def test_method_calls_recorded(self):
mock = Mock()
mock.something(3, fish=None)
mock.something_else.something(6, cake=sentinel.Cake)
self.assertEqual(mock.something_else.method_calls,
[("something", (6,), {'cake': sentinel.Cake})],
"method calls not recorded correctly")
self.assertEqual(mock.method_calls, [
("something", (3,), {'fish': None}),
("something_else.something", (6,), {'cake': sentinel.Cake})
],
"method calls not recorded correctly")
def test_method_calls_compare_easily(self):
mock = Mock()
mock.something()
self.assertEqual(mock.method_calls, [('something',)])
self.assertEqual(mock.method_calls, [('something', (), {})])
mock = Mock()
mock.something('different')
self.assertEqual(mock.method_calls, [('something', ('different',))])
self.assertEqual(mock.method_calls,
[('something', ('different',), {})])
mock = Mock()
mock.something(x=1)
self.assertEqual(mock.method_calls, [('something', {'x': 1})])
self.assertEqual(mock.method_calls, [('something', (), {'x': 1})])
mock = Mock()
mock.something('different', some='more')
self.assertEqual(mock.method_calls, [
('something', ('different',), {'some': 'more'})
])
def test_only_allowed_methods_exist(self):
for spec in ['something'], ('something',):
for arg in 'spec', 'spec_set':
mock = Mock(**{arg: spec})
# this should be allowed
mock.something
self.assertRaisesRegexp(
AttributeError,
"Mock object has no attribute 'something_else'",
getattr, mock, 'something_else'
)
def test_from_spec(self):
class Something(object):
x = 3
__something__ = None
def y(self):
pass
def test_attributes(mock):
# should work
mock.x
mock.y
mock.__something__
self.assertRaisesRegexp(
AttributeError,
"Mock object has no attribute 'z'",
getattr, mock, 'z'
)
self.assertRaisesRegexp(
AttributeError,
"Mock object has no attribute '__foobar__'",
getattr, mock, '__foobar__'
)
test_attributes(Mock(spec=Something))
test_attributes(Mock(spec=Something()))
def test_wraps_calls(self):
real = Mock()
mock = Mock(wraps=real)
self.assertEqual(mock(), real())
real.reset_mock()
mock(1, 2, fish=3)
real.assert_called_with(1, 2, fish=3)
def test_wraps_call_with_nondefault_return_value(self):
real = Mock()
mock = Mock(wraps=real)
mock.return_value = 3
self.assertEqual(mock(), 3)
self.assertFalse(real.called)
def test_wraps_attributes(self):
class Real(object):
attribute = Mock()
real = Real()
mock = Mock(wraps=real)
self.assertEqual(mock.attribute(), real.attribute())
self.assertRaises(AttributeError, lambda: mock.fish)
self.assertNotEqual(mock.attribute, real.attribute)
result = mock.attribute.frog(1, 2, fish=3)
Real.attribute.frog.assert_called_with(1, 2, fish=3)
self.assertEqual(result, Real.attribute.frog())
def test_exceptional_side_effect(self):
mock = Mock(side_effect=AttributeError)
self.assertRaises(AttributeError, mock)
mock = Mock(side_effect=AttributeError('foo'))
self.assertRaises(AttributeError, mock)
def test_baseexceptional_side_effect(self):
mock = Mock(side_effect=KeyboardInterrupt)
self.assertRaises(KeyboardInterrupt, mock)
mock = Mock(side_effect=KeyboardInterrupt('foo'))
self.assertRaises(KeyboardInterrupt, mock)
def test_assert_called_with_message(self):
mock = Mock()
self.assertRaisesRegexp(AssertionError, 'Not called',
mock.assert_called_with)
def test__name__(self):
mock = Mock()
self.assertRaises(AttributeError, lambda: mock.__name__)
mock.__name__ = 'foo'
self.assertEqual(mock.__name__, 'foo')
def test_spec_list_subclass(self):
class Sub(list):
pass
mock = Mock(spec=Sub(['foo']))
mock.append(3)
mock.append.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock, 'foo')
def test_spec_class(self):
class X(object):
pass
mock = Mock(spec=X)
self.assertTrue(isinstance(mock, X))
mock = Mock(spec=X())
self.assertTrue(isinstance(mock, X))
self.assertIs(mock.__class__, X)
self.assertEqual(Mock().__class__.__name__, 'Mock')
mock = Mock(spec_set=X)
self.assertTrue(isinstance(mock, X))
mock = Mock(spec_set=X())
self.assertTrue(isinstance(mock, X))
def test_setting_attribute_with_spec_set(self):
class X(object):
y = 3
mock = Mock(spec=X)
mock.x = 'foo'
mock = Mock(spec_set=X)
def set_attr():
mock.x = 'foo'
mock.y = 'foo'
self.assertRaises(AttributeError, set_attr)
def test_copy(self):
current = sys.getrecursionlimit()
self.addCleanup(sys.setrecursionlimit, current)
# can't use sys.maxint as this doesn't exist in Python 3
sys.setrecursionlimit(int(10e8))
# this segfaults without the fix in place
copy.copy(Mock())
@unittest2.skipIf(inPy3k, "no old style classes in Python 3")
def test_spec_old_style_classes(self):
class Foo:
bar = 7
mock = Mock(spec=Foo)
mock.bar = 6
self.assertRaises(AttributeError, lambda: mock.foo)
mock = Mock(spec=Foo())
mock.bar = 6
self.assertRaises(AttributeError, lambda: mock.foo)
@unittest2.skipIf(inPy3k, "no old style classes in Python 3")
def test_spec_set_old_style_classes(self):
class Foo:
bar = 7
mock = Mock(spec_set=Foo)
mock.bar = 6
self.assertRaises(AttributeError, lambda: mock.foo)
def _set():
mock.foo = 3
self.assertRaises(AttributeError, _set)
mock = Mock(spec_set=Foo())
mock.bar = 6
self.assertRaises(AttributeError, lambda: mock.foo)
def _set():
mock.foo = 3
self.assertRaises(AttributeError, _set)
def test_subclass_with_properties(self):
class SubClass(Mock):
def _get(self):
return 3
def _set(self, value):
raise NameError('strange error')
some_attribute = property(_get, _set)
s = SubClass(spec_set=SubClass)
self.assertEqual(s.some_attribute, 3)
def test():
s.some_attribute = 3
self.assertRaises(NameError, test)
def test():
s.foo = 'bar'
self.assertRaises(AttributeError, test)
def test_setting_call(self):
mock = Mock()
def __call__(self, a):
return self._mock_call(a)
type(mock).__call__ = __call__
mock('one')
mock.assert_called_with('one')
self.assertRaises(TypeError, mock, 'one', 'two')
@unittest2.skipUnless(sys.version_info[:2] >= (2, 6),
"__dir__ not available until Python 2.6 or later")
def test_dir(self):
mock = Mock()
attrs = set(dir(mock))
type_attrs = set([m for m in dir(Mock) if not m.startswith('_')])
# all public attributes from the type are included
self.assertEqual(set(), type_attrs - attrs)
# creates these attributes
mock.a, mock.b
self.assertIn('a', dir(mock))
self.assertIn('b', dir(mock))
# instance attributes
mock.c = mock.d = None
self.assertIn('c', dir(mock))
self.assertIn('d', dir(mock))
# magic methods
mock.__iter__ = lambda s: iter([])
self.assertIn('__iter__', dir(mock))
@unittest2.skipUnless(sys.version_info[:2] >= (2, 6),
"__dir__ not available until Python 2.6 or later")
def test_dir_from_spec(self):
mock = Mock(spec=unittest2.TestCase)
testcase_attrs = set(dir(unittest2.TestCase))
attrs = set(dir(mock))
# all attributes from the spec are included
self.assertEqual(set(), testcase_attrs - attrs)
# shadow a sys attribute
mock.version = 3
self.assertEqual(dir(mock).count('version'), 1)
@unittest2.skipUnless(sys.version_info[:2] >= (2, 6),
"__dir__ not available until Python 2.6 or later")
def test_filter_dir(self):
patcher = patch.object(mock, 'FILTER_DIR', False)
patcher.start()
try:
attrs = set(dir(Mock()))
type_attrs = set(dir(Mock))
# ALL attributes from the type are included
self.assertEqual(set(), type_attrs - attrs)
finally:
patcher.stop()
def test_configure_mock(self):
mock = Mock(foo='bar')
self.assertEqual(mock.foo, 'bar')
mock = MagicMock(foo='bar')
self.assertEqual(mock.foo, 'bar')
kwargs = {'side_effect': KeyError, 'foo.bar.return_value': 33,
'foo': MagicMock()}
mock = Mock(**kwargs)
self.assertRaises(KeyError, mock)
self.assertEqual(mock.foo.bar(), 33)
self.assertIsInstance(mock.foo, MagicMock)
mock = Mock()
mock.configure_mock(**kwargs)
self.assertRaises(KeyError, mock)
self.assertEqual(mock.foo.bar(), 33)
self.assertIsInstance(mock.foo, MagicMock)
def assertRaisesWithMsg(self, exception, message, func, *args, **kwargs):
# needed because assertRaisesRegex doesn't work easily with newlines
try:
func(*args, **kwargs)
except:
instance = sys.exc_info()[1]
self.assertIsInstance(instance, exception)
else:
self.fail('Exception %r not raised' % (exception,))
msg = str(instance)
self.assertEqual(msg, message)
def test_assert_called_with_failure_message(self):
mock = NonCallableMock()
expected = "mock(1, '2', 3, bar='foo')"
message = 'Expected call: %s\nNot called'
self.assertRaisesWithMsg(
AssertionError, message % (expected,),
mock.assert_called_with, 1, '2', 3, bar='foo'
)
mock.foo(1, '2', 3, foo='foo')
asserters = [
mock.foo.assert_called_with, mock.foo.assert_called_once_with
]
for meth in asserters:
actual = "foo(1, '2', 3, foo='foo')"
expected = "foo(1, '2', 3, bar='foo')"
message = 'Expected call: %s\nActual call: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual),
meth, 1, '2', 3, bar='foo'
)
# just kwargs
for meth in asserters:
actual = "foo(1, '2', 3, foo='foo')"
expected = "foo(bar='foo')"
message = 'Expected call: %s\nActual call: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual),
meth, bar='foo'
)
# just args
for meth in asserters:
actual = "foo(1, '2', 3, foo='foo')"
expected = "foo(1, 2, 3)"
message = 'Expected call: %s\nActual call: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual),
meth, 1, 2, 3
)
# empty
for meth in asserters:
actual = "foo(1, '2', 3, foo='foo')"
expected = "foo()"
message = 'Expected call: %s\nActual call: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual), meth
)
def test_mock_calls(self):
mock = MagicMock()
# need to do this because MagicMock.mock_calls used to just return
# a MagicMock which also returned a MagicMock when __eq__ was called
self.assertIs(mock.mock_calls == [], True)
mock = MagicMock()
mock()
expected = [('', (), {})]
self.assertEqual(mock.mock_calls, expected)
mock.foo()
expected.append(call.foo())
self.assertEqual(mock.mock_calls, expected)
# intermediate mock_calls work too
self.assertEqual(mock.foo.mock_calls, [('', (), {})])
mock = MagicMock()
mock().foo(1, 2, 3, a=4, b=5)
expected = [
('', (), {}), ('().foo', (1, 2, 3), dict(a=4, b=5))
]
self.assertEqual(mock.mock_calls, expected)
self.assertEqual(mock.return_value.foo.mock_calls,
[('', (1, 2, 3), dict(a=4, b=5))])
self.assertEqual(mock.return_value.mock_calls,
[('foo', (1, 2, 3), dict(a=4, b=5))])
mock = MagicMock()
mock().foo.bar().baz()
expected = [
('', (), {}), ('().foo.bar', (), {}),
('().foo.bar().baz', (), {})
]
self.assertEqual(mock.mock_calls, expected)
self.assertEqual(mock().mock_calls,
call.foo.bar().baz().call_list())
for kwargs in dict(), dict(name='bar'):
mock = MagicMock(**kwargs)
int(mock.foo)
expected = [('foo.__int__', (), {})]
self.assertEqual(mock.mock_calls, expected)
mock = MagicMock(**kwargs)
mock.a()()
expected = [('a', (), {}), ('a()', (), {})]
self.assertEqual(mock.mock_calls, expected)
self.assertEqual(mock.a().mock_calls, [call()])
mock = MagicMock(**kwargs)
mock(1)(2)(3)
self.assertEqual(mock.mock_calls, call(1)(2)(3).call_list())
self.assertEqual(mock().mock_calls, call(2)(3).call_list())
self.assertEqual(mock()().mock_calls, call(3).call_list())
mock = MagicMock(**kwargs)
mock(1)(2)(3).a.b.c(4)
self.assertEqual(mock.mock_calls,
call(1)(2)(3).a.b.c(4).call_list())
self.assertEqual(mock().mock_calls,
call(2)(3).a.b.c(4).call_list())
self.assertEqual(mock()().mock_calls,
call(3).a.b.c(4).call_list())
mock = MagicMock(**kwargs)
int(mock().foo.bar().baz())
last_call = ('().foo.bar().baz().__int__', (), {})
self.assertEqual(mock.mock_calls[-1], last_call)
self.assertEqual(mock().mock_calls,
call.foo.bar().baz().__int__().call_list())
self.assertEqual(mock().foo.bar().mock_calls,
call.baz().__int__().call_list())
self.assertEqual(mock().foo.bar().baz.mock_calls,
call().__int__().call_list())
def test_subclassing(self):
class Subclass(Mock):
pass
mock = Subclass()
self.assertIsInstance(mock.foo, Subclass)
self.assertIsInstance(mock(), Subclass)
class Subclass(Mock):
def _get_child_mock(self, **kwargs):
return Mock(**kwargs)
mock = Subclass()
self.assertNotIsInstance(mock.foo, Subclass)
self.assertNotIsInstance(mock(), Subclass)
def test_arg_lists(self):
mocks = [
Mock(),
MagicMock(),
NonCallableMock(),
NonCallableMagicMock()
]
def assert_attrs(mock):
names = 'call_args_list', 'method_calls', 'mock_calls'
for name in names:
attr = getattr(mock, name)
self.assertIsInstance(attr, _CallList)
self.assertIsInstance(attr, list)
self.assertEqual(attr, [])
for mock in mocks:
assert_attrs(mock)
if callable(mock):
mock()
mock(1, 2)
mock(a=3)
mock.reset_mock()
assert_attrs(mock)
mock.foo()
mock.foo.bar(1, a=3)
mock.foo(1).bar().baz(3)
mock.reset_mock()
assert_attrs(mock)
def test_call_args_two_tuple(self):
mock = Mock()
mock(1, a=3)
mock(2, b=4)
self.assertEqual(len(mock.call_args), 2)
args, kwargs = mock.call_args
self.assertEqual(args, (2,))
self.assertEqual(kwargs, dict(b=4))
expected_list = [((1,), dict(a=3)), ((2,), dict(b=4))]
for expected, call_args in zip(expected_list, mock.call_args_list):
self.assertEqual(len(call_args), 2)
self.assertEqual(expected[0], call_args[0])
self.assertEqual(expected[1], call_args[1])
def test_side_effect_iterator(self):
mock = Mock(side_effect=iter([1, 2, 3]))
self.assertEqual([mock(), mock(), mock()], [1, 2, 3])
self.assertRaises(StopIteration, mock)
mock = MagicMock(side_effect=['a', 'b', 'c'])
self.assertEqual([mock(), mock(), mock()], ['a', 'b', 'c'])
self.assertRaises(StopIteration, mock)
mock = Mock(side_effect='ghi')
self.assertEqual([mock(), mock(), mock()], ['g', 'h', 'i'])
self.assertRaises(StopIteration, mock)
class Foo(object):
pass
mock = MagicMock(side_effect=Foo)
self.assertIsInstance(mock(), Foo)
mock = Mock(side_effect=Iter())
self.assertEqual([mock(), mock(), mock(), mock()],
['this', 'is', 'an', 'iter'])
self.assertRaises(StopIteration, mock)
def test_side_effect_setting_iterator(self):
mock = Mock()
mock.side_effect = iter([1, 2, 3])
self.assertEqual([mock(), mock(), mock()], [1, 2, 3])
self.assertRaises(StopIteration, mock)
side_effect = mock.side_effect
self.assertIsInstance(side_effect, type(iter([])))
mock.side_effect = ['a', 'b', 'c']
self.assertEqual([mock(), mock(), mock()], ['a', 'b', 'c'])
self.assertRaises(StopIteration, mock)
side_effect = mock.side_effect
self.assertIsInstance(side_effect, type(iter([])))
this_iter = Iter()
mock.side_effect = this_iter
self.assertEqual([mock(), mock(), mock(), mock()],
['this', 'is', 'an', 'iter'])
self.assertRaises(StopIteration, mock)
self.assertIs(mock.side_effect, this_iter)
def test_side_effect_iterator_exceptions(self):
for Klass in Mock, MagicMock:
iterable = (ValueError, 3, KeyError, 6)
m = Klass(side_effect=iterable)
self.assertRaises(ValueError, m)
self.assertEqual(m(), 3)
self.assertRaises(KeyError, m)
self.assertEqual(m(), 6)
def test_assert_has_calls_any_order(self):
mock = Mock()
mock(1, 2)
mock(a=3)
mock(3, 4)
mock(b=6)
mock(b=6)
kalls = [
call(1, 2), ({'a': 3},),
((3, 4),), ((), {'a': 3}),
('', (1, 2)), ('', {'a': 3}),
('', (1, 2), {}), ('', (), {'a': 3})
]
for kall in kalls:
mock.assert_has_calls([kall], any_order=True)
for kall in call(1, '2'), call(b=3), call(), 3, None, 'foo':
self.assertRaises(
AssertionError, mock.assert_has_calls,
[kall], any_order=True
)
kall_lists = [
[call(1, 2), call(b=6)],
[call(3, 4), call(1, 2)],
[call(b=6), call(b=6)],
]
for kall_list in kall_lists:
mock.assert_has_calls(kall_list, any_order=True)
kall_lists = [
[call(b=6), call(b=6), call(b=6)],
[call(1, 2), call(1, 2)],
[call(3, 4), call(1, 2), call(5, 7)],
[call(b=6), call(3, 4), call(b=6), call(1, 2), call(b=6)],
]
for kall_list in kall_lists:
self.assertRaises(
AssertionError, mock.assert_has_calls,
kall_list, any_order=True
)
def test_assert_has_calls(self):
kalls1 = [
call(1, 2), ({'a': 3},),
((3, 4),), call(b=6),
('', (1,), {'b': 6}),
]
kalls2 = [call.foo(), call.bar(1)]
kalls2.extend(call.spam().baz(a=3).call_list())
kalls2.extend(call.bam(set(), foo={}).fish([1]).call_list())
mocks = []
for mock in Mock(), MagicMock():
mock(1, 2)
mock(a=3)
mock(3, 4)
mock(b=6)
mock(1, b=6)
mocks.append((mock, kalls1))
mock = Mock()
mock.foo()
mock.bar(1)
mock.spam().baz(a=3)
mock.bam(set(), foo={}).fish([1])
mocks.append((mock, kalls2))
for mock, kalls in mocks:
for i in range(len(kalls)):
for step in 1, 2, 3:
these = kalls[i:i+step]
mock.assert_has_calls(these)
if len(these) > 1:
self.assertRaises(
AssertionError,
mock.assert_has_calls,
list(reversed(these))
)
def test_assert_any_call(self):
mock = Mock()
mock(1, 2)
mock(a=3)
mock(1, b=6)
mock.assert_any_call(1, 2)
mock.assert_any_call(a=3)
mock.assert_any_call(1, b=6)
self.assertRaises(
AssertionError,
mock.assert_any_call
)
self.assertRaises(
AssertionError,
mock.assert_any_call,
1, 3
)
self.assertRaises(
AssertionError,
mock.assert_any_call,
a=4
)
def test_mock_calls_create_autospec(self):
def f(a, b):
pass
obj = Iter()
obj.f = f
funcs = [
create_autospec(f),
create_autospec(obj).f
]
for func in funcs:
func(1, 2)
func(3, 4)
self.assertEqual(
func.mock_calls, [call(1, 2), call(3, 4)]
)
def test_mock_add_spec(self):
class _One(object):
one = 1
class _Two(object):
two = 2
class Anything(object):
one = two = three = 'four'
klasses = [
Mock, MagicMock, NonCallableMock, NonCallableMagicMock
]
for Klass in list(klasses):
klasses.append(lambda K=Klass: K(spec=Anything))
klasses.append(lambda K=Klass: K(spec_set=Anything))
for Klass in klasses:
for kwargs in dict(), dict(spec_set=True):
mock = Klass()
                # no error
                mock.one, mock.two, mock.three
for One, Two in [(_One, _Two), (['one'], ['two'])]:
for kwargs in dict(), dict(spec_set=True):
mock.mock_add_spec(One, **kwargs)
mock.one
self.assertRaises(
AttributeError, getattr, mock, 'two'
)
self.assertRaises(
AttributeError, getattr, mock, 'three'
)
if 'spec_set' in kwargs:
self.assertRaises(
AttributeError, setattr, mock, 'three', None
)
mock.mock_add_spec(Two, **kwargs)
self.assertRaises(
AttributeError, getattr, mock, 'one'
)
mock.two
self.assertRaises(
AttributeError, getattr, mock, 'three'
)
if 'spec_set' in kwargs:
self.assertRaises(
AttributeError, setattr, mock, 'three', None
)
# note that creating a mock, setting an instance attribute, and
# *then* setting a spec doesn't work. Not the intended use case
def test_mock_add_spec_magic_methods(self):
for Klass in MagicMock, NonCallableMagicMock:
mock = Klass()
int(mock)
mock.mock_add_spec(object)
self.assertRaises(TypeError, int, mock)
mock = Klass()
mock['foo']
            mock.__int__.return_value = 4
mock.mock_add_spec(int)
self.assertEqual(int(mock), 4)
self.assertRaises(TypeError, lambda: mock['foo'])
def test_adding_child_mock(self):
for Klass in NonCallableMock, Mock, MagicMock, NonCallableMagicMock:
mock = Klass()
mock.foo = Mock()
mock.foo()
self.assertEqual(mock.method_calls, [call.foo()])
self.assertEqual(mock.mock_calls, [call.foo()])
mock = Klass()
mock.bar = Mock(name='name')
mock.bar()
self.assertEqual(mock.method_calls, [])
self.assertEqual(mock.mock_calls, [])
# mock with an existing _new_parent but no name
mock = Klass()
mock.baz = MagicMock()()
mock.baz()
self.assertEqual(mock.method_calls, [])
self.assertEqual(mock.mock_calls, [])
def test_adding_return_value_mock(self):
for Klass in Mock, MagicMock:
mock = Klass()
mock.return_value = MagicMock()
mock()()
self.assertEqual(mock.mock_calls, [call(), call()()])
def test_manager_mock(self):
class Foo(object):
one = 'one'
two = 'two'
manager = Mock()
p1 = patch.object(Foo, 'one')
p2 = patch.object(Foo, 'two')
mock_one = p1.start()
self.addCleanup(p1.stop)
mock_two = p2.start()
self.addCleanup(p2.stop)
manager.attach_mock(mock_one, 'one')
manager.attach_mock(mock_two, 'two')
Foo.two()
Foo.one()
self.assertEqual(manager.mock_calls, [call.two(), call.one()])
def test_magic_methods_mock_calls(self):
for Klass in Mock, MagicMock:
m = Klass()
m.__int__ = Mock(return_value=3)
m.__float__ = MagicMock(return_value=3.0)
int(m)
float(m)
self.assertEqual(m.mock_calls, [call.__int__(), call.__float__()])
self.assertEqual(m.method_calls, [])
def test_attribute_deletion(self):
# this behaviour isn't *useful*, but at least it's now tested...
for Klass in Mock, MagicMock, NonCallableMagicMock, NonCallableMock:
m = Klass()
original = m.foo
m.foo = 3
del m.foo
self.assertEqual(m.foo, original)
new = m.foo = Mock()
del m.foo
self.assertEqual(m.foo, new)
def test_mock_parents(self):
for Klass in Mock, MagicMock:
m = Klass()
original_repr = repr(m)
m.return_value = m
self.assertIs(m(), m)
self.assertEqual(repr(m), original_repr)
m.reset_mock()
self.assertIs(m(), m)
self.assertEqual(repr(m), original_repr)
m = Klass()
m.b = m.a
self.assertIn("name='mock.a'", repr(m.b))
self.assertIn("name='mock.a'", repr(m.a))
m.reset_mock()
self.assertIn("name='mock.a'", repr(m.b))
self.assertIn("name='mock.a'", repr(m.a))
m = Klass()
original_repr = repr(m)
m.a = m()
m.a.return_value = m
self.assertEqual(repr(m), original_repr)
self.assertEqual(repr(m.a()), original_repr)
def test_attach_mock(self):
classes = Mock, MagicMock, NonCallableMagicMock, NonCallableMock
for Klass in classes:
for Klass2 in classes:
m = Klass()
m2 = Klass2(name='foo')
m.attach_mock(m2, 'bar')
self.assertIs(m.bar, m2)
self.assertIn("name='mock.bar'", repr(m2))
m.bar.baz(1)
self.assertEqual(m.mock_calls, [call.bar.baz(1)])
self.assertEqual(m.method_calls, [call.bar.baz(1)])
def test_attach_mock_return_value(self):
classes = Mock, MagicMock, NonCallableMagicMock, NonCallableMock
for Klass in Mock, MagicMock:
for Klass2 in classes:
m = Klass()
m2 = Klass2(name='foo')
m.attach_mock(m2, 'return_value')
self.assertIs(m(), m2)
self.assertIn("name='mock()'", repr(m2))
m2.foo()
self.assertEqual(m.mock_calls, call().foo().call_list())
def test_attribute_deletion(self):
for mock in Mock(), MagicMock():
self.assertTrue(hasattr(mock, 'm'))
del mock.m
self.assertFalse(hasattr(mock, 'm'))
del mock.f
self.assertFalse(hasattr(mock, 'f'))
self.assertRaises(AttributeError, getattr, mock, 'f')
def test_class_assignable(self):
for mock in Mock(), MagicMock():
self.assertNotIsInstance(mock, int)
mock.__class__ = int
self.assertIsInstance(mock, int)
@unittest2.expectedFailure
def test_pickle(self):
for Klass in (MagicMock, Mock, Subclass, NonCallableMagicMock):
mock = Klass(name='foo', attribute=3)
mock.foo(1, 2, 3)
data = pickle.dumps(mock)
new = pickle.loads(data)
new.foo.assert_called_once_with(1, 2, 3)
self.assertFalse(new.called)
self.assertTrue(is_instance(new, Klass))
self.assertIsInstance(new, Thing)
self.assertIn('name="foo"', repr(new))
self.assertEqual(new.attribute, 3)
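# A minimal standalone sketch (not part of the test suite) summarising the
# spec / spec_set behaviour exercised by the tests above. It reuses the Mock
# name already imported by this module, is illustrative only, and is never
# called by the test runner.
def _spec_demo():
    class Example(object):
        attribute = 1
    specced = Mock(spec=Example)
    specced.attribute                 # allowed: present on the spec
    try:
        specced.missing               # AttributeError: not on the spec
    except AttributeError:
        pass
    specced.extra = 2                 # spec still allows setting new names
    strict = Mock(spec_set=Example)
    try:
        strict.extra = 2              # spec_set forbids setting unknown names
    except AttributeError:
        pass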
if __name__ == '__main__':
unittest2.main()
|
|
#
# Copyright 2012 Sonya Huang
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import math
from scipy import sparse
from common.dbhelper import db
def mult_numpy_matrices(multiplicand, multiplier):
if not sparse.issparse(multiplicand):
multiplicand = sparse.csr_matrix(multiplicand)
else:
multiplicand = multiplicand.tocsr()
if not sparse.issparse(multiplier):
multiplier = sparse.csr_matrix(multiplier)
else:
multiplier = multiplier.tocsr()
return multiplicand * multiplier
class NamedMatrix:
# square in this case means the columns and rows represent the same
# thing, so we only need one set of names.
# if cols/rows represent different things but happen to be the same
# length, square would be False here
def __init__(self, square=False, from_matrix=None, rows=[], cols=[]):
if from_matrix is not None and not sparse.issparse(from_matrix):
self.matrix = sparse.lil_matrix(from_matrix)
else:
self.matrix = from_matrix
self.square = square
self.harmonized_rows = {}
self.set_rows(rows)
if not self.square:
self.harmonized_cols = {}
self.set_columns(cols)
def get_diag(self):
if not self.square:
raise Exception("cannot get diag of non-square matrix")
matrix = NamedMatrix(square=False, rows=self.get_rows(), cols=["value"])
for row in self.get_rows():
matrix.set_element(row, "value", self.get_element(row, row))
return matrix
def square_matrix_from_diag(self):
cols = self.get_columns()
rows = self.get_rows()
matrix = None
if len(cols) == 1:
col = cols[0]
matrix = NamedMatrix(square=True, rows=rows)
for row in rows:
matrix.set_element(row, row, self.get_element(row, col))
elif len(rows) == 1:
row = rows[0]
matrix = NamedMatrix(square=True, rows=cols)
for col in cols:
matrix.set_element(col, col, self.get_element(row, col))
else:
raise Exception("cannot use square_matrix_from_diag on non-vector")
return matrix
# these getter methods are here for overriding
def mat(self):
return self.matrix
def get_rows(self):
return self.rows
def get_columns(self):
if self.square:
return self.rows
return self.cols
def get_submatrix(self, rows=None, cols=None):
rowindices = []
colindices = []
if rows is not None:
for row in rows:
rowindices.append(self.row_index(row))
if cols is not None:
for col in cols:
colindices.append(self.col_index(col))
if rows is None:
if cols is None:
matrix = self.matrix
else:
matrix = self.matrix[:, colindices]
elif cols is None:
matrix = self.matrix[rowindices, :]
        else:
            # index rows first, then columns, so we get a true submatrix
            # rather than the element-wise (row, col) pairs that
            # matrix[rowindices, colindices] would select
            matrix = self.matrix.tocsr()[rowindices, :][:, colindices]
if rows is None:
rows = self.rows
if cols is None:
cols = self.cols
return NamedMatrix(False, matrix, rows, cols)
def get_named_column(self, column):
colindex = self.rev_columns[column]
colmatrix = self.matrix[:, colindex]
return NamedMatrix(
square=False,
rows=self.get_rows(),
cols=[column],
from_matrix=colmatrix)
def check_shape(self, other):
return self.nrows() == other.nrows() and self.ncols() == other.ncols()
def verify_mult_shape(self, other):
if self.ncols() != other.nrows():
dims = (self.nrows(), self.ncols(), other.nrows(), other.ncols())
ourset = set(self.get_columns())
theirset = set(other.get_rows())
print(list(ourset.difference(theirset)) + \
list(theirset.difference(ourset)))
raise Exception("matrix dimensions don't match " + \
"(attempted to multiply %dx%d by %dx%d)" % dims)
def dims(self):
return (self.nrows(), self.ncols())
def nrows(self):
return len(self.rows)
def ncols(self):
if self.square:
return len(self.rows)
return len(self.cols)
# return a new matrix that is us post-multiplied by them
def matrix_mult(self, other):
self.verify_mult_shape(other)
M = NamedMatrix(False)
M.set_rows(self.get_rows())
M.set_columns(other.get_columns())
M.matrix = mult_numpy_matrices(self.mat(), other.mat())
return M
def matrix_postmult(self, other):
other.verify_mult_shape(self)
M = NamedMatrix(False)
M.set_rows(other.get_rows())
M.set_columns(self.get_columns())
M.matrix = mult_numpy_matrices(other.mat(), self.mat())
return M
def transposed(self):
matrix = self.matrix.transpose()
M = NamedMatrix(
square=self.square,
            from_matrix=matrix,
rows=self.get_columns(),
cols=self.get_rows())
if self.square:
M.set_harmonized_rows(self.harmonized_rows)
else:
M.set_harmonized_rows(self.harmonized_cols)
M.set_harmonized_cols(self.harmonized_rows)
return M
### mutating functions
def matrix_postmult_inplace(self, other):
other.verify_mult_shape(self)
self.matrix = mult_numpy_matrices(other.mat(), self.mat())
self.set_rows(other.get_rows())
return self
def matrix_mult_inplace(self, other):
self.verify_mult_shape(other)
self.matrix = mult_numpy_matrices(self.mat(), other.mat())
        self.set_columns(other.get_columns())
return self
def scalar_mult_inplace(self, scalar):
self.matrix = self.matrix * scalar
return self
# return a new matrix point-wise multiplied by them
def mult(self, other):
return self._arith_helper(other, "multiply")
# return a new matrix that is us divided by them
def divide(self, other, ignore_zero_denom=False):
if ignore_zero_denom:
result = self._arith_helper(other, "divide", ["remove_inf"])
else:
result = self._arith_helper(other, "divide")
return result
def add(self, other):
return self._arith_helper(other, "add")
def subtract(self, other):
return self._arith_helper(other, "subtract")
def _arith_helper(self, other, operation, options=[]):
if not self.check_shape(other):
dims = (self.nrows(), self.ncols(), other.nrows(), other.ncols())
raise Exception("matrix dimensions don't match" + \
"(operands: %dx%d, %dx%d)" % dims)
M = NamedMatrix(self.square)
M.set_rows(self.get_rows())
if not self.square:
M.set_columns(self.get_columns())
if operation == "divide":
matrix = self.mat() / other.mat()
if "remove_inf" in options:
(rows, cols) = matrix.nonzero()
for (row, col) in zip(rows, cols):
if math.isinf(matrix[row, col]):
matrix[row, col] = 0
elif operation == "multiply":
multiplicand = self.mat()
if not sparse.issparse(multiplicand):
multiplicand = sparse.lil_matrix(multiplicand)
matrix = multiplicand.multiply(other.mat())
elif operation == "add":
matrix = self.mat() + other.mat()
elif operation == "subtract":
matrix = self.mat() - other.mat()
M.matrix = matrix
return M
def print(self, rowrange=None, colrange=None):
if rowrange is None:
rowrange = range(self.nrows())
if colrange is None:
if self.square:
colrange = range(self.nrows())
else:
colrange = range(self.ncols())
matrix = self.mat()
columns = self.get_columns()
rows = self.get_rows()
for row in rowrange:
for col in colrange:
if matrix[row, col] != 0:
print(rows[row], columns[col], matrix[row, col])
def set_harmonized_rows(self, harmonized):
self.harmonized_rows = harmonized
def get_harmonized_rowname(self, rowname):
if rowname in self.rev_rows:
return rowname
if rowname in self.harmonized_rows:
return self.harmonized_rows[rowname]
return rowname
def set_harmonized_cols(self, harmonized):
self.harmonized_cols = harmonized
def get_harmonized_colname(self, colname):
if self.square:
return self.get_harmonized_rowname(colname)
if colname in self.harmonized_cols:
return self.harmonized_cols[colname]
return colname
def has_row(self, row):
return row in self.rev_rows
def has_column(self, column):
return column in self.rev_columns
def set_rows(self, rows):
if self.matrix is not None and self.matrix.shape[0] != len(rows):
raise Exception(
"number of rows (%d) doesn't match existing matrix (%dx%d)"
% (len(rows), self.matrix.shape[0], self.matrix.shape[1]))
self.rows = [] # list of row names
self.rev_rows = {} # index number for each row name
for row in rows:
self.rev_rows[row] = len(self.rows)
self.rows.append(row)
def set_columns(self, columns):
if self.square:
raise Exception("set_columns doesn't work with 'square' option")
if self.matrix is not None and self.matrix.shape[1] != len(columns):
raise Exception("number of columns (%d) doesn't match " + \
"existing matrix (%dx%d)"
% (len(columns),
self.matrix.shape[0], self.matrix.shape[1]))
self.cols = [] # list of column names
self.rev_columns = {} # index number for each column name
for col in columns:
self.rev_columns[col] = len(self.cols)
self.cols.append(col)
def row_index(self, rowname):
harmonized_rowname = self.get_harmonized_rowname(rowname)
        if harmonized_rowname not in self.rev_rows:
            # debugging aid: show the known row names before the KeyError
            # raised by the lookup below
            print(sorted(self.rev_rows))
return self.rev_rows[harmonized_rowname]
def col_index(self, colname):
if self.square:
colindex = self.row_index(colname)
else:
harmonized_colname = self.get_harmonized_colname(colname)
colindex = self.rev_columns[harmonized_colname]
return colindex
def sum(self, dimension=None):
if dimension is None:
return self.mat().sum()
else:
matrix = self.mat().sum(dimension)
if dimension == 0:
m = NamedMatrix(rows=["sum"], cols=self.get_columns(),
from_matrix=matrix)
else:
m = NamedMatrix(rows=self.get_rows(), cols=["sum"],
from_matrix=matrix)
return m
def get_element(self, rowname=None, colname=None):
if rowname is None and colname is None:
if self.nrows() == 1 and self.ncols() == 1:
return self.matrix[0, 0]
else:
raise Exception("rowname or colname must be defined")
if rowname is not None:
rowindex = self.row_index(rowname)
if colname is not None:
colindex = self.col_index(colname)
return self.matrix[rowindex, colindex]
else:
if self.ncols() == 1:
return self.matrix[rowindex, 0]
return self.matrix[rowindex, :]
# by now we know rowname is None, colname is not None
if self.nrows() == 1:
return self.matrix[0, colindex]
return self.matrix[:, colindex]
def set_column(self, colname, arg):
colindex = self.col_index(colname)
if self.matrix is None:
self.matrix = sparse.lil_matrix(self.dims())
elif type(self.matrix) is sparse.csr_matrix:
self.matrix = self.matrix.tolil()
if type(arg) is NamedMatrix:
column = arg.mat()
for i in range(self.matrix.shape[0]):
self.matrix[i, colindex] = column[i, 0]
elif type(arg) is int or type(arg) is float:
for i in range(self.matrix.shape[0]):
self.matrix[i, colindex] = arg
def set_element(self, rowname, colname, value):
if value is None:
return
if self.matrix is None:
self.matrix = sparse.lil_matrix(self.dims())
elif type(self.matrix) is sparse.csr_matrix:
self.matrix = self.matrix.tolil()
rowindex = self.row_index(rowname)
colindex = self.col_index(colname)
self.matrix[rowindex, colindex] = float(value)
def generate_selector_matrix(tablename, rows, row_field, col_field,
conditions=[]):
whereclause = ""
if len(conditions):
whereclause = "WHERE " + " AND ".join(conditions)
cols = []
sql = "SELECT DISTINCT %s FROM %s %s" % (col_field, tablename, whereclause)
stmt = db.prepare(sql)
for row in stmt():
if row[0] not in cols:
cols.append(row[0])
cols = sorted(cols)
sel = NamedMatrix(square=False)
# we want our columns to match their rows
sel.set_rows(cols)
sel.set_columns(rows)
sql = "SELECT %s, %s FROM %s %s" % (
row_field, col_field, tablename, whereclause)
stmt = db.prepare(sql)
for retrieved_row in stmt():
if retrieved_row[0] in rows:
sel.set_element(retrieved_row[1], retrieved_row[0], 1)
return sel
class TotalOutputMatrix(NamedMatrix):
def __init__(self, from_matrix=None, rows=[]):
NamedMatrix.__init__(self, square=False,
from_matrix=from_matrix,
rows=rows,
cols=["Total Output"])
def set_output(self, sector, value):
self.set_element(sector, "Total Output", value)
def get_output(self, sector):
return self.get_element(sector, "Total Output")
class FinalDemandMatrix(NamedMatrix):
def __init__(self, pce_colname, export_colname,
from_matrix=None, rows=[], cols=[]):
NamedMatrix.__init__(self, square=False,
from_matrix=from_matrix,
rows=rows, cols=cols)
self.pce_colname = pce_colname
self.export_colname = export_colname
    # other matrices multiplied by FD should also be FD
def matrix_postmult(self, other):
other.verify_mult_shape(self)
matrix = mult_numpy_matrices(other.mat(), self.mat())
M = FinalDemandMatrix(pce_colname=self.pce_colname,
export_colname=self.export_colname,
from_matrix=matrix,
rows=other.get_rows(),
cols=self.get_columns())
return M
def get_total(self):
total = self.mat().sum(1)
return NamedMatrix(
square=False,
rows=self.get_rows(),
cols=["Final Demand"],
from_matrix=total)
def get_pce(self):
return self.get_named_column(self.pce_colname)
def get_marginal_pce(self):
pce = self.get_pce()
pce.scalar_mult_inplace(1 / pce.sum())
return pce
def get_exports(self):
return self.get_named_column(self.export_colname)
def get_marginal_export(self):
exports = self.get_exports()
exports.scalar_mult_inplace(1 / exports.sum())
return exports
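# A minimal usage sketch for NamedMatrix. It only needs scipy (the db helper
# imported at the top of this module is not exercised here); the row/column
# names below are made up for illustration.
if __name__ == '__main__':
    a = NamedMatrix(square=False, rows=["r1", "r2"], cols=["c1", "c2"])
    a.set_element("r1", "c1", 2)
    a.set_element("r2", "c2", 3)
    b = NamedMatrix(square=False, rows=["c1", "c2"], cols=["k1"])
    b.set_element("c1", "k1", 5)
    b.set_element("c2", "k1", 7)
    product = a.matrix_mult(b)        # ["r1", "r2"] x ["k1"]
    product.print()                   # prints the named non-zero entries
    print(product.get_element("r1", "k1"))   # 10.0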
|
|
#!/usr/bin/env python3
from __future__ import print_function
from cffi import FFI
import time
ffi = FFI()
ffi.cdef("""
typedef uint64_t counter_t;
typedef struct {
counter_t packets;
counter_t bytes;
} vlib_counter_t;
typedef enum {
STAT_DIR_TYPE_ILLEGAL = 0,
STAT_DIR_TYPE_SCALAR_INDEX,
STAT_DIR_TYPE_COUNTER_VECTOR_SIMPLE,
STAT_DIR_TYPE_COUNTER_VECTOR_COMBINED,
STAT_DIR_TYPE_ERROR_INDEX,
STAT_DIR_TYPE_NAME_VECTOR,
} stat_directory_type_t;
typedef struct
{
stat_directory_type_t type;
union {
uint64_t offset;
uint64_t index;
uint64_t value;
};
uint64_t offset_vector;
char name[128]; // TODO change this to pointer to "somewhere"
} stat_segment_directory_entry_t;
typedef struct
{
char *name;
stat_directory_type_t type;
union
{
double scalar_value;
counter_t *error_vector;
counter_t **simple_counter_vec;
vlib_counter_t **combined_counter_vec;
uint8_t **name_vector;
};
} stat_segment_data_t;
typedef struct
{
uint64_t version;
uint64_t epoch;
uint64_t in_progress;
uint64_t directory_offset;
uint64_t error_offset;
uint64_t stats_offset;
} stat_segment_shared_header_t;
typedef struct
{
uint64_t current_epoch;
stat_segment_shared_header_t *shared_header;
stat_segment_directory_entry_t *directory_vector;
ssize_t memory_size;
} stat_client_main_t;
stat_client_main_t * stat_client_get(void);
void stat_client_free(stat_client_main_t * sm);
int stat_segment_connect_r (char *socket_name, stat_client_main_t * sm);
int stat_segment_connect (char *socket_name);
void stat_segment_disconnect_r (stat_client_main_t * sm);
void stat_segment_disconnect (void);
uint32_t *stat_segment_ls_r (uint8_t ** patterns, stat_client_main_t * sm);
uint32_t *stat_segment_ls (uint8_t ** pattern);
stat_segment_data_t *stat_segment_dump_r (uint32_t * stats,
stat_client_main_t * sm);
stat_segment_data_t *stat_segment_dump (uint32_t * counter_vec);
void stat_segment_data_free (stat_segment_data_t * res);
double stat_segment_heartbeat_r (stat_client_main_t * sm);
int stat_segment_vec_len(void *vec);
uint8_t **stat_segment_string_vector(uint8_t **string_vector, char *string);
char *stat_segment_index_to_name_r (uint32_t index, stat_client_main_t * sm);
uint64_t stat_segment_version(void);
uint64_t stat_segment_version_r(stat_client_main_t *sm);
void free(void *ptr);
""") # noqa: E501
# Utility functions
def make_string_vector(api, strings):
vec = ffi.NULL
if type(strings) is not list:
strings = [strings]
for s in strings:
vec = api.stat_segment_string_vector(vec, ffi.new("char []",
s.encode('utf-8')))
return vec
def make_string_list(api, vec):
vec_len = api.stat_segment_vec_len(vec)
return [ffi.string(vec[i]) for i in range(vec_len)]
# 2-dimensional array indexed by [thread][index]
def simple_counter_vec_list(api, e):
vec = []
for thread in range(api.stat_segment_vec_len(e)):
len_interfaces = api.stat_segment_vec_len(e[thread])
if_per_thread = [e[thread][interfaces]
for interfaces in range(len_interfaces)]
vec.append(if_per_thread)
return vec
def vlib_counter_dict(c):
return {'packets': c.packets,
'bytes': c.bytes}
def combined_counter_vec_list(api, e):
vec = []
for thread in range(api.stat_segment_vec_len(e)):
len_interfaces = api.stat_segment_vec_len(e[thread])
if_per_thread = [vlib_counter_dict(e[thread][interfaces])
for interfaces in range(len_interfaces)]
vec.append(if_per_thread)
return vec
def error_vec_list(api, e):
vec = []
for thread in range(api.stat_segment_vec_len(e)):
vec.append(e[thread])
return vec
def name_vec_list(api, e):
return [ffi.string(e[i]).decode('utf-8') for i in
range(api.stat_segment_vec_len(e)) if e[i] != ffi.NULL]
def stat_entry_to_python(api, e):
    # The numeric type values below follow stat_directory_type_t as declared
    # in the cdef above.
    if e.type == 1:  # STAT_DIR_TYPE_SCALAR_INDEX
        return e.scalar_value
    if e.type == 2:  # STAT_DIR_TYPE_COUNTER_VECTOR_SIMPLE
        return simple_counter_vec_list(api, e.simple_counter_vec)
    if e.type == 3:  # STAT_DIR_TYPE_COUNTER_VECTOR_COMBINED
        return combined_counter_vec_list(api, e.combined_counter_vec)
    if e.type == 4:  # STAT_DIR_TYPE_ERROR_INDEX
        return error_vec_list(api, e.error_vector)
    if e.type == 5:  # STAT_DIR_TYPE_NAME_VECTOR
        return name_vec_list(api, e.name_vector)
    raise NotImplementedError()
class VPPStatsIOError(IOError):
message = "Stat segment client connection returned: " \
"%(retval)s %(strerror)s."
strerror = {-1: "Stat client couldn't open socket",
-2: "Stat client socket open but couldn't connect",
-3: "Receiving file descriptor failed",
-4: "mmap fstat failed",
-5: "mmap map failed"
}
def __init__(self, message=None, **kwargs):
if 'retval' in kwargs:
self.retval = kwargs['retval']
kwargs['strerror'] = self.strerror[int(self.retval)]
if not message:
try:
message = self.message % kwargs
except Exception:
message = self.message
else:
message = message % kwargs
super(VPPStatsIOError, self).__init__(message)
class VPPStatsClientLoadError(RuntimeError):
pass
class VPPStats(object):
VPPStatsIOError = VPPStatsIOError
default_socketname = '/run/vpp/stats.sock'
sharedlib_name = 'libvppapiclient.so'
def __init__(self, socketname=default_socketname, timeout=10):
self.socketname = socketname
self.timeout = timeout
self.connected = False
try:
self.api = ffi.dlopen(VPPStats.sharedlib_name)
except Exception:
raise VPPStatsClientLoadError("Could not open: %s" %
VPPStats.sharedlib_name)
def connect(self):
self.client = self.api.stat_client_get()
poll_end_time = time.time() + self.timeout
while time.time() < poll_end_time:
rv = self.api.stat_segment_connect_r(
self.socketname.encode('utf-8'), self.client)
# Break out if success or any other error than "no such file"
# (indicating that VPP hasn't started yet)
if rv == 0 or ffi.errno != 2:
self.connected = True
break
if rv != 0:
raise VPPStatsIOError(retval=rv)
def heartbeat(self):
if not self.connected:
self.connect()
return self.api.stat_segment_heartbeat_r(self.client)
def ls(self, patterns):
if not self.connected:
self.connect()
return self.api.stat_segment_ls_r(make_string_vector(self.api,
patterns),
self.client)
def lsstr(self, patterns):
if not self.connected:
self.connect()
rv = self.api.stat_segment_ls_r(make_string_vector(self.api,
patterns),
self.client)
if rv == ffi.NULL:
raise VPPStatsIOError()
return [ffi.string(self.api.stat_segment_index_to_name_r(
rv[i], self.client)).decode('utf-8')
for i in range(self.api.stat_segment_vec_len(rv))]
def dump(self, counters):
if not self.connected:
self.connect()
stats = {}
rv = self.api.stat_segment_dump_r(counters, self.client)
        # Raise an exception here; callers such as get_counter() handle the retry
if rv == ffi.NULL:
raise VPPStatsIOError()
rv_len = self.api.stat_segment_vec_len(rv)
for i in range(rv_len):
n = ffi.string(rv[i].name).decode('utf-8')
e = stat_entry_to_python(self.api, rv[i])
if e is not None:
stats[n] = e
return stats
def get_counter(self, name):
retries = 0
while True:
try:
d = self.ls(name)
s = self.dump(d)
if len(s) > 1:
raise AttributeError('Matches multiple counters {}'
.format(name))
k, v = s.popitem()
return v
except VPPStatsIOError:
if retries > 10:
return None
retries += 1
def get_err_counter(self, name):
"""Get an error counter. The errors from each worker thread
are summed"""
return sum(self.get_counter(name))
def disconnect(self):
try:
self.api.stat_segment_disconnect_r(self.client)
self.api.stat_client_free(self.client)
self.connected = False
del self.client
except AttributeError:
# no need to disconnect if we're not connected
pass
def set_errors(self):
        '''Return all error counters > 0'''
retries = 0
while True:
try:
error_names = self.ls(['/err/'])
error_counters = self.dump(error_names)
break
except VPPStatsIOError:
if retries > 10:
return None
retries += 1
return {k: sum(error_counters[k])
for k in error_counters.keys() if sum(error_counters[k])}
def set_errors_str(self):
        '''Return all error counters > 0, pretty-printed'''
s = ['ERRORS:']
error_counters = self.set_errors()
for k in sorted(error_counters):
s.append('{:<60}{:>10}'.format(k, error_counters[k]))
return '%s\n' % '\n'.join(s)
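# A minimal usage sketch. It assumes a running VPP instance exposing the
# default stats socket and libvppapiclient.so being loadable; the stat paths
# below ('/if/names', '/if/rx') are examples and may differ per deployment.
if __name__ == '__main__':
    stats = VPPStats()                    # or VPPStats('/run/vpp/stats.sock')
    stats.connect()
    print('heartbeat:', stats.heartbeat())
    print('interfaces:', stats.lsstr(['/if/names']))
    print('rx counters:', stats.dump(stats.ls(['/if/rx'])))
    stats.disconnect()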
|
|
# Python Tools for Visual Studio
# Copyright(c) Microsoft Corporation
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the License); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
# IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
# MERCHANTABILITY OR NON-INFRINGEMENT.
#
# See the Apache Version 2.0 License for specific language governing
# permissions and limitations under the License.
from __future__ import with_statement
__author__ = "Microsoft Corporation <[email protected]>"
__version__ = "3.0.0.0"
import os.path
import sys
import json
import time
import unittest
import socket
import traceback
from types import CodeType, FunctionType
try:
import thread
except:
import _thread as thread
try:
from unittest import TextTestResult
_IS_OLD_UNITTEST = False
except:
from unittest import _TextTestResult as TextTestResult
_IS_OLD_UNITTEST = True
if sys.version_info[0] < 3:
if sys.version_info[:2] < (2, 6):
from codecs import open
else:
from io import open
class _TestOutput(object):
"""file like object which redirects output to the repl window."""
errors = 'strict'
def __init__(self, old_out, is_stdout):
self.is_stdout = is_stdout
self.old_out = old_out
if sys.version >= '3.' and hasattr(old_out, 'buffer'):
self.buffer = _TestOutputBuffer(old_out.buffer, is_stdout)
def flush(self):
if self.old_out:
self.old_out.flush()
def writelines(self, lines):
for line in lines:
self.write(line)
@property
def encoding(self):
return 'utf8'
def write(self, value):
_channel.send_event('stdout' if self.is_stdout else 'stderr', content=value)
if self.old_out:
self.old_out.write(value)
def isatty(self):
return True
def next(self):
pass
@property
def name(self):
if self.is_stdout:
return "<stdout>"
else:
return "<stderr>"
def __getattr__(self, name):
return getattr(self.old_out, name)
class _TestOutputBuffer(object):
def __init__(self, old_buffer, is_stdout):
self.buffer = old_buffer
self.is_stdout = is_stdout
def write(self, data):
_channel.send_event('stdout' if self.is_stdout else 'stderr', content=data)
self.buffer.write(data)
def flush(self):
self.buffer.flush()
def truncate(self, pos = None):
return self.buffer.truncate(pos)
def tell(self):
return self.buffer.tell()
def seek(self, pos, whence = 0):
return self.buffer.seek(pos, whence)
class _IpcChannel(object):
def __init__(self, socket):
self.socket = socket
self.seq = 0
self.lock = thread.allocate_lock()
def close(self):
self.socket.close()
def send_event(self, name, **args):
with self.lock:
body = {'type': 'event', 'seq': self.seq, 'event':name, 'body':args}
self.seq += 1
content = json.dumps(body).encode('utf-8')
headers = ('Content-Length: %d\r\n\r\n' % (len(content), )).encode('ascii')
self.socket.send(headers)
self.socket.send(content)
_channel = None
class VsTestResult(TextTestResult):
_start_time = None
_result = None
def startTest(self, test):
super(VsTestResult, self).startTest(test)
self._start_time = time.time()
if _channel is not None:
_channel.send_event(
name='start',
test = test.test_id
)
def stopTest(self, test):
# stopTest is called after tearDown on all Python versions
# so it is the right time to send the result back to VS
# (sending it in the addX methods is too early on Python <= 3.1)
super(VsTestResult, self).stopTest(test)
if _channel is not None and self._result is not None:
_channel.send_event(**self._result)
def addError(self, test, err):
super(VsTestResult, self).addError(test, err)
self._setResult(test, 'failed', err)
def addFailure(self, test, err):
super(VsTestResult, self).addFailure(test, err)
self._setResult(test, 'failed', err)
def addSuccess(self, test):
super(VsTestResult, self).addSuccess(test)
self._setResult(test, 'passed')
def addSkip(self, test, reason):
super(VsTestResult, self).addSkip(test, reason)
self._setResult(test, 'skipped')
def addExpectedFailure(self, test, err):
super(VsTestResult, self).addExpectedFailure(test, err)
self._setResult(test, 'failed', err)
def addUnexpectedSuccess(self, test):
super(VsTestResult, self).addUnexpectedSuccess(test)
self._setResult(test, 'passed')
def _setResult(self, test, outcome, trace = None):
tb = None
message = None
duration = time.time() - self._start_time if self._start_time else 0
if trace is not None:
traceback.print_exception(*trace)
tb = _get_traceback(trace)
message = str(trace[1])
self._result = dict(
name = 'result',
outcome = outcome,
traceback = tb,
message = message,
durationInSecs = duration,
test = test.test_id
)
def _get_traceback(trace):
def norm_module(file_path):
return os.path.splitext(os.path.normcase(file_path))[0] + '.py'
def is_framework_frame(f):
return is_excluded_module_path(norm_module(f[0]))
if _IS_OLD_UNITTEST:
def is_excluded_module_path(file_path):
# unittest is a module, not a package on 2.5, 2.6, 3.0, 3.1
return file_path == norm_module(unittest.__file__) or file_path == norm_module(__file__)
else:
def is_excluded_module_path(file_path):
for lib_path in unittest.__path__:
# if it's in unit test package or it's this module
if file_path.startswith(os.path.normcase(lib_path)) or file_path == norm_module(__file__):
return True
return False
    all_frames = traceback.extract_tb(trace[2])
    filtered = [f for f in reversed(all_frames) if not is_framework_frame(f)]
# stack trace parser needs the Python version, it parses the user's
# code to create fully qualified function names
lang_ver = '{0}.{1}'.format(sys.version_info[0], sys.version_info[1])
tb = ''.join(traceback.format_list(filtered))
return lang_ver + '\n' + tb
def main():
import os
from optparse import OptionParser
global _channel
parser = OptionParser(prog = 'visualstudio_py_testlauncher', usage = 'Usage: %prog [<option>] <test names>... ')
parser.add_option('-s', '--secret', metavar='<secret>', help='restrict server to only allow clients that specify <secret> when connecting')
parser.add_option('-p', '--port', type='int', metavar='<port>', help='listen for debugger connections on <port>')
parser.add_option('-x', '--mixed-mode', action='store_true', help='wait for mixed-mode debugger to attach')
parser.add_option('-t', '--test', type='str', dest='tests', action='append', help='specifies a test to run')
parser.add_option('-c', '--coverage', type='str', help='enable code coverage and specify filename')
parser.add_option('-r', '--result-port', type='int', help='connect to port on localhost and send test results')
parser.add_option('--test-list', metavar='<file>', type='str', help='read tests from this file')
parser.add_option('--dry-run', action='store_true', help='prints a list of tests without executing them')
(opts, _) = parser.parse_args()
sys.path[0] = os.getcwd()
if opts.result_port:
_channel = _IpcChannel(socket.create_connection(('127.0.0.1', opts.result_port)))
sys.stdout = _TestOutput(sys.stdout, is_stdout = True)
sys.stderr = _TestOutput(sys.stderr, is_stdout = False)
if opts.secret and opts.port:
from ptvsd.debugger import DONT_DEBUG, DEBUG_ENTRYPOINTS, get_code
from ptvsd import DEFAULT_PORT, enable_attach, wait_for_attach
DONT_DEBUG.append(os.path.normcase(__file__))
DEBUG_ENTRYPOINTS.add(get_code(main))
enable_attach(opts.secret, ('127.0.0.1', getattr(opts, 'port', DEFAULT_PORT)), redirect_output = True)
wait_for_attach()
elif opts.mixed_mode:
# For mixed-mode attach, there's no ptvsd and hence no wait_for_attach(),
# so we have to use Win32 API in a loop to do the same thing.
from time import sleep
from ctypes import windll, c_char
while True:
if windll.kernel32.IsDebuggerPresent() != 0:
break
sleep(0.1)
try:
debugger_helper = windll['Microsoft.PythonTools.Debugger.Helper.x86.dll']
except WindowsError:
debugger_helper = windll['Microsoft.PythonTools.Debugger.Helper.x64.dll']
isTracing = c_char.in_dll(debugger_helper, "isTracing")
while True:
if isTracing.value != 0:
break
sleep(0.1)
all_tests = list(opts.tests or [])
if opts.test_list:
with open(opts.test_list, 'r', encoding='utf-8') as test_list:
all_tests.extend(t.strip() for t in test_list)
if opts.dry_run:
if _channel:
for test in all_tests:
print(test)
_channel.send_event(
name='start',
test = test
)
_channel.send_event(
name='result',
outcome='passed',
test = test
)
else:
for test in all_tests:
print(test)
sys.exit(0)
cov = None
try:
if opts.coverage:
try:
import coverage
cov = coverage.coverage(opts.coverage)
cov.load()
cov.start()
except:
pass
tests = []
for test in all_tests:
if not test:
continue
try:
for loaded_test in unittest.defaultTestLoader.loadTestsFromName(test):
# Starting with Python 3.5, rather than letting any import error
# exception propagate out of loadTestsFromName, unittest catches it and
# creates instance(s) of unittest.loader._FailedTest.
# Those have an unexpected test.id(), ex: 'unittest.loader._FailedTest.test1'
# Store the test id passed in as an additional attribute and
# VsTestResult will use that instead of test.id().
loaded_test.test_id = test
tests.append(loaded_test)
except Exception:
trace = sys.exc_info()
traceback.print_exception(*trace)
tb = _get_traceback(trace)
message = str(trace[1])
if _channel is not None:
_channel.send_event(
name='start',
test = test
)
_channel.send_event(
name='result',
outcome='failed',
traceback = tb,
message = message,
test = test
)
if _IS_OLD_UNITTEST:
def _makeResult(self):
return VsTestResult(self.stream, self.descriptions, self.verbosity)
unittest.TextTestRunner._makeResult = _makeResult
runner = unittest.TextTestRunner(verbosity=0)
else:
runner = unittest.TextTestRunner(verbosity=0, resultclass=VsTestResult)
result = runner.run(unittest.defaultTestLoader.suiteClass(tests))
sys.exit(not result.wasSuccessful())
finally:
if cov is not None:
cov.stop()
cov.save()
cov.xml_report(outfile = opts.coverage + '.xml', omit=__file__)
if _channel is not None:
_channel.send_event(
name='done'
)
_channel.close()
if __name__ == '__main__':
main()
|
|
from cab.abm.agent import CabAgent
from cab.util.rng import get_RNG
__author__ = 'Michael Wagner'
__version__ = '1.0'
class HiveAgent(CabAgent):
def __init__(self, x, y, gc):
super().__init__(x, y, gc)
self.id = "hive"
self.max_ants = gc.MAX_ANTS
self.food = 0
self.color = (90, 0, 255)
self.dead = False
self.spawned = 0
def perceive_and_act(self, abm, ca):
if self.spawned < self.max_ants:
ant = AntAgent(self.x, self.y, self.gc)
abm.add_agent(ant)
self.spawned += 1
class FoodAgent(CabAgent):
def __init__(self, x, y, gc):
super().__init__(x, y, gc)
self.id = "food"
self.food = gc.MAX_FOOD
self.color = (50, 255, 0)
self.dead = False
def perceive_and_act(self, ca, agent_list):
if self.food < self.gc.MAX_FOOD:
print("[FoodAgent] food left: {}".format(self.food))
self.update_color()
if self.food < 0:
self.dead = True
return
def update_color(self):
red = max(0, int((self.food / self.gc.MAX_FOOD) * 50))
green = max(0, int((self.food / self.gc.MAX_FOOD) * 255))
self.color = (red, green, 0)
class AntAgent(CabAgent):
def __init__(self, x, y, gc):
"""
Initializes an agent
"""
super().__init__(x, y, gc)
self.id = "ant"
self.prev_x = x
self.prev_y = y
self.max_ph = gc.MAX_PHEROMONE
self.food = 1
self.has_food = False
self.dead = False
self.color = (0, 175, 200)
self.directions = [(1, 0), (1, -1), ( 0, -1), (-1, 0), (-1, 1), ( 0, 1)]
self.current_dir = get_RNG().randint(0, 5)
def perceive_and_act(self, abm, ca):
"""
Perceiving the environment and act according to the rules
"""
self.prev_x = self.x
self.prev_y = self.y
neighborhood = ca.get_agent_neighborhood(self.x, self.y, 1)
self.forage(neighborhood)
def forage(self, neighborhood):
if self.has_food:
self.return_to_hive(neighborhood)
else:
self.find_food_source(neighborhood)
def return_to_hive(self, neighborhood):
cell = self.get_cell_with_pheromone("hive", neighborhood)
if cell:
this_cell = neighborhood[self.x, self.y]
self.drop_pheromones("food", this_cell)
self.move_to(cell[0])
self.check_if_at_hive(cell[1])
else:
print('Ant Error: no valid hive bound cell found!')
def find_food_source(self, neighborhood):
cell = self.get_cell_with_pheromone("food", neighborhood)
if cell:
this_cell = neighborhood[self.x, self.y]
self.drop_pheromones("hive", this_cell)
self.move_to(cell[0])
self.check_if_at_food(cell[1])
else:
            print('Ant Error: no valid food bound cell found!')
def get_cell_with_pheromone(self, target_ph, neighborhood):
"""
Gets the cell with highest pheromone value (or random if no pheromones present)
from the immediate neighborhood.
:param: neighborhood is a dict of (x, y) -> cell mappings,
where cell is a tuple of (ca_cell, [agent(s) on cell]).
If no agent is on the cell then the list in the tuple is simply 'False'
"""
result = None
result_list = []
backup_list = []
best_cell = None
max_ph = 0
# Choose the possible directions the ants can currently look at.
if self.current_dir == 5:
possible_dirs = [4, 5, 0]
elif self.current_dir == 0:
possible_dirs = [5, 0, 1]
else:
possible_dirs = [self.current_dir - 1, self.current_dir, self.current_dir + 1]
for i in possible_dirs:
d = self.directions[i]
_x = self.x + d[0]
_y = self.y + d[1]
if (_x, _y) in neighborhood:
cell = neighborhood[_x, _y]
if cell[0].pheromones[target_ph] > 0.00: # and (not cell[1] or len(cell[1]) < 10):
ph = cell[0].pheromones[target_ph]
if ph > max_ph:
best_cell = cell
max_ph = ph
self.current_dir = i
result_list.append((cell, ph, i))
# elif not cell[1] or len(cell[1]) < 10:
else:
backup_list.append((cell, i))
        if result_list:
            # Add a small random factor to the movement: roughly one time in
            # ten, pick a weighted-random candidate instead of the strongest.
            if get_RNG().random() < 0.10:
choice = AntAgent.weighted_choice(result_list)
# choice = get_RNG().choice(result_list)
result = choice[0]
self.current_dir = choice[1]
else:
result = best_cell
elif backup_list:
choice = get_RNG().choice(backup_list)
result = choice[0]
self.current_dir = choice[1]
else:
# print('Ant Error: no cells found to move to!')
self.current_dir = AntAgent.get_opposite_direction(self.current_dir)
return self.get_cell_with_pheromone(target_ph, neighborhood)
        return result
def drop_pheromones(self, target_ph, cell):
if cell[1]:
for agent in cell[1]:
# Check if one of the agents present on this cell is hive or food.
if agent.id == target_ph:
cell[0].pheromones[target_ph] = self.max_ph
return
        # Otherwise build a gradient: aim for two units below the strongest
        # neighboring pheromone and raise this cell up to that level.
        max_ph = cell[0].last_neighbor_max_pheromone[target_ph]
        desired = max_ph - 2
        diff = desired - cell[0].pheromones[target_ph]
        if diff > 0:
            cell[0].pheromones[target_ph] += diff
return
def move_to(self, target_c):
# Save my current position...
self.prev_x = self.x
self.prev_y = self.y
# ... and move to the new one.
self.x = target_c.x
self.y = target_c.y
def check_if_at_hive(self, agents_at_cell):
if agents_at_cell:
for agent in agents_at_cell:
if agent.id == "hive":
# print('found the hive!')
agent.food += self.food
self.has_food = False
def check_if_at_food(self, agents_at_cell):
if agents_at_cell:
for agent in agents_at_cell:
if agent.id == "food" and not agent.dead:
# print('found the food!')
agent.food -= self.food
self.has_food = True
@staticmethod
def weighted_choice(choices):
total = sum(w for c, w, i in choices)
r = get_RNG().uniform(0, total)
up_to = 0
for c, w, i in choices:
if up_to + w > r:
return c, i
up_to += w
assert False, "Shouldn't get here"
@staticmethod
def get_opposite_direction(number):
if number < 3:
return number + 3
else:
return number - 3
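# A small illustrative sketch exercising only the static helpers above; no
# cellular automaton or ABM instance is required, although the cab RNG import
# at the top of the module must still resolve. The (cell, weight, direction)
# tuples are made-up values.
if __name__ == '__main__':
    choices = [("cell_a", 1.0, 0), ("cell_b", 3.0, 1), ("cell_c", 6.0, 2)]
    cell, direction = AntAgent.weighted_choice(choices)
    print("picked", cell, "heading", direction)
    print("opposite of direction 1 is", AntAgent.get_opposite_direction(1))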
|
|
# -*- coding: utf-8 -*-
"""Maraschino auto updater."""
# Original code by Mikie (https://github.com/Mikie-Ghost/)
import maraschino
from maraschino import RUNDIR, logger, DATA_DIR
import urllib2, tarfile, os, shutil, platform, subprocess, re
from flask import json
# define master repo as user and branch in github repo
user = 'mrkipling'
branch = 'master'
def joinRundir(path):
"""Join rundir with 'path'"""
return os.path.join(RUNDIR, path)
# file containing the currently installed version hash
version_file = os.path.join(DATA_DIR, 'Version.txt')
def writeVersion(hash):
"""Write hash to version file"""
f = open(version_file, 'w')
f.write(hash)
f.close()
def latestCommit():
"""Get SHA hash from latest commit"""
url = 'https://api.github.com/repos/%s/maraschino/commits/%s' % (user, branch)
result = urllib2.urlopen(url).read()
git = json.JSONDecoder().decode(result)
return git['sha']
def commitsBehind():
"""Calculate how many commits are missing"""
url = 'https://api.github.com/repos/%s/maraschino/compare/%s...%s' % (user, maraschino.CURRENT_COMMIT, maraschino.LATEST_COMMIT)
result = urllib2.urlopen(url).read()
git = json.JSONDecoder().decode(result)
return git['total_commits']
def checkGithub():
"""Check github repo for updates"""
logger.log('UPDATER :: Checking for updates', 'INFO')
try:
maraschino.LATEST_COMMIT = latestCommit()
if maraschino.FIRST_RUN:
maraschino.CURRENT_COMMIT = maraschino.LATEST_COMMIT
writeVersion(maraschino.CURRENT_COMMIT)
except:
logger.log('UPDATER :: Could not get latest commit from github', 'WARNING')
if maraschino.CURRENT_COMMIT:
try:
maraschino.COMMITS_BEHIND = commitsBehind()
except:
logger.log('UPDATER :: Could not get commits behind from github', 'WARNING')
if maraschino.COMMITS_BEHIND >= 1:
logger.log('UPDATER :: Update available, you are %i commits behind' % maraschino.COMMITS_BEHIND, 'INFO')
maraschino.COMMITS_COMPARE_URL = 'https://github.com/%s/maraschino/compare/%s...%s' % (user, maraschino.CURRENT_COMMIT, maraschino.LATEST_COMMIT)
elif maraschino.COMMITS_BEHIND == 0:
logger.log('UPDATER :: Up to date', 'INFO')
elif maraschino.COMMITS_BEHIND == -1:
logger.log('UPDATER :: Unknown version. Please run the updater', 'INFO')
else:
logger.log('UPDATER :: Unknown version. Please run the updater', 'INFO')
return maraschino.COMMITS_BEHIND
def RemoveUpdateFiles():
"""Remove the downloaded new version"""
logger.log('UPDATER :: Removing update files', 'INFO')
tar_file = joinRundir('maraschino.tar.gz')
update_folder = joinRundir('maraschino-update')
try:
if os.path.exists(tar_file):
logger.log('UPDATER :: Removing %s' % tar_file, 'DEBUG')
os.remove(tar_file)
except:
logger.log('UPDATER :: Could not remove %s' % tar_file, 'WARNING')
try:
if os.path.exists(update_folder):
logger.log('UPDATER :: Removing %s' % update_folder, 'DEBUG')
shutil.rmtree(update_folder)
except:
logger.log('UPDATER :: Could not remove %s' % update_folder, 'WARNING')
return
def Update():
"""Update maraschino installation"""
if maraschino.USE_GIT:
update = gitUpdate()
if update == 'complete':
return True
else:
            logger.log('UPDATER :: Git update failed, attempting tarball update', 'INFO')
tar_file = joinRundir('maraschino.tar.gz')
update_folder = joinRundir('maraschino-update')
# Download repo
try:
logger.log('UPDATER :: Downloading update file to %s' % tar_file, 'DEBUG')
url = urllib2.urlopen('https://github.com/%s/maraschino/tarball/%s' % (user, branch))
f = open(tar_file,'wb')
f.write(url.read())
f.close()
except:
logger.log('UPDATER :: Failed to download update file', 'WARNING')
RemoveUpdateFiles()
return False
# Write new hash to file
try:
logger.log('UPDATER :: Writing new hash to %s' % version_file, 'DEBUG')
writeVersion(maraschino.LATEST_COMMIT)
except:
        logger.log('UPDATER :: Failed to write new hash to version file', 'WARNING')
RemoveUpdateFiles()
return False
# Extract to temp folder
try:
logger.log('UPDATER :: Extracting %s' % tar_file, 'DEBUG')
tar = tarfile.open(tar_file)
tar.extractall(update_folder)
tar.close()
except:
        logger.log('UPDATER :: Failed to extract update file', 'WARNING')
RemoveUpdateFiles()
return False
# Overwrite old files with new ones
root_src_dir = os.path.join(update_folder, '%s-maraschino-%s' % (user, maraschino.LATEST_COMMIT[:7]))
try:
logger.log('UPDATER :: Overwriting old files', 'DEBUG')
for src_dir, dirs, files in os.walk(root_src_dir):
dst_dir = src_dir.replace(root_src_dir, RUNDIR)
if not os.path.exists(dst_dir):
os.mkdir(dst_dir)
for file_ in files:
src_file = os.path.join(src_dir, file_)
dst_file = os.path.join(dst_dir, file_)
if os.path.exists(dst_file):
os.remove(dst_file)
shutil.move(src_file, dst_dir)
except:
logger.log('UPDATER :: Failed to overwrite old files', 'WARNING')
RemoveUpdateFiles()
return False
# Clean up
RemoveUpdateFiles()
maraschino.CURRENT_COMMIT = maraschino.LATEST_COMMIT
maraschino.COMMITS_BEHIND = 0
return True
def runGit(args):
"""Run git command with args as arguments"""
git_locations = ['git']
if platform.system().lower() == 'darwin':
git_locations.append('/usr/local/git/bin/git')
output = err = None
for cur_git in git_locations:
cmd = cur_git + ' ' + args
try:
logger.log('UPDATER :: Trying to execute: "' + cmd + '" with shell in ' + RUNDIR, 'DEBUG')
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, cwd=RUNDIR)
output, err = p.communicate()
logger.log('UPDATER :: Git output: ' + output, 'DEBUG')
except OSError:
logger.log('UPDATER :: Command ' + cmd + ' didn\'t work, couldn\'t find git', 'WARNING')
continue
if 'not found' in output or "not recognized as an internal or external command" in output:
logger.log('UPDATER :: Unable to find git with command ' + cmd, 'WARNING')
output = None
elif 'fatal:' in output or err:
logger.log('UPDATER :: Git returned bad info. Are you sure this is a git installation?', 'WARNING')
output = None
elif output:
break
return (output, err)
def gitCurrentVersion():
"""Get version hash for local installation"""
output, err = runGit('rev-parse HEAD')
if not output:
logger.log('UPDATER :: Couldn\'t find latest installed version with git', 'WARNING')
maraschino.USE_GIT = False
return None
current_commit = output.strip()
if not re.match('^[a-z0-9]+$', current_commit):
logger.log('UPDATER :: Git output doesn\'t look like a hash, not using it', 'WARNING')
return None
writeVersion(current_commit)
    return current_commit
def gitUpdate():
"""Update Maraschino using git"""
output, err = runGit('pull origin %s' % branch)
if not output:
logger.log('Couldn\'t download latest version', 'ERROR')
maraschino.USE_GIT = False
return 'failed'
for line in output.split('\n'):
if 'Already up-to-date.' in line:
logger.log('UPDATER :: Already up to date', 'INFO')
logger.log('UPDATER :: Git output: ' + str(output), 'DEBUG')
return 'complete'
elif 'Aborting' in line:
logger.log('UPDATER :: Unable to update from git: '+line, 'ERROR')
logger.log('UPDATER :: Output: ' + str(output), 'DEBUG')
maraschino.USE_GIT = False
return 'failed'
maraschino.CURRENT_COMMIT = maraschino.LATEST_COMMIT
writeVersion(maraschino.LATEST_COMMIT)
maraschino.COMMITS_BEHIND = 0
return 'complete'
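# Illustrative sketch (not part of the original module): one way a caller
# might drive the update flow above, assuming the version-check code earlier
# in this file has already populated maraschino.COMMITS_BEHIND.
def exampleUpdateFlow():
    if maraschino.COMMITS_BEHIND > 0:
        if Update():
            logger.log('UPDATER :: Update applied, restart required', 'INFO')
        else:
            logger.log('UPDATER :: Update failed', 'WARNING')
    else:
        logger.log('UPDATER :: Already up to date', 'INFO')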
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sqlalchemy.orm import exc
from neutron.common import exceptions as q_exc
import neutron.db.api as db
from neutron.db import models_v2
from neutron.db import securitygroups_db as sg_db
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.plugins.mlnx.common import config # noqa
from neutron.plugins.mlnx.db import mlnx_models_v2
LOG = logging.getLogger(__name__)
def initialize():
db.configure_db()
def _remove_non_allocatable_vlans(session, allocations,
physical_network, vlan_ids):
if physical_network in allocations:
for entry in allocations[physical_network]:
try:
# see if vlan is allocatable
vlan_ids.remove(entry.segmentation_id)
except KeyError:
# it's not allocatable, so check if its allocated
if not entry.allocated:
# it's not, so remove it from table
LOG.debug(_(
"Removing vlan %(seg_id)s on "
"physical network "
"%(net)s from pool"),
{'seg_id': entry.segmentation_id,
'net': physical_network})
session.delete(entry)
del allocations[physical_network]
def _add_missing_allocatable_vlans(session, physical_network, vlan_ids):
for vlan_id in sorted(vlan_ids):
entry = mlnx_models_v2.SegmentationIdAllocation(physical_network,
vlan_id)
session.add(entry)
def _remove_unconfigured_vlans(session, allocations):
for entries in allocations.itervalues():
for entry in entries:
if not entry.allocated:
LOG.debug(_("Removing vlan %(seg_id)s on physical "
"network %(net)s from pool"),
{'seg_id': entry.segmentation_id,
'net': entry.physical_network})
session.delete(entry)
def sync_network_states(network_vlan_ranges):
"""Synchronize network_states table with current configured VLAN ranges."""
session = db.get_session()
with session.begin():
# get existing allocations for all physical networks
allocations = dict()
entries = (session.query(mlnx_models_v2.SegmentationIdAllocation).
all())
for entry in entries:
allocations.setdefault(entry.physical_network, set()).add(entry)
# process vlan ranges for each configured physical network
for physical_network, vlan_ranges in network_vlan_ranges.iteritems():
# determine current configured allocatable vlans for this
# physical network
vlan_ids = set()
for vlan_range in vlan_ranges:
vlan_ids |= set(xrange(vlan_range[0], vlan_range[1] + 1))
# remove from table unallocated vlans not currently allocatable
_remove_non_allocatable_vlans(session, allocations,
physical_network, vlan_ids)
# add missing allocatable vlans to table
_add_missing_allocatable_vlans(session, physical_network, vlan_ids)
# remove from table unallocated vlans for any unconfigured physical
# networks
_remove_unconfigured_vlans(session, allocations)
def get_network_state(physical_network, segmentation_id):
"""Get entry of specified network."""
session = db.get_session()
qry = session.query(mlnx_models_v2.SegmentationIdAllocation)
qry = qry.filter_by(physical_network=physical_network,
segmentation_id=segmentation_id)
return qry.first()
def reserve_network(session):
with session.begin(subtransactions=True):
entry = (session.query(mlnx_models_v2.SegmentationIdAllocation).
filter_by(allocated=False).
with_lockmode('update').
first())
if not entry:
raise q_exc.NoNetworkAvailable()
LOG.debug(_("Reserving vlan %(seg_id)s on physical network "
"%(net)s from pool"),
{'seg_id': entry.segmentation_id,
'net': entry.physical_network})
entry.allocated = True
return (entry.physical_network, entry.segmentation_id)
def reserve_specific_network(session, physical_network, segmentation_id):
with session.begin(subtransactions=True):
log_args = {'seg_id': segmentation_id, 'phy_net': physical_network}
try:
entry = (session.query(mlnx_models_v2.SegmentationIdAllocation).
filter_by(physical_network=physical_network,
segmentation_id=segmentation_id).
with_lockmode('update').
one())
if entry.allocated:
raise q_exc.VlanIdInUse(vlan_id=segmentation_id,
physical_network=physical_network)
LOG.debug(_("Reserving specific vlan %(seg_id)s "
"on physical network %(phy_net)s from pool"),
log_args)
entry.allocated = True
except exc.NoResultFound:
LOG.debug(_("Reserving specific vlan %(seg_id)s on "
"physical network %(phy_net)s outside pool"),
log_args)
entry = mlnx_models_v2.SegmentationIdAllocation(physical_network,
segmentation_id)
entry.allocated = True
session.add(entry)
def release_network(session, physical_network,
segmentation_id, network_vlan_ranges):
with session.begin(subtransactions=True):
log_args = {'seg_id': segmentation_id, 'phy_net': physical_network}
try:
state = (session.query(mlnx_models_v2.SegmentationIdAllocation).
filter_by(physical_network=physical_network,
segmentation_id=segmentation_id).
with_lockmode('update').
one())
state.allocated = False
inside = False
for vlan_range in network_vlan_ranges.get(physical_network, []):
if (segmentation_id >= vlan_range[0] and
segmentation_id <= vlan_range[1]):
inside = True
break
if inside:
LOG.debug(_("Releasing vlan %(seg_id)s "
"on physical network "
"%(phy_net)s to pool"),
log_args)
else:
LOG.debug(_("Releasing vlan %(seg_id)s "
"on physical network "
"%(phy_net)s outside pool"),
log_args)
session.delete(state)
except exc.NoResultFound:
LOG.warning(_("vlan_id %(seg_id)s on physical network "
"%(phy_net)s not found"),
log_args)
def add_network_binding(session, network_id, network_type,
physical_network, vlan_id):
with session.begin(subtransactions=True):
binding = mlnx_models_v2.NetworkBinding(network_id, network_type,
physical_network, vlan_id)
session.add(binding)
def get_network_binding(session, network_id):
return (session.query(mlnx_models_v2.NetworkBinding).
filter_by(network_id=network_id).first())
def add_port_profile_binding(session, port_id, vnic_type):
with session.begin(subtransactions=True):
binding = mlnx_models_v2.PortProfileBinding(port_id, vnic_type)
session.add(binding)
def get_port_profile_binding(session, port_id):
return (session.query(mlnx_models_v2.PortProfileBinding).
filter_by(port_id=port_id).first())
def get_port_from_device(device):
"""Get port from database."""
LOG.debug(_("get_port_from_device() called"))
session = db.get_session()
sg_binding_port = sg_db.SecurityGroupPortBinding.port_id
query = session.query(models_v2.Port,
sg_db.SecurityGroupPortBinding.security_group_id)
query = query.outerjoin(sg_db.SecurityGroupPortBinding,
models_v2.Port.id == sg_binding_port)
query = query.filter(models_v2.Port.id.startswith(device))
port_and_sgs = query.all()
if not port_and_sgs:
return
port = port_and_sgs[0][0]
plugin = manager.NeutronManager.get_plugin()
port_dict = plugin._make_port_dict(port)
port_dict['security_groups'] = [
sg_id for port_in_db, sg_id in port_and_sgs if sg_id
]
port_dict['security_group_rules'] = []
port_dict['security_group_source_groups'] = []
port_dict['fixed_ips'] = [ip['ip_address']
for ip in port['fixed_ips']]
return port_dict
def get_port_from_device_mac(device_mac):
"""Get port from database."""
LOG.debug(_("Get_port_from_device_mac() called"))
session = db.get_session()
qry = session.query(models_v2.Port).filter_by(mac_address=device_mac)
return qry.first()
def set_port_status(port_id, status):
"""Set the port status."""
LOG.debug(_("Set_port_status as %s called"), status)
session = db.get_session()
try:
port = session.query(models_v2.Port).filter_by(id=port_id).one()
port['status'] = status
session.merge(port)
session.flush()
except exc.NoResultFound:
raise q_exc.PortNotFound(port_id=port_id)
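# Illustrative sketch (not part of the original module): a rough outline of
# how the helpers above might be combined to seed the allocation table and
# reserve a VLAN. The physical network name and VLAN range below are made-up
# example values, and a configured database is assumed.
def example_vlan_allocation_flow():
    network_vlan_ranges = {'default': [(100, 110)]}
    initialize()
    sync_network_states(network_vlan_ranges)
    session = db.get_session()
    physical_network, segmentation_id = reserve_network(session)
    add_network_binding(session, 'example-net-id', 'vlan',
                        physical_network, segmentation_id)
    release_network(session, physical_network, segmentation_id,
                    network_vlan_ranges)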
|
|
import pyglet
def loadBoat1():
return {
'texture': pyglet.image.load('boat1tex.png').get_texture(),
'vertices': (
(0, -9.3916, 0.726013),
(0, -0.658051, -0.372787),
(2.15999, -5.97165, 0.726013),
(1.5168, -1.10376, -0.164337),
(2.71582, -3.53603, 1.10835),
(2.15999, -5.97165, 0.726013),
(2.15999, -5.97165, 0.726013),
(0, -5.10858, 0.998764),
(0, -9.3916, 0.726013),
(0, -0.658051, -0.372787),
(0, -9.3916, 0.726013),
(-2.15999, -5.97165, 0.726013),
(-1.5168, -1.10376, -0.164337),
(0, -0.658051, -0.372787),
(-2.15999, -5.97165, 0.726013),
(-1.5168, -1.10376, -0.164337),
(-2.15999, -5.97165, 0.726013),
(-2.71582, -3.53603, 1.10835),
(0, -9.3916, 0.726013),
(0, -5.10858, 0.998764),
(-2.15999, -5.97165, 0.726013),
(2.15999, -5.97165, 0.726013),
(0, -0.658051, -0.372787),
(1.5168, -1.10376, -0.164337),
(2.71582, -3.53603, 1.10835),
(1.35791, -3.88701, 1.05356),
(2.15999, -5.97165, 0.726013),
(0, -5.10858, 0.998764),
(-1.35791, -3.88701, 1.05356),
(-2.15999, -5.97165, 0.726013),
(1.5168, -1.10376, -0.164337),
(0, -0.658051, -0.372787),
(1.29581, 4.017, 0.621231),
(-2.71582, -3.53603, 1.10835),
(-2.07408, 1.72919, 0.862762),
(-1.5168, -1.10376, -0.164337),
(-1.29581, 4.017, 0.621231),
(0, -0.658051, -0.372787),
(-1.5168, -1.10376, -0.164337),
(2.07408, 1.72919, 0.862762),
(1.29581, 4.017, 0.621231),
(0, 0.152588, 1.80235),
(-2.07408, 1.72919, 0.862762),
(0, 0.152588, 1.80235),
(-1.29581, 4.017, 0.621231),
(0, 4.017, -0.513504),
(0, -0.658051, -0.372787),
(-1.29581, 4.017, 0.621231),
(-1.29581, 4.017, 0.621231),
(1.29581, 4.017, 0.621231),
(0, 4.017, -0.513504),
(0, -0.658051, -0.372787),
(0, 4.017, -0.513504),
(1.29581, 4.017, 0.621231),
(-1.29581, 4.017, 0.621231),
(0, 0.152588, 1.80235),
(0, 4.017, 1.79726),
(0, 0.152588, 1.80235),
(1.29581, 4.017, 0.621231),
(0, 4.017, 1.79726),
(-1.29581, 4.017, 0.621231),
(0, 4.017, 1.79726),
(1.29581, 4.017, 0.621231),
(-1.35791, -3.88701, 1.05356),
(-1.58604, -1.8672, 1.32179),
(-2.71582, -3.53603, 1.10835),
(2.07408, 1.72919, 0.862762),
(1.58604, -1.8672, 1.32179),
(2.71582, -3.53603, 1.10835),
(2.71582, -3.53603, 1.10835),
(1.5168, -1.10376, -0.164337),
(2.07408, 1.72919, 0.862762),
(3.41731, 1.87068, 0.76561),
(1.5168, -1.10376, -0.164337),
(2.77556, 4.15849, 0.520035),
(1.29581, 4.017, 0.621231),
(2.77556, 4.15849, 0.520035),
(1.5168, -1.10376, -0.164337),
(3.41731, 1.87068, 0.76561),
(2.77556, 4.15849, 0.520035),
(2.07408, 1.72919, 0.862762),
(-2.07408, 1.72919, 0.862762),
(-3.41731, 1.87068, 0.76561),
(-1.5168, -1.10376, -0.164337),
(-1.29581, 4.017, 0.621231),
(-2.77556, 4.15849, 0.520035),
(-2.07408, 1.72919, 0.862762),
(-3.41731, 1.87068, 0.76561),
(-2.77556, 4.15849, 0.520035),
(-1.5168, -1.10376, -0.164337),
(1.35791, -3.88701, 1.05356),
(0, -3.99136, 1.52379),
(0, -5.10858, 0.998764),
(0.678955, -1.9857, 2.14563),
(0, 0.152588, 1.80235),
(-0.678955, -1.9857, 2.14563),
(-1.35791, -3.88701, 1.05356),
(0, -5.10858, 0.998764),
(0, -3.99136, 1.52379),
(-0.678955, -1.9857, 2.14563),
(0, -3.99136, 1.52379),
(0.678955, -1.9857, 2.14563),
(0, 0.152588, 1.80235),
(0.678955, -1.9857, 2.14563),
(2.07408, 1.72919, 0.862762),
(1.58604, -1.8672, 1.32179),
(2.07408, 1.72919, 0.862762),
(0.678955, -1.9857, 2.14563),
(0, 0.152588, 1.80235),
(-2.07408, 1.72919, 0.862762),
(-0.678955, -1.9857, 2.14563),
(-0.678955, -1.9857, 2.14563),
(-2.07408, 1.72919, 0.862762),
(-1.58604, -1.8672, 1.32179),
(1.35791, -3.88701, 1.05356),
(2.71582, -3.53603, 1.10835),
(1.58604, -1.8672, 1.32179),
(-2.07408, 1.72919, 0.862762),
(-2.71582, -3.53603, 1.10835),
(-1.58604, -1.8672, 1.32179),
(-1.29581, 4.017, 0.621231),
(-1.5168, -1.10376, -0.164337),
(-2.77556, 4.15849, 0.520035),
(2.07408, 1.72919, 0.862762),
(1.5168, -1.10376, -0.164337),
(3.41731, 1.87068, 0.76561),
(-2.71582, -3.53603, 1.10835),
(-2.15999, -5.97165, 0.726013),
(-1.35791, -3.88701, 1.05356),
(0, -5.10858, 0.998764),
(2.15999, -5.97165, 0.726013),
(1.35791, -3.88701, 1.05356),
(1.29581, 4.017, 0.621231),
(2.07408, 1.72919, 0.862762),
(2.77556, 4.15849, 0.520035),
(-3.41731, 1.87068, 0.76561),
(-2.07408, 1.72919, 0.862762),
(-2.77556, 4.15849, 0.520035),
(0, -3.99136, 1.52379),
(-0.678955, -1.9857, 2.14563),
(-1.35791, -3.88701, 1.05356),
(1.58604, -1.8672, 1.32179),
(0.678955, -1.9857, 2.14563),
(1.35791, -3.88701, 1.05356),
(0, -3.99136, 1.52379),
(1.35791, -3.88701, 1.05356),
(0.678955, -1.9857, 2.14563),
(-1.58604, -1.8672, 1.32179),
(-1.35791, -3.88701, 1.05356),
(-0.678955, -1.9857, 2.14563)),
'indices': (
0, 1, 2,
3, 4, 5,
6, 7, 8,
9, 10, 11,
12, 13, 14,
15, 16, 17,
18, 19, 20,
21, 22, 23,
24, 25, 26,
27, 28, 29,
30, 31, 32,
33, 34, 35,
36, 37, 38,
39, 40, 41,
42, 43, 44,
45, 46, 47,
48, 49, 50,
51, 52, 53,
54, 55, 56,
57, 58, 59,
60, 61, 62,
63, 64, 65,
66, 67, 68,
69, 70, 71,
72, 73, 74,
75, 76, 77,
78, 79, 80,
81, 82, 83,
84, 85, 86,
87, 88, 89,
90, 91, 92,
93, 94, 95,
96, 97, 98,
99, 100, 101,
102, 103, 104,
105, 106, 107,
108, 109, 110,
111, 112, 113,
114, 115, 116,
117, 118, 119,
120, 121, 122,
123, 124, 125,
126, 127, 128,
129, 130, 131,
132, 133, 134,
135, 136, 137,
138, 139, 140,
141, 142, 143,
144, 145, 146,
147, 148, 149),
'uvmap': (
(0.0381012, -0.261093),
(0.629089, -0.261734),
(0.26564, -0.406982),
(0.598373, -0.370407),
(0.419739, -0.454941),
(0.26564, -0.406982),
(0.751053, -0.829849),
(0.670044, -0.688614),
(0.966003, -0.648087),
(0.629089, -0.261734),
(0.0381012, -0.261093),
(0.266113, -0.11615),
(0.596466, -0.154099),
(0.629089, -0.261734),
(0.266113, -0.11615),
(0.596466, -0.154099),
(0.266113, -0.11615),
(0.420105, -0.0706634),
(0.966003, -0.648087),
(0.670044, -0.688614),
(0.71019, -0.53093),
(0.26564, -0.406982),
(0.629089, -0.261734),
(0.598373, -0.370407),
(0.586777, -0.888382),
(0.599594, -0.79364),
(0.751053, -0.829849),
(0.670044, -0.688614),
(0.574402, -0.606201),
(0.71019, -0.53093),
(0.598373, -0.370407),
(0.629089, -0.261734),
(0.937607, -0.357208),
(0.420105, -0.0706634),
(0.734055, -0.0180816),
(0.596466, -0.154099),
(0.929169, -0.153946),
(0.629089, -0.261734),
(0.596466, -0.154099),
(0.244492, -0.895523),
(0.089508, -0.878281),
(0.32309, -0.734955),
(0.2061, -0.600281),
(0.32309, -0.734955),
(0.0610046, -0.656113),
(0.931931, -0.254898),
(0.629089, -0.261734),
(0.929169, -0.153946),
(0.0371246, -0.443939),
(0.20549, -0.443939),
(0.121307, -0.517654),
(0.629089, -0.261734),
(0.931931, -0.254898),
(0.937607, -0.357208),
(0.0610046, -0.656113),
(0.32309, -0.734955),
(0.0757598, -0.767059),
(0.32309, -0.734955),
(0.089508, -0.878281),
(0.0757598, -0.767059),
(0.223709, -0.52034),
(0.307892, -0.443939),
(0.392059, -0.52034),
(0.574402, -0.606201),
(0.440201, -0.606033),
(0.537262, -0.518631),
(0.244492, -0.895523),
(0.469864, -0.829056),
(0.586777, -0.888382),
(0.419739, -0.454941),
(0.598373, -0.370407),
(0.74202, -0.507355),
(0.80455, -0.490738),
(0.598373, -0.370407),
(0.949036, -0.447693),
(0.937607, -0.357208),
(0.949036, -0.447693),
(0.598373, -0.370407),
(0.253433, -0.982468),
(0.0995025, -0.973648),
(0.244492, -0.895523),
(0.734055, -0.0180816),
(0.79274, -0.0307617),
(0.596466, -0.154099),
(0.0610046, -0.656113),
(0.0473632, -0.561722),
(0.2061, -0.600281),
(0.79274, -0.0307617),
(0.933807, -0.0664215),
(0.596466, -0.154099),
(0.599594, -0.79364),
(0.587357, -0.699814),
(0.670044, -0.688614),
(0.466156, -0.753265),
(0.32309, -0.734955),
(0.45636, -0.68013),
(0.574402, -0.606201),
(0.670044, -0.688614),
(0.587357, -0.699814),
(0.45636, -0.68013),
(0.587357, -0.699814),
(0.466156, -0.753265),
(0.32309, -0.734955),
(0.466156, -0.753265),
(0.244492, -0.895523),
(0.469864, -0.829056),
(0.244492, -0.895523),
(0.466156, -0.753265),
(0.32309, -0.734955),
(0.2061, -0.600281),
(0.45636, -0.68013),
(0.45636, -0.68013),
(0.2061, -0.600281),
(0.440201, -0.606033),
(0.599594, -0.79364),
(0.586777, -0.888382),
(0.469864, -0.829056),
(0.2061, -0.600281),
(0.537262, -0.518631),
(0.440201, -0.606033),
(0.929169, -0.153946),
(0.596466, -0.154099),
(0.933807, -0.0664215),
(0.74202, -0.507355),
(0.598373, -0.370407),
(0.80455, -0.490738),
(0.537262, -0.518631),
(0.71019, -0.53093),
(0.574402, -0.606201),
(0.670044, -0.688614),
(0.751053, -0.829849),
(0.599594, -0.79364),
(0.089508, -0.878281),
(0.244492, -0.895523),
(0.0995025, -0.973648),
(0.192841, -0.514603),
(0.2061, -0.600281),
(0.0473632, -0.561722),
(0.587357, -0.699814),
(0.45636, -0.68013),
(0.574402, -0.606201),
(0.469864, -0.829056),
(0.466156, -0.753265),
(0.599594, -0.79364),
(0.587357, -0.699814),
(0.599594, -0.79364),
(0.466156, -0.753265),
(0.440201, -0.606033),
(0.574402, -0.606201),
(0.45636, -0.68013))}
def loadBoat2():
return {
'texture': pyglet.image.load('boat2tex.png').get_texture(),
'vertices': (
(0, -11.7114, 0.0783996),
(0, -1.77814, -1.28052),
(2.15999, -5.97165, 0.0783996),
(2.62422, -1.10376, -0.811966),
(1.00511, -3.53603, 0.460724),
(2.15999, -5.97165, 0.0783996),
(0, -3.6402, 0.996384),
(0, -5.97916, 0.351135),
(1.00511, -3.53603, 0.460724),
(3.58784, 3.77087, 0.671005),
(3.04967, 0.274628, 0.671005),
(3.61475, 2.959, -0.435928),
(2.62422, -1.10376, -0.811966),
(1.96803, -0.236206, 0.873077),
(1.00511, -3.53603, 0.460724),
(3.04967, 0.274628, 0.671005),
(1.42467, 3.89023, 0.751083),
(1.96803, -0.236206, 0.873077),
(3.04967, 0.274628, 0.671005),
(2.62422, -1.10376, -0.811966),
(3.61475, 0.523361, -0.435928),
(2.26152, 4.90634, 0.289413),
(2.27435, 4.92723, -0.41684),
(1.42467, 3.89023, 0.751083),
(3.58784, 3.77087, 0.671005),
(2.70465, 4.90634, 0.309814),
(1.42467, 3.89023, 0.751083),
(0, -3.6402, 0.996384),
(1.28694, 0.56517, 1.51672),
(-1.33673, 0.56517, 1.51672),
(2.15999, -5.97165, 0.0783996),
(0, -5.97916, 0.351135),
(0, -11.7114, 0.0783996),
(0, -1.77814, -1.28052),
(0, -11.7114, 0.0783996),
(-2.15999, -5.97165, 0.0783996),
(-2.62422, -1.10376, -0.811966),
(0, -1.77814, -1.28052),
(-2.15999, -5.97165, 0.0783996),
(-2.62422, -1.10376, -0.811966),
(-2.15999, -5.97165, 0.0783996),
(-1.00511, -3.53603, 0.460724),
(0, -3.6402, 0.996384),
(-1.00511, -3.53603, 0.460724),
(0, -5.97916, 0.351135),
(-3.61475, 0.523361, -0.435928),
(-3.04967, 0.274628, 0.671005),
(-3.61475, 2.959, -0.435928),
(-1.33673, 0.56517, 1.51672),
(-1.96803, -0.236206, 0.873077),
(-1.00511, -3.53603, 0.460724),
(-3.58784, 3.77087, 0.671005),
(-3.04967, 0.274628, 0.671005),
(-1.42467, 3.89023, 0.751083),
(-1.43999, 3.79219, -0.811966),
(-2.27435, 4.92723, -0.41684),
(-1.42467, 3.89023, 0.751083),
(-1.42467, 3.89023, 0.751083),
(-2.26152, 4.90634, 0.289413),
(-3.58784, 3.77087, 0.671005),
(-2.62422, -1.10376, -0.811966),
(-1.96803, -0.236206, 0.873077),
(-3.04967, 0.274628, 0.671005),
(0, -11.7114, 0.0783996),
(0, -5.97916, 0.351135),
(-2.15999, -5.97165, 0.0783996),
(1.00511, -3.53603, 0.460724),
(1.28694, 0.56517, 1.51672),
(0, -3.6402, 0.996384),
(-1.00511, -3.53603, 0.460724),
(0, -3.6402, 0.996384),
(-1.33673, 0.56517, 1.51672),
(-1.42467, 3.89023, 0.751083),
(-1.33673, 0.56517, 1.51672),
(0, 4.0096, 1.76315),
(0, 4.0096, 1.76315),
(1.42467, 3.89023, 0.751083),
(0, 7.38785, 1.73131),
(2.15999, -5.97165, 0.0783996),
(0, -1.77814, -1.28052),
(2.62422, -1.10376, -0.811966),
(2.15999, -5.97165, 0.0783996),
(1.00511, -3.53603, 0.460724),
(0, -5.97916, 0.351135),
(3.61475, 0.523361, -0.435928),
(3.61475, 2.959, -0.435928),
(3.04967, 0.274628, 0.671005),
(3.58784, 3.77087, 0.671005),
(1.42467, 3.89023, 0.751083),
(3.04967, 0.274628, 0.671005),
(3.58784, 3.77087, 0.671005),
(3.61475, 2.959, -0.435928),
(2.70465, 4.90634, 0.309814),
(1.43999, 3.79219, -0.811966),
(1.42467, 3.89023, 0.751083),
(2.27435, 4.92723, -0.41684),
(2.26152, 4.90634, 0.289413),
(1.42467, 3.89023, 0.751083),
(2.70465, 4.90634, 0.309814),
(-2.15999, -5.97165, 0.0783996),
(0, -5.97916, 0.351135),
(-1.00511, -3.53603, 0.460724),
(-3.58784, 3.77087, 0.671005),
(-3.61475, 2.959, -0.435928),
(-3.04967, 0.274628, 0.671005),
(-3.04967, 0.274628, 0.671005),
(-3.61475, 0.523361, -0.435928),
(-2.62422, -1.10376, -0.811966),
(-1.43999, 3.79219, -0.811966),
(-3.61475, 2.959, -0.435928),
(-2.27435, 4.92723, -0.41684),
(-2.26152, 4.90634, 0.289413),
(-1.42467, 3.89023, 0.751083),
(-2.27435, 4.92723, -0.41684),
(-2.70465, 4.90634, 0.309814),
(-3.58784, 3.77087, 0.671005),
(-2.26152, 4.90634, 0.289413),
(-1.33673, 0.56517, 1.51672),
(1.28694, 0.56517, 1.51672),
(0, 4.0096, 1.76315),
(1.42467, 3.89023, 0.751083),
(0, 4.0096, 1.76315),
(1.28694, 0.56517, 1.51672),
(1.43999, 3.79219, -0.811966),
(0, -1.77814, -1.28052),
(-1.43999, 3.79219, -0.811966),
(0, -1.77814, -1.28052),
(-2.62422, -1.10376, -0.811966),
(-1.43999, 3.79219, -0.811966),
(3.61475, 2.959, -0.435928),
(3.61475, 0.523361, -0.435928),
(1.43999, 3.79219, -0.811966),
(3.61475, 0.523361, -0.435928),
(2.62422, -1.10376, -0.811966),
(1.43999, 3.79219, -0.811966),
(-3.61475, 2.959, -0.435928),
(-1.43999, 3.79219, -0.811966),
(-3.61475, 0.523361, -0.435928),
(-3.61475, 0.523361, -0.435928),
(-1.43999, 3.79219, -0.811966),
(-2.62422, -1.10376, -0.811966),
(0, -1.77814, -1.28052),
(1.43999, 3.79219, -0.811966),
(2.62422, -1.10376, -0.811966),
(2.62422, -1.10376, -0.811966),
(3.04967, 0.274628, 0.671005),
(1.96803, -0.236206, 0.873077),
(1.28694, 0.56517, 1.51672),
(1.96803, -0.236206, 0.873077),
(1.42467, 3.89023, 0.751083),
(-1.33673, 0.56517, 1.51672),
(-1.42467, 3.89023, 0.751083),
(-1.96803, -0.236206, 0.873077),
(-3.04967, 0.274628, 0.671005),
(-1.96803, -0.236206, 0.873077),
(-1.42467, 3.89023, 0.751083),
(-2.62422, -1.10376, -0.811966),
(-1.00511, -3.53603, 0.460724),
(-1.96803, -0.236206, 0.873077),
(1.28694, 0.56517, 1.51672),
(1.00511, -3.53603, 0.460724),
(1.96803, -0.236206, 0.873077),
(-2.26152, 4.90634, 0.289413),
(-2.27435, 4.92723, -0.41684),
(-2.70465, 4.90634, 0.309814),
(-3.58784, 3.77087, 0.671005),
(-2.70465, 4.90634, 0.309814),
(-3.61475, 2.959, -0.435928),
(-2.27435, 4.92723, -0.41684),
(-3.61475, 2.959, -0.435928),
(-2.70465, 4.90634, 0.309814),
(2.26152, 4.90634, 0.289413),
(2.70465, 4.90634, 0.309814),
(2.27435, 4.92723, -0.41684),
(1.43999, 3.79219, -0.811966),
(2.27435, 4.92723, -0.41684),
(3.61475, 2.959, -0.435928),
(2.70465, 4.90634, 0.309814),
(3.61475, 2.959, -0.435928),
(2.27435, 4.92723, -0.41684),
(1.42467, 3.89023, 0.751083),
(1.43999, 3.79219, -0.811966),
(0, 7.38785, 1.73131),
(0, 4.0096, 1.76315),
(0, 7.38785, 1.73131),
(-1.42467, 3.89023, 0.751083),
(1.43999, 3.79219, -0.811966),
(-1.43999, 3.79219, -0.811966),
(0, 7.38785, 1.73131),
(-1.42467, 3.89023, 0.751083),
(0, 7.38785, 1.73131),
(-1.43999, 3.79219, -0.811966)),
'indices': (
0, 1, 2,
3, 4, 5,
6, 7, 8,
9, 10, 11,
12, 13, 14,
15, 16, 17,
18, 19, 20,
21, 22, 23,
24, 25, 26,
27, 28, 29,
30, 31, 32,
33, 34, 35,
36, 37, 38,
39, 40, 41,
42, 43, 44,
45, 46, 47,
48, 49, 50,
51, 52, 53,
54, 55, 56,
57, 58, 59,
60, 61, 62,
63, 64, 65,
66, 67, 68,
69, 70, 71,
72, 73, 74,
75, 76, 77,
78, 79, 80,
81, 82, 83,
84, 85, 86,
87, 88, 89,
90, 91, 92,
93, 94, 95,
96, 97, 98,
99, 100, 101,
102, 103, 104,
105, 106, 107,
108, 109, 110,
111, 112, 113,
114, 115, 116,
117, 118, 119,
120, 121, 122,
123, 124, 125,
126, 127, 128,
129, 130, 131,
132, 133, 134,
135, 136, 137,
138, 139, 140,
141, 142, 143,
144, 145, 146,
147, 148, 149,
150, 151, 152,
153, 154, 155,
156, 157, 158,
159, 160, 161,
162, 163, 164,
165, 166, 167,
168, 169, 170,
171, 172, 173,
174, 175, 176,
177, 178, 179,
180, 181, 182,
183, 184, 185,
186, 187, 188,
189, 190, 191),
'uvmap': (
(0.525665, -1),
(0.506287, -0.403488),
(0.217407, -0.684235),
(0.14389, -0.384583),
(0.371979, -0.561615),
(0.217407, -0.684235),
(0.510803, -0.584961),
(0.514938, -0.692169),
(0.371979, -0.561615),
(0.00263977, -0.166748),
(0.0833587, -0.359756),
(0, -0.173569),
(0.14389, -0.384583),
(0.233337, -0.394058),
(0.371979, -0.561615),
(0.0833587, -0.359756),
(0.300293, -0.161026),
(0.233337, -0.394058),
(0.0833587, -0.359756),
(0.14389, -0.384583),
(0.00460815, -0.308365),
(0.182938, -0.0896148),
(0.180832, -0.0641479),
(0.300293, -0.161026),
(0.00263977, -0.166748),
(0.121948, -0.0906982),
(0.300293, -0.161026),
(0.510803, -0.584961),
(0.322433, -0.376129),
(0.68367, -0.37384),
(0.217407, -0.684235),
(0.514938, -0.692169),
(0.525665, -1),
(0.506287, -0.403488),
(0.525665, -1),
(0.81221, -0.680481),
(0.866531, -0.38002),
(0.506287, -0.403488),
(0.81221, -0.680481),
(0.866531, -0.38002),
(0.81221, -0.680481),
(0.648758, -0.559875),
(0.510803, -0.584961),
(0.648758, -0.559875),
(0.514938, -0.692169),
(1, -0.302063),
(0.923141, -0.354462),
(0.995377, -0.167282),
(0.68367, -0.37384),
(0.775269, -0.390625),
(0.648758, -0.559875),
(0.990631, -0.160507),
(0.923141, -0.354462),
(0.692596, -0.158539),
(0.694214, -0.110107),
(0.807114, -0.0601806),
(0.692596, -0.158539),
(0.692596, -0.158539),
(0.805695, -0.0856781),
(0.990631, -0.160507),
(0.866531, -0.38002),
(0.775269, -0.390625),
(0.923141, -0.354462),
(0.525665, -1),
(0.514938, -0.692169),
(0.81221, -0.680481),
(0.371979, -0.561615),
(0.322433, -0.376129),
(0.510803, -0.584961),
(0.648758, -0.559875),
(0.510803, -0.584961),
(0.68367, -0.37384),
(0.692596, -0.158539),
(0.68367, -0.37384),
(0.496658, -0.188034),
(0.496658, -0.188034),
(0.300293, -0.161026),
(0.490265, 0),
(0.217407, -0.684235),
(0.506287, -0.403488),
(0.14389, -0.384583),
(0.217407, -0.684235),
(0.371979, -0.561615),
(0.514938, -0.692169),
(0.00460815, -0.308365),
(0, -0.173569),
(0.0833587, -0.359756),
(0.00263977, -0.166748),
(0.300293, -0.161026),
(0.0833587, -0.359756),
(0.00263977, -0.166748),
(0, -0.173569),
(0.121948, -0.0906982),
(0.297684, -0.112625),
(0.300293, -0.161026),
(0.180832, -0.0641479),
(0.182938, -0.0896148),
(0.300293, -0.161026),
(0.121948, -0.0906982),
(0.81221, -0.680481),
(0.514938, -0.692169),
(0.648758, -0.559875),
(0.990631, -0.160507),
(0.995377, -0.167282),
(0.923141, -0.354462),
(0.923141, -0.354462),
(1, -0.302063),
(0.866531, -0.38002),
(0.694214, -0.110107),
(0.995377, -0.167282),
(0.807114, -0.0601806),
(0.805695, -0.0856781),
(0.692596, -0.158539),
(0.807114, -0.0601806),
(0.866714, -0.0859985),
(0.990631, -0.160507),
(0.805695, -0.0856781),
(0.68367, -0.37384),
(0.322433, -0.376129),
(0.496658, -0.188034),
(0.300293, -0.161026),
(0.496658, -0.188034),
(0.322433, -0.376129),
(0.297684, -0.112625),
(0.506287, -0.403488),
(0.694214, -0.110107),
(0.506287, -0.403488),
(0.866531, -0.38002),
(0.694214, -0.110107),
(0, -0.173569),
(0.00460815, -0.308365),
(0.297684, -0.112625),
(0.00460815, -0.308365),
(0.14389, -0.384583),
(0.297684, -0.112625),
(0.995377, -0.167282),
(0.694214, -0.110107),
(1, -0.302063),
(1, -0.302063),
(0.694214, -0.110107),
(0.866531, -0.38002),
(0.506287, -0.403488),
(0.297684, -0.112625),
(0.14389, -0.384583),
(0.14389, -0.384583),
(0.0833587, -0.359756),
(0.233337, -0.394058),
(0.322433, -0.376129),
(0.233337, -0.394058),
(0.300293, -0.161026),
(0.68367, -0.37384),
(0.692596, -0.158539),
(0.775269, -0.390625),
(0.923141, -0.354462),
(0.775269, -0.390625),
(0.692596, -0.158539),
(0.866531, -0.38002),
(0.648758, -0.559875),
(0.775269, -0.390625),
(0.322433, -0.376129),
(0.371979, -0.561615),
(0.233337, -0.394058),
(0.805695, -0.0856781),
(0.807114, -0.0601806),
(0.866714, -0.0859985),
(0.990631, -0.160507),
(0.866714, -0.0859985),
(0.995377, -0.167282),
(0.807114, -0.0601806),
(0.995377, -0.167282),
(0.866714, -0.0859985),
(0.182938, -0.0896148),
(0.121948, -0.0906982),
(0.180832, -0.0641479),
(0.297684, -0.112625),
(0.180832, -0.0641479),
(0, -0.173569),
(0.121948, -0.0906982),
(0, -0.173569),
(0.180832, -0.0641479),
(0.300293, -0.161026),
(0.297684, -0.112625),
(0.490265, 0),
(0.496658, -0.188034),
(0.490265, 0),
(0.692596, -0.158539),
(0.297684, -0.112625),
(0.694214, -0.110107),
(0.490265, 0),
(0.692596, -0.158539),
(0.490265, 0),
(0.694214, -0.110107))}
def loadIsland2():
return {
'texture': pyglet.image.load('island2tex.png').get_texture(),
'vertices': (
(-1.06761, -0.780533, 0.00448608),
(-1.17841, -0.955368, 0.462875),
(-1.06761, -1.4328, 0.00448608),
(-1.72369, -1.4328, 0.00448608),
(-1.72369, -0.780533, 0.00448608),
(-1.06761, -1.4328, 0.00448608),
(-1.33263, -1.054, 1.45544),
(-1.23141, -0.776794, 1.4323),
(-1.61009, -0.955154, 1.42603),
(-1.61292, -1.15643, 0.484634),
(-1.72369, -1.4328, 0.00448608),
(-1.29454, -1.2719, 0.497116),
(-1.49678, -0.839905, 0.450378),
(-1.72369, -0.780533, 0.00448608),
(-1.61292, -1.15643, 0.484634),
(-1.50888, -0.677948, 1.40286),
(-1.23141, -0.776794, 1.4323),
(-1.55685, -0.556656, 1.00879),
(-1.23141, -0.776794, 1.4323),
(-1.33263, -1.054, 1.45544),
(-1.04933, -0.737442, 1.06262),
(-1.23445, -1.24449, 1.10498),
(-1.33263, -1.054, 1.45544),
(-1.74197, -1.23012, 1.06915),
(-1.61009, -0.955154, 1.42603),
(-1.50888, -0.677948, 1.40286),
(-1.74197, -1.23012, 1.06915),
(-1.17841, -0.955368, 0.462875),
(-1.06761, -0.780533, 0.00448608),
(-1.49678, -0.839905, 0.450378),
(-1.29454, -1.2719, 0.497116),
(-1.06761, -1.4328, 0.00448608),
(-1.17841, -0.955368, 0.462875),
(-1.06761, -1.4328, 0.00448608),
(-1.72369, -0.780533, 0.00448608),
(-1.06761, -0.780533, 0.00448608),
(-1.50888, -0.677948, 1.40286),
(-1.61009, -0.955154, 1.42603),
(-1.23141, -0.776794, 1.4323),
(-1.06761, -1.4328, 0.00448608),
(-1.29454, -1.2719, 0.497116),
(-1.72369, -1.4328, 0.00448608),
(-1.61292, -1.15643, 0.484634),
(-1.72369, -0.780533, 0.00448608),
(-1.72369, -1.4328, 0.00448608),
(-1.04933, -0.737442, 1.06262),
(-1.55685, -0.556656, 1.00879),
(-1.23141, -0.776794, 1.4323),
(-1.23445, -1.24449, 1.10498),
(-1.04933, -0.737442, 1.06262),
(-1.33263, -1.054, 1.45544),
(-1.61009, -0.955154, 1.42603),
(-1.74197, -1.23012, 1.06915),
(-1.33263, -1.054, 1.45544),
(-1.55685, -0.556656, 1.00879),
(-1.74197, -1.23012, 1.06915),
(-1.50888, -0.677948, 1.40286),
(-1.72369, -0.780533, 0.00448608),
(-1.49678, -0.839905, 0.450378),
(-1.06761, -0.780533, 0.00448608),
(-1.29454, -1.2719, 0.497116),
(-1.74197, -1.23012, 1.06915),
(-1.61292, -1.15643, 0.484634),
(-1.23445, -1.24449, 1.10498),
(-1.74197, -1.23012, 1.06915),
(-1.29454, -1.2719, 0.497116),
(-1.49678, -0.839905, 0.450378),
(-1.04933, -0.737442, 1.06262),
(-1.17841, -0.955368, 0.462875),
(-1.55685, -0.556656, 1.00879),
(-1.04933, -0.737442, 1.06262),
(-1.49678, -0.839905, 0.450378),
(-1.23445, -1.24449, 1.10498),
(-1.29454, -1.2719, 0.497116),
(-1.04933, -0.737442, 1.06262),
(-1.29454, -1.2719, 0.497116),
(-1.17841, -0.955368, 0.462875),
(-1.04933, -0.737442, 1.06262),
(-1.55685, -0.556656, 1.00879),
(-1.49678, -0.839905, 0.450378),
(-1.74197, -1.23012, 1.06915),
(-1.49678, -0.839905, 0.450378),
(-1.61292, -1.15643, 0.484634),
(-1.74197, -1.23012, 1.06915),
(0.327194, -0.388901, 0.00752258),
(0.327194, 0.267166, 0.00752258),
(0.301727, -0.201691, 0.479523),
(-0.328873, 0.267166, 0.00752258),
(0.327194, 0.267166, 0.00752258),
(-0.328873, -0.388901, 0.00752258),
(0.0621948, 0.128891, 2.25124),
(-0.215256, 0.0274505, 2.2326),
(0.163391, -0.149185, 2.25803),
(-0.303421, 0.0799713, 0.479523),
(0.139984, 0.241714, 0.479523),
(-0.328873, 0.267166, 0.00752258),
(-0.141678, -0.363434, 0.479523),
(-0.303421, 0.0799713, 0.479523),
(-0.328873, -0.388901, 0.00752258),
(0.138596, 0.239395, 1.88083),
(0.0780181, 0.108582, 1.30341),
(-0.300415, 0.223694, 1.85136),
(-0.114059, -0.25061, 2.2394),
(-0.140274, -0.361115, 1.86212),
(0.163391, -0.149185, 2.25803),
(0.163391, -0.149185, 2.25803),
(0.298737, -0.200607, 1.89159),
(0.0621948, 0.128891, 2.25124),
(0.138596, 0.239395, 1.88083),
(-0.300415, 0.223694, 1.85136),
(0.0621948, 0.128891, 2.25124),
(-0.215256, 0.0274505, 2.2326),
(-0.300415, 0.223694, 1.85136),
(-0.114059, -0.25061, 2.2394),
(0.301727, -0.201691, 0.479523),
(-0.141678, -0.363434, 0.479523),
(0.327194, -0.388901, 0.00752258),
(0.139984, 0.241714, 0.479523),
(0.301727, -0.201691, 0.479523),
(0.327194, 0.267166, 0.00752258),
(0.327194, -0.388901, 0.00752258),
(-0.328873, -0.388901, 0.00752258),
(0.327194, 0.267166, 0.00752258),
(-0.114059, -0.25061, 2.2394),
(0.163391, -0.149185, 2.25803),
(-0.215256, 0.0274505, 2.2326),
(0.327194, 0.267166, 0.00752258),
(-0.328873, 0.267166, 0.00752258),
(0.139984, 0.241714, 0.479523),
(-0.328873, 0.267166, 0.00752258),
(-0.328873, -0.388901, 0.00752258),
(-0.303421, 0.0799713, 0.479523),
(0.301727, -0.201691, 0.479523),
(0.168594, -0.139725, 1.01736),
(-0.141678, -0.363434, 0.479523),
(0.0780181, 0.108582, 1.30341),
(0.168594, -0.139725, 1.01736),
(0.139984, 0.241714, 0.479523),
(-0.170273, 0.0180053, 1.01736),
(-0.141678, -0.363434, 0.479523),
(-0.0796966, -0.230301, 1.30341),
(0.0780181, 0.108582, 1.30341),
(-0.170273, 0.0180053, 1.01736),
(-0.300415, 0.223694, 1.85136),
(-0.140274, -0.361115, 1.86212),
(-0.0796966, -0.230301, 1.30341),
(0.298737, -0.200607, 1.89159),
(0.138596, 0.239395, 1.88083),
(0.298737, -0.200607, 1.89159),
(0.0780181, 0.108582, 1.30341),
(-0.140274, -0.361115, 1.86212),
(-0.300415, 0.223694, 1.85136),
(-0.0796966, -0.230301, 1.30341),
(0.298737, -0.200607, 1.89159),
(0.163391, -0.149185, 2.25803),
(-0.140274, -0.361115, 1.86212),
(0.138596, 0.239395, 1.88083),
(0.0621948, 0.128891, 2.25124),
(0.298737, -0.200607, 1.89159),
(-0.215256, 0.0274505, 2.2326),
(0.0621948, 0.128891, 2.25124),
(-0.300415, 0.223694, 1.85136),
(-0.140274, -0.361115, 1.86212),
(-0.114059, -0.25061, 2.2394),
(-0.300415, 0.223694, 1.85136),
(-0.0796966, -0.230301, 1.30341),
(-0.300415, 0.223694, 1.85136),
(-0.170273, 0.0180053, 1.01736),
(0.298737, -0.200607, 1.89159),
(0.168594, -0.139725, 1.01736),
(0.0780181, 0.108582, 1.30341),
(0.298737, -0.200607, 1.89159),
(-0.0796966, -0.230301, 1.30341),
(0.168594, -0.139725, 1.01736),
(-0.303421, 0.0799713, 0.479523),
(-0.141678, -0.363434, 0.479523),
(-0.170273, 0.0180053, 1.01736),
(0.168594, -0.139725, 1.01736),
(-0.0796966, -0.230301, 1.30341),
(-0.141678, -0.363434, 0.479523),
(-0.303421, 0.0799713, 0.479523),
(-0.170273, 0.0180053, 1.01736),
(0.139984, 0.241714, 0.479523),
(0.0780181, 0.108582, 1.30341),
(0.139984, 0.241714, 0.479523),
(-0.170273, 0.0180053, 1.01736),
(0.301727, -0.201691, 0.479523),
(0.139984, 0.241714, 0.479523),
(0.168594, -0.139725, 1.01736),
(-0.328873, -0.388901, 0.00752258),
(0.327194, -0.388901, 0.00752258),
(-0.141678, -0.363434, 0.479523)),
'indices': (
0, 1, 2,
3, 4, 5,
6, 7, 8,
9, 10, 11,
12, 13, 14,
15, 16, 17,
18, 19, 20,
21, 22, 23,
24, 25, 26,
27, 28, 29,
30, 31, 32,
33, 34, 35,
36, 37, 38,
39, 40, 41,
42, 43, 44,
45, 46, 47,
48, 49, 50,
51, 52, 53,
54, 55, 56,
57, 58, 59,
60, 61, 62,
63, 64, 65,
66, 67, 68,
69, 70, 71,
72, 73, 74,
75, 76, 77,
78, 79, 80,
81, 82, 83,
84, 85, 86,
87, 88, 89,
90, 91, 92,
93, 94, 95,
96, 97, 98,
99, 100, 101,
102, 103, 104,
105, 106, 107,
108, 109, 110,
111, 112, 113,
114, 115, 116,
117, 118, 119,
120, 121, 122,
123, 124, 125,
126, 127, 128,
129, 130, 131,
132, 133, 134,
135, 136, 137,
138, 139, 140,
141, 142, 143,
144, 145, 146,
147, 148, 149,
150, 151, 152,
153, 154, 155,
156, 157, 158,
159, 160, 161,
162, 163, 164,
165, 166, 167,
168, 169, 170,
171, 172, 173,
174, 175, 176,
177, 178, 179,
180, 181, 182,
183, 184, 185,
186, 187, 188,
189, 190, 191),
'uvmap': (
(0.383682, 0),
(0.280838, -0.2034),
(0, 0),
(0, 0),
(0.383682, 0),
(0, 0),
(0.222824, -0.64386),
(0.38588, -0.633575),
(0.28096, -0.630798),
(0.162567, -0.213058),
(0, 0),
(0.0946502, -0.218597),
(0.348755, -0.197861),
(0.383682, 0),
(0.162567, -0.213058),
(0.444031, -0.620514),
(0.38588, -0.633575),
(0.515381, -0.445648),
(0.38588, -0.633575),
(0.222824, -0.64386),
(0.409027, -0.469528),
(0.110764, -0.488327),
(0.222824, -0.64386),
(0.119217, -0.472427),
(0.28096, -0.630798),
(0.444031, -0.620514),
(0.119217, -0.472427),
(0.280838, -0.2034),
(0.383682, 0),
(0.348755, -0.197861),
(0.0946502, -0.218597),
(0, 0),
(0.280838, -0.2034),
(0, 0),
(0.383682, 0),
(0.383682, 0),
(0.444031, -0.620514),
(0.28096, -0.630798),
(0.38588, -0.633575),
(0, 0),
(0.0946502, -0.218597),
(0, 0),
(0.162567, -0.213058),
(0.383682, 0),
(0, 0),
(0.409027, -0.469528),
(0.515381, -0.445648),
(0.38588, -0.633575),
(0.110764, -0.488327),
(0.409027, -0.469528),
(0.222824, -0.64386),
(0.28096, -0.630798),
(0.119217, -0.472427),
(0.222824, -0.64386),
(0.515381, -0.445648),
(0.119217, -0.472427),
(0.444031, -0.620514),
(0.383682, 0),
(0.348755, -0.197861),
(0.383682, 0),
(0.0946502, -0.218597),
(0.119217, -0.472427),
(0.162567, -0.213058),
(0.110764, -0.488327),
(0.119217, -0.472427),
(0.0946502, -0.218597),
(0.348755, -0.197861),
(0.409027, -0.469528),
(0.280838, -0.2034),
(0.515381, -0.445648),
(0.409027, -0.469528),
(0.348755, -0.197861),
(0.110764, -0.488327),
(0.0946502, -0.218597),
(0.409027, -0.469528),
(0.0946502, -0.218597),
(0.280838, -0.2034),
(0.409027, -0.469528),
(0.515381, -0.445648),
(0.348755, -0.197861),
(0.119217, -0.472427),
(0.348755, -0.197861),
(0.162567, -0.213058),
(0.119217, -0.472427),
(0.614059, -0.00134277),
(1, -0.00134277),
(0.724182, -0.210785),
(1, -0.00134277),
(1, -0.00134277),
(0.614059, -0.00134277),
(0.918655, -0.996979),
(0.858978, -0.988708),
(0.755066, -1),
(0.889877, -0.210785),
(0.985016, -0.210785),
(1, -0.00134277),
(0.629044, -0.210785),
(0.889877, -0.210785),
(0.614059, -0.00134277),
(0.983658, -0.832611),
(0.906708, -0.576385),
(0.974411, -0.819534),
(0.695404, -0.99173),
(0.630402, -0.82431),
(0.755066, -1),
(0.755066, -1),
(0.724823, -0.837387),
(0.918655, -0.996979),
(0.983658, -0.832611),
(0.974411, -0.819534),
(0.918655, -0.996979),
(0.858978, -0.988708),
(0.974411, -0.819534),
(0.695404, -0.99173),
(0.724182, -0.210785),
(0.629044, -0.210785),
(0.614059, -0.00134277),
(0.985016, -0.210785),
(0.724182, -0.210785),
(1, -0.00134277),
(0.614059, -0.00134277),
(0.614059, -0.00134277),
(1, -0.00134277),
(0.695404, -0.99173),
(0.755066, -1),
(0.858978, -0.988708),
(1, -0.00134277),
(1, -0.00134277),
(0.985016, -0.210785),
(1, -0.00134277),
(0.614059, -0.00134277),
(0.889877, -0.210785),
(0.724182, -0.210785),
(0.760635, -0.449448),
(0.629044, -0.210785),
(0.906708, -0.576385),
(0.760635, -0.449448),
(0.985016, -0.210785),
(0.853424, -0.449448),
(0.629044, -0.210785),
(0.707352, -0.576385),
(0.906708, -0.576385),
(0.853424, -0.449448),
(0.974411, -0.819534),
(0.630402, -0.82431),
(0.707352, -0.576385),
(0.724823, -0.837387),
(0.983658, -0.832611),
(0.724823, -0.837387),
(0.906708, -0.576385),
(0.630402, -0.82431),
(0.974411, -0.819534),
(0.707352, -0.576385),
(0.724823, -0.837387),
(0.755066, -1),
(0.630402, -0.82431),
(0.983658, -0.832611),
(0.918655, -0.996979),
(0.724823, -0.837387),
(0.858978, -0.988708),
(0.918655, -0.996979),
(0.974411, -0.819534),
(0.630402, -0.82431),
(0.695404, -0.99173),
(0.974411, -0.819534),
(0.707352, -0.576385),
(0.974411, -0.819534),
(0.853424, -0.449448),
(0.724823, -0.837387),
(0.760635, -0.449448),
(0.906708, -0.576385),
(0.724823, -0.837387),
(0.707352, -0.576385),
(0.760635, -0.449448),
(0.889877, -0.210785),
(0.629044, -0.210785),
(0.853424, -0.449448),
(0.760635, -0.449448),
(0.707352, -0.576385),
(0.629044, -0.210785),
(0.889877, -0.210785),
(0.853424, -0.449448),
(0.985016, -0.210785),
(0.906708, -0.576385),
(0.985016, -0.210785),
(0.853424, -0.449448),
(0.724182, -0.210785),
(0.985016, -0.210785),
(0.760635, -0.449448),
(0.614059, -0.00134277),
(0.614059, -0.00134277),
(0.629044, -0.210785))}
def loadIsland1():
return {
'texture': pyglet.image.load('island1tex.png').get_texture(),
'vertices': (
(-0.419022, 2.4046, 1.98764),
(-0.639664, 1.24741, 1.79893),
(-0.461929, 2.09698, 2.75305),
(-0.357971, -0.584793, 2.88612),
(0.961838, -1, 2.68634),
(-0.461929, 2.09698, 2.75305),
(0.961838, -1, 2.68634),
(0.923874, 2.09698, 2.60301),
(-0.461929, 2.09698, 2.75305),
(-0.961838, -0.584793, 2.3443),
(0, -1.53358, 2.38168),
(-0.357971, -0.584793, 2.88612),
(0.961838, -1, 2.68634),
(-0.357971, -0.584793, 2.88612),
(0, -1.53358, 2.38168),
(1.32661, 0.0526428, -0.0107574),
(1.16219, -1.20143, -0.0107574),
(-1.16055, -0.0850982, -0.0107574),
(-0.739105, 2.50594, -0.00807189),
(0.831543, 2.38768, -0.00807189),
(-1.12611, 1.30867, -0.00807189),
(0.669525, 0.656982, 1.76611),
(0.476822, -0.191589, 1.33751),
(-0.639664, 1.24741, 1.79893),
(-0.439804, 0.0430908, 1.34462),
(-0.722473, -1.08788, 1.83481),
(-0.639664, 1.24741, 1.79893),
(0.476822, -0.191589, 1.33751),
(0.699387, -0.355774, 0.974243),
(-0.439804, 0.0430908, 1.34462),
(-0.458038, -0.867325, 0.990585),
(0.458038, -0.867325, 0.990585),
(-0.722473, -1.08788, 1.83481),
(-0.458038, -0.867325, 0.990585),
(-0.439804, 0.0430908, 1.34462),
(-0.717773, -0.284958, 0.394806),
(-0.458038, -0.867325, 0.990585),
(-0.670227, -1.06004, 0.419617),
(0.458038, -0.867325, 0.990585),
(-0.717773, -0.284958, 0.394806),
(-1.16055, -0.0850982, -0.0107574),
(-0.670227, -1.06004, 0.419617),
(0.669525, 0.656982, 1.76611),
(0.961838, -1, 2.68634),
(0.476822, -0.191589, 1.33751),
(0.609802, 1.23466, 1.76611),
(0.549988, 1.51712, 1.46841),
(0.419022, 2.49605, 1.98764),
(-0.288681, 2.24104, 1.08629),
(-0.325546, 1.63148, 1.08629),
(-0.419022, 2.4046, 1.98764),
(0.831543, 2.38768, -0.00807189),
(-0.739105, 2.50594, -0.00807189),
(0.228531, 2.12608, 1.08629),
(-0.373871, 1.71059, 0.521423),
(0.343185, 1.6566, 0.525848),
(-0.325546, 1.63148, 1.08629),
(1.03369, 1.14606, -0.00807189),
(0.831543, 2.38768, -0.00807189),
(0.343185, 1.6566, 0.525848),
(-0.373871, 1.71059, 0.521423),
(-1.12611, 1.30867, -0.00807189),
(0.343185, 1.6566, 0.525848),
(0, 2.58858, 2.60301),
(-0.419022, 2.4046, 1.98764),
(-0.461929, 2.09698, 2.75305),
(0.609802, 1.23466, 1.76611),
(0.923874, 2.09698, 2.60301),
(0.669525, 0.656982, 1.76611),
(0, 2.58858, 2.60301),
(0.923874, 2.09698, 2.60301),
(0.419022, 2.49605, 1.98764),
(0.419022, 2.49605, 1.98764),
(0.923874, 2.09698, 2.60301),
(0.609802, 1.23466, 1.76611),
(-0.722473, -1.08788, 1.83481),
(0.722473, -1.14145, 1.87538),
(0, -1.53358, 2.38168),
(-0.461929, 2.09698, 2.75305),
(-0.639664, 1.24741, 1.79893),
(-0.357971, -0.584793, 2.88612),
(-0.461929, 2.09698, 2.75305),
(0.923874, 2.09698, 2.60301),
(0, 2.58858, 2.60301),
(-1.08363, -1.33919, -0.0107574),
(-1.16055, -0.0850982, -0.0107574),
(1.16219, -1.20143, -0.0107574),
(0.669525, 0.656982, 1.76611),
(-0.639664, 1.24741, 1.79893),
(0.609802, 1.23466, 1.76611),
(1.03369, 1.14606, -0.00807189),
(-1.12611, 1.30867, -0.00807189),
(0.831543, 2.38768, -0.00807189),
(-0.439804, 0.0430908, 1.34462),
(-0.639664, 1.24741, 1.79893),
(0.476822, -0.191589, 1.33751),
(0.722473, -1.14145, 1.87538),
(-0.722473, -1.08788, 1.83481),
(0.458038, -0.867325, 0.990585),
(-0.722473, -1.08788, 1.83481),
(-0.439804, 0.0430908, 1.34462),
(-0.458038, -0.867325, 0.990585),
(-0.670227, -1.06004, 0.419617),
(-0.458038, -0.867325, 0.990585),
(-0.717773, -0.284958, 0.394806),
(-1.16055, -0.0850982, -0.0107574),
(-0.717773, -0.284958, 0.394806),
(1.32661, 0.0526428, -0.0107574),
(1.16219, -1.20143, -0.0107574),
(1.32661, 0.0526428, -0.0107574),
(0.699387, -0.355774, 0.974243),
(-1.08363, -1.33919, -0.0107574),
(-0.670227, -1.06004, 0.419617),
(-1.16055, -0.0850982, -0.0107574),
(-0.419022, 2.4046, 1.98764),
(0.419022, 2.49605, 1.98764),
(0.228531, 2.12608, 1.08629),
(0.609802, 1.23466, 1.76611),
(-0.639664, 1.24741, 1.79893),
(0.549988, 1.51712, 1.46841),
(0.228531, 2.12608, 1.08629),
(-0.288681, 2.24104, 1.08629),
(-0.419022, 2.4046, 1.98764),
(-0.325546, 1.63148, 1.08629),
(-0.288681, 2.24104, 1.08629),
(-0.373871, 1.71059, 0.521423),
(-1.12611, 1.30867, -0.00807189),
(-0.373871, 1.71059, 0.521423),
(-0.739105, 2.50594, -0.00807189),
(1.03369, 1.14606, -0.00807189),
(0.343185, 1.6566, 0.525848),
(-1.12611, 1.30867, -0.00807189),
(0.419022, 2.49605, 1.98764),
(-0.419022, 2.4046, 1.98764),
(0, 2.58858, 2.60301),
(0, -1.53358, 2.38168),
(-0.961838, -0.584793, 2.3443),
(-0.722473, -1.08788, 1.83481),
(-0.639664, 1.24741, 1.79893),
(-0.722473, -1.08788, 1.83481),
(-0.961838, -0.584793, 2.3443),
(0, -1.53358, 2.38168),
(0.722473, -1.14145, 1.87538),
(0.961838, -1, 2.68634),
(0.923874, 2.09698, 2.60301),
(0.961838, -1, 2.68634),
(0.669525, 0.656982, 1.76611),
(0.961838, -1, 2.68634),
(0.722473, -1.14145, 1.87538),
(0.476822, -0.191589, 1.33751),
(0.458038, -0.867325, 0.990585),
(-0.670227, -1.06004, 0.419617),
(1.16219, -1.20143, -0.0107574),
(0.458038, -0.867325, 0.990585),
(1.16219, -1.20143, -0.0107574),
(0.699387, -0.355774, 0.974243),
(-1.08363, -1.33919, -0.0107574),
(1.16219, -1.20143, -0.0107574),
(-0.670227, -1.06004, 0.419617),
(-0.373871, 1.71059, 0.521423),
(-0.288681, 2.24104, 1.08629),
(-0.739105, 2.50594, -0.00807189),
(0.343185, 1.6566, 0.525848),
(0.831543, 2.38768, -0.00807189),
(0.228531, 2.12608, 1.08629),
(-0.288681, 2.24104, 1.08629),
(0.228531, 2.12608, 1.08629),
(-0.739105, 2.50594, -0.00807189),
(0.699387, -0.355774, 0.974243),
(-0.717773, -0.284958, 0.394806),
(-0.439804, 0.0430908, 1.34462),
(-0.325546, 1.63148, 1.08629),
(0.343185, 1.6566, 0.525848),
(0.549988, 1.51712, 1.46841),
(0.343185, 1.6566, 0.525848),
(0.228531, 2.12608, 1.08629),
(0.549988, 1.51712, 1.46841),
(0.549988, 1.51712, 1.46841),
(-0.639664, 1.24741, 1.79893),
(-0.325546, 1.63148, 1.08629),
(-0.639664, 1.24741, 1.79893),
(-0.419022, 2.4046, 1.98764),
(-0.325546, 1.63148, 1.08629),
(0.549988, 1.51712, 1.46841),
(0.228531, 2.12608, 1.08629),
(0.419022, 2.49605, 1.98764),
(-0.961838, -0.584793, 2.3443),
(-0.357971, -0.584793, 2.88612),
(-0.639664, 1.24741, 1.79893),
(-0.717773, -0.284958, 0.394806),
(0.699387, -0.355774, 0.974243),
(1.32661, 0.0526428, -0.0107574),
(0.722473, -1.14145, 1.87538),
(0.458038, -0.867325, 0.990585),
(0.476822, -0.191589, 1.33751),
(0.699387, -0.355774, 0.974243),
(0.476822, -0.191589, 1.33751),
(0.458038, -0.867325, 0.990585)),
'indices': (
0, 1, 2,
3, 4, 5,
6, 7, 8,
9, 10, 11,
12, 13, 14,
15, 16, 17,
18, 19, 20,
21, 22, 23,
24, 25, 26,
27, 28, 29,
30, 31, 32,
33, 34, 35,
36, 37, 38,
39, 40, 41,
42, 43, 44,
45, 46, 47,
48, 49, 50,
51, 52, 53,
54, 55, 56,
57, 58, 59,
60, 61, 62,
63, 64, 65,
66, 67, 68,
69, 70, 71,
72, 73, 74,
75, 76, 77,
78, 79, 80,
81, 82, 83,
84, 85, 86,
87, 88, 89,
90, 91, 92,
93, 94, 95,
96, 97, 98,
99, 100, 101,
102, 103, 104,
105, 106, 107,
108, 109, 110,
111, 112, 113,
114, 115, 116,
117, 118, 119,
120, 121, 122,
123, 124, 125,
126, 127, 128,
129, 130, 131,
132, 133, 134,
135, 136, 137,
138, 139, 140,
141, 142, 143,
144, 145, 146,
147, 148, 149,
150, 151, 152,
153, 154, 155,
156, 157, 158,
159, 160, 161,
162, 163, 164,
165, 166, 167,
168, 169, 170,
171, 172, 173,
174, 175, 176,
177, 178, 179,
180, 181, 182,
183, 184, 185,
186, 187, 188,
189, 190, 191,
192, 193, 194,
195, 196, 197),
'uvmap': (
(0.955368, -0.689835),
(0.674637, -0.624695),
(0.880737, -0.954056),
(0.230164, -1),
(0.12944, -0.93103),
(0.880737, -0.954056),
(0.12944, -0.93103),
(0.880737, -0.902267),
(0.880737, -0.954056),
(0.230164, -0.812958),
(0, -0.825867),
(0.230164, -1),
(0.12944, -0.93103),
(0.230164, -1),
(0, -0.825867),
(0.384796, 0),
(0.0805664, 0),
(0.351379, 0),
(0.97995, -0.000915527),
(0.951263, -0.000915527),
(0.689499, -0.000915527),
(0.531403, -0.613373),
(0.325546, -0.465424),
(0.674637, -0.624695),
(0.382477, -0.467865),
(0.108109, -0.637085),
(0.674637, -0.624695),
(0.325546, -0.465424),
(0.285721, -0.340012),
(0.382477, -0.467865),
(0.161621, -0.345657),
(0.161621, -0.345657),
(0.108109, -0.637085),
(0.161621, -0.345657),
(0.382477, -0.467865),
(0.302902, -0.139999),
(0.161621, -0.345657),
(0.114868, -0.14856),
(0.161621, -0.345657),
(0.302902, -0.139999),
(0.351379, 0),
(0.114868, -0.14856),
(0.531403, -0.613373),
(0.12944, -0.93103),
(0.325546, -0.465424),
(0.671539, -0.613373),
(0.740067, -0.510605),
(0.977554, -0.689835),
(0.91568, -0.378693),
(0.767807, -0.378693),
(0.955368, -0.689835),
(0.951263, -0.000915527),
(0.97995, -0.000915527),
(0.887802, -0.378693),
(0.787003, -0.183701),
(0.773911, -0.185226),
(0.767807, -0.378693),
(0.650055, -0.000915527),
(0.951263, -0.000915527),
(0.773911, -0.185226),
(0.787003, -0.183701),
(0.689499, -0.000915527),
(0.773911, -0.185226),
(1, -0.902267),
(0.955368, -0.689835),
(0.880737, -0.954056),
(0.671539, -0.613373),
(0.880737, -0.902267),
(0.531403, -0.613373),
(1, -0.902267),
(0.880737, -0.902267),
(0.977554, -0.689835),
(0.977554, -0.689835),
(0.880737, -0.902267),
(0.671539, -0.613373),
(0.108109, -0.637085),
(0.0951232, -0.651093),
(0, -0.825867),
(0.880737, -0.954056),
(0.674637, -0.624695),
(0.230164, -1),
(0.880737, -0.954056),
(0.880737, -0.902267),
(1, -0.902267),
(0.0471496, 0),
(0.351379, 0),
(0.0805664, 0),
(0.531403, -0.613373),
(0.674637, -0.624695),
(0.671539, -0.613373),
(0.650055, -0.000915527),
(0.689499, -0.000915527),
(0.951263, -0.000915527),
(0.382477, -0.467865),
(0.674637, -0.624695),
(0.325546, -0.465424),
(0.0951232, -0.651093),
(0.108109, -0.637085),
(0.161621, -0.345657),
(0.108109, -0.637085),
(0.382477, -0.467865),
(0.161621, -0.345657),
(0.114868, -0.14856),
(0.161621, -0.345657),
(0.302902, -0.139999),
(0.351379, 0),
(0.302902, -0.139999),
(0.384796, 0),
(0.0805664, 0),
(0.384796, 0),
(0.285721, -0.340012),
(0.0471496, 0),
(0.114868, -0.14856),
(0.351379, 0),
(0.955368, -0.689835),
(0.977554, -0.689835),
(0.887802, -0.378693),
(0.671539, -0.613373),
(0.674637, -0.624695),
(0.740067, -0.510605),
(0.887802, -0.378693),
(0.91568, -0.378693),
(0.955368, -0.689835),
(0.767807, -0.378693),
(0.91568, -0.378693),
(0.787003, -0.183701),
(0.689499, -0.000915527),
(0.787003, -0.183701),
(0.97995, -0.000915527),
(0.650055, -0.000915527),
(0.773911, -0.185226),
(0.689499, -0.000915527),
(0.977554, -0.689835),
(0.955368, -0.689835),
(1, -0.902267),
(0, -0.825867),
(0.230164, -0.812958),
(0.108109, -0.637085),
(0.674637, -0.624695),
(0.108109, -0.637085),
(0.230164, -0.812958),
(0, -0.825867),
(0.0951232, -0.651093),
(0.12944, -0.93103),
(0.880737, -0.902267),
(0.12944, -0.93103),
(0.531403, -0.613373),
(0.12944, -0.93103),
(0.0951232, -0.651093),
(0.325546, -0.465424),
(0.161621, -0.345657),
(0.114868, -0.14856),
(0.0805664, 0),
(0.161621, -0.345657),
(0.0805664, 0),
(0.285721, -0.340012),
(0.0471496, 0),
(0.0805664, 0),
(0.114868, -0.14856),
(0.787003, -0.183701),
(0.91568, -0.378693),
(0.97995, -0.000915527),
(0.773911, -0.185226),
(0.951263, -0.000915527),
(0.887802, -0.378693),
(0.91568, -0.378693),
(0.887802, -0.378693),
(0.97995, -0.000915527),
(0.285721, -0.340012),
(0.302902, -0.139999),
(0.382477, -0.467865),
(0.767807, -0.378693),
(0.773911, -0.185226),
(0.740067, -0.510605),
(0.773911, -0.185226),
(0.887802, -0.378693),
(0.740067, -0.510605),
(0.740067, -0.510605),
(0.674637, -0.624695),
(0.767807, -0.378693),
(0.674637, -0.624695),
(0.955368, -0.689835),
(0.767807, -0.378693),
(0.740067, -0.510605),
(0.887802, -0.378693),
(0.977554, -0.689835),
(0.230164, -0.812958),
(0.230164, -1),
(0.674637, -0.624695),
(0.302902, -0.139999),
(0.285721, -0.340012),
(0.384796, 0),
(0.0951232, -0.651093),
(0.161621, -0.345657),
(0.325546, -0.465424),
(0.285721, -0.340012),
(0.325546, -0.465424),
(0.161621, -0.345657))}
def loadCheckpoint():
return {
'texture': None,
'vertices': (
(-0.00740051, -0.00157165, 1.41418),
(1.17003, -1.13281, 0.00520324),
(1.17186, 1.12926, 0.00704956),
(1.17186, 1.12926, 0.00704956),
(1.17003, -1.13281, 0.00520324),
(0.00740051, 0.00157165, -1.41418),
(-0.00740051, -0.00157165, 1.41418),
(-1.17186, -1.12926, -0.00704956),
(1.17003, -1.13281, 0.00520324),
(0.00740051, 0.00157165, -1.41418),
(-1.17003, 1.13281, -0.00520324),
(1.17186, 1.12926, 0.00704956),
(-1.17186, -1.12926, -0.00704956),
(-1.17003, 1.13281, -0.00520324),
(0.00740051, 0.00157165, -1.41418),
(-0.00740051, -0.00157165, 1.41418),
(-1.17003, 1.13281, -0.00520324),
(-1.17186, -1.12926, -0.00704956),
(0.00740051, 0.00157165, -1.41418),
(1.17003, -1.13281, 0.00520324),
(-1.17186, -1.12926, -0.00704956),
(-0.00740051, -0.00157165, 1.41418),
(1.17186, 1.12926, 0.00704956),
(-1.17003, 1.13281, -0.00520324)),
'indices': (
0, 1, 2,
3, 4, 5,
6, 7, 8,
9, 10, 11,
12, 13, 14,
15, 16, 17,
18, 19, 20,
21, 22, 23),
'uvmap': (
(0.015625, -0.960938),
(0.960938, -0.96875),
(0.476563, -0.476563),
(0.476563, -0.476563),
(0.960938, -0.96875),
(0.96875, -0.0234375),
(0.015625, -0.960938),
(0.507813, -0.507813),
(0.960938, -0.96875),
(0.96875, -0.0234375),
(0.0234375, -0.015625),
(0.476563, -0.476563),
(0.507813, -0.507813),
(0.0234375, -0.015625),
(0.96875, -0.0234375),
(0.015625, -0.960938),
(0.0234375, -0.015625),
(0.507813, -0.507813),
(0.96875, -0.0234375),
(0.960938, -0.96875),
(0.507813, -0.507813),
(0.015625, -0.960938),
(0.476563, -0.476563),
(0.0234375, -0.015625))}
def loadTest(): # test cube
return {
'texture': pyglet.image.load('watertex.png').get_texture(),
'vertices': (
# front
(-0.5, -0.5, 0.5), ( 0.5, -0.5, 0.5), ( 0.5, 0.5, 0.5),
( 0.5, 0.5, 0.5), (-0.5, 0.5, 0.5), (-0.5, -0.5, 0.5),
# back
(-0.5, -0.5, -0.5), (-0.5, 0.5, -0.5), ( 0.5, 0.5, -0.5),
( 0.5, 0.5, -0.5), ( 0.5, -0.5, -0.5), (-0.5, -0.5, -0.5),
# top
(-0.5, 0.5, -0.5), (-0.5, 0.5, 0.5), ( 0.5, 0.5, 0.5),
( 0.5, 0.5, 0.5), ( 0.5, 0.5, -0.5), (-0.5, 0.5, -0.5),
# bottom
(-0.5, -0.5, -0.5), ( 0.5, -0.5, -0.5), ( 0.5, -0.5, 0.5),
( 0.5, -0.5, 0.5), (-0.5, -0.5, 0.5), (-0.5, -0.5, -0.5),
# right
( 0.5, -0.5, -0.5), ( 0.5, 0.5, -0.5), ( 0.5, 0.5, 0.5),
( 0.5, 0.5, 0.5), ( 0.5, -0.5, 0.5), ( 0.5, -0.5, -0.5),
# left
(-0.5, -0.5, -0.5), (-0.5, -0.5, 0.5), (-0.5, 0.5, 0.5),
(-0.5, 0.5, 0.5), (-0.5, 0.5, -0.5), (-0.5, -0.5, -0.5)),
'indices': (
0, 1, 2, 3, 4, 5, # front
6, 7, 8, 9, 10, 11, # back
12, 13, 14, 15, 16, 17, # top
18, 19, 20, 21, 22, 23, # bottom
24, 25, 26, 27, 28, 29, # right
30, 31, 32, 33, 34, 35), # left
'uvmap': (
# front
(0.0, 0.0), (1.0, 0.0), (1.0, 1.0),
(1.0, 1.0), (0.0, 1.0), (0.0, 0.0),
# back
(0.0, 0.0), (1.0, 0.0), (1.0, 1.0),
(1.0, 1.0), (0.0, 1.0), (0.0, 0.0),
# top
(0.0, 0.0), (1.0, 0.0), (1.0, 1.0),
(1.0, 1.0), (0.0, 1.0), (0.0, 0.0),
# bottom
(0.0, 0.0), (1.0, 0.0), (1.0, 1.0),
(1.0, 1.0), (0.0, 1.0), (0.0, 0.0),
# right
(0.0, 0.0), (1.0, 0.0), (1.0, 1.0),
(1.0, 1.0), (0.0, 1.0), (0.0, 0.0),
# left
(0.0, 0.0), (1.0, 0.0), (1.0, 1.0),
(1.0, 1.0), (0.0, 1.0), (0.0, 0.0))}
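# Illustrative sketch (not part of the original file): one way the model
# dictionaries returned above could be rendered with pyglet's immediate-mode
# helpers. An active OpenGL context and camera/projection setup are assumed
# to exist elsewhere.
from pyglet.gl import (GL_TRIANGLES, GL_TEXTURE_2D,
                       glBindTexture, glDisable, glEnable)

def drawModel(model):
    """Draw a model dict produced by the load* functions above."""
    vertices = model['vertices']
    # flatten the (x, y, z) and (u, v) tuples into the flat lists
    # expected by pyglet.graphics.draw_indexed
    v3f = [c for vertex in vertices for c in vertex]
    t2f = [c for uv in model['uvmap'] for c in uv]
    texture = model['texture']
    if texture is not None:
        glEnable(GL_TEXTURE_2D)
        glBindTexture(texture.target, texture.id)
    pyglet.graphics.draw_indexed(
        len(vertices), GL_TRIANGLES, model['indices'],
        ('v3f', v3f), ('t2f', t2f))
    if texture is not None:
        glDisable(GL_TEXTURE_2D)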
|
|
import asyncio
import json
from collections import namedtuple
from . import hdrs
from .helpers import PY_35, PY_352, Timeout, call_later, create_future
from .http import (WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, HttpProcessingError,
WebSocketError, WebSocketReader,
WSMessage, WSMsgType, do_handshake)
from .streams import FlowControlDataQueue
from .web_exceptions import (HTTPBadRequest, HTTPInternalServerError,
HTTPMethodNotAllowed)
from .web_response import StreamResponse
__all__ = ('WebSocketResponse', 'WebSocketReady', 'MsgType', 'WSMsgType',)
THRESHOLD_CONNLOST_ACCESS = 5
# deprecated since 1.0
MsgType = WSMsgType
class WebSocketReady(namedtuple('WebSocketReady', 'ok protocol')):
def __bool__(self):
return self.ok
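# Illustrative sketch (not part of this module): a minimal echo handler
# showing how WebSocketResponse (defined below) is typically used from a
# request handler, written in the same pre-async/await coroutine style.
# receive() and msg.type are assumed to behave as elsewhere in this library.
@asyncio.coroutine
def _example_echo_handler(request):
    ws = WebSocketResponse()
    yield from ws.prepare(request)
    while not ws.closed:
        msg = yield from ws.receive()
        if msg.type == WSMsgType.TEXT:
            ws.send_str(msg.data)
        elif msg.type in (WSMsgType.CLOSE, WSMsgType.ERROR):
            break
    return ws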
class WebSocketResponse(StreamResponse):
def __init__(self, *,
timeout=10.0, receive_timeout=None,
autoclose=True, autoping=True, heartbeat=None,
protocols=()):
super().__init__(status=101)
self._protocols = protocols
self._ws_protocol = None
self._writer = None
self._reader = None
self._closed = False
self._closing = False
self._conn_lost = 0
self._close_code = None
self._loop = None
self._waiting = None
self._exception = None
self._timeout = timeout
self._receive_timeout = receive_timeout
self._autoclose = autoclose
self._autoping = autoping
self._heartbeat = heartbeat
self._heartbeat_cb = None
if heartbeat is not None:
self._pong_heartbeat = heartbeat/2.0
self._pong_response_cb = None
def _cancel_heartbeat(self):
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = None
if self._heartbeat_cb is not None:
self._heartbeat_cb.cancel()
self._heartbeat_cb = None
def _reset_heartbeat(self):
self._cancel_heartbeat()
if self._heartbeat is not None:
self._heartbeat_cb = call_later(
self._send_heartbeat, self._heartbeat, self._loop)
def _send_heartbeat(self):
if self._heartbeat is not None and not self._closed:
self.ping()
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = call_later(
self._pong_not_received, self._pong_heartbeat, self._loop)
def _pong_not_received(self):
self._closed = True
self._close_code = 1006
self._exception = asyncio.TimeoutError()
if self._req is not None:
self._req.transport.close()
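    # Added note (descriptive, inferred from the code above): _reset_heartbeat()
    # schedules _send_heartbeat() to fire after `heartbeat` seconds; the ping it
    # sends then arms _pong_not_received() after a further heartbeat/2 seconds,
    # and if nothing is read back before that timer fires, the connection is
    # flagged closed with code 1006 and the underlying transport is closed.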
@asyncio.coroutine
def prepare(self, request):
        # run the pre-check first so its errors are not hidden by do_handshake() exceptions
if self._payload_writer is not None:
return self._payload_writer
protocol, writer = self._pre_start(request)
payload_writer = yield from super().prepare(request)
self._post_start(request, protocol, writer)
yield from payload_writer.drain()
return payload_writer
def _pre_start(self, request):
self._loop = request.app.loop
try:
status, headers, _, writer, protocol = do_handshake(
request.method, request.headers, request._protocol.writer,
self._protocols)
except HttpProcessingError as err:
if err.code == 405:
raise HTTPMethodNotAllowed(
request.method, [hdrs.METH_GET], body=b'')
elif err.code == 400:
raise HTTPBadRequest(text=err.message, headers=err.headers)
else: # pragma: no cover
raise HTTPInternalServerError() from err
self._reset_heartbeat()
if self.status != status:
self.set_status(status)
for k, v in headers:
self.headers[k] = v
self.force_close()
return protocol, writer
def _post_start(self, request, protocol, writer):
self._ws_protocol = protocol
self._writer = writer
self._reader = FlowControlDataQueue(
request._protocol, limit=2 ** 16, loop=self._loop)
request.protocol.set_parser(WebSocketReader(self._reader))
def can_prepare(self, request):
if self._writer is not None:
raise RuntimeError('Already started')
try:
_, _, _, _, protocol = do_handshake(
request.method, request.headers, request.transport,
self._protocols)
except HttpProcessingError:
return WebSocketReady(False, None)
else:
return WebSocketReady(True, protocol)
@property
def closed(self):
return self._closed
@property
def close_code(self):
return self._close_code
@property
def ws_protocol(self):
return self._ws_protocol
def exception(self):
return self._exception
def ping(self, message='b'):
if self._writer is None:
raise RuntimeError('Call .prepare() first')
self._writer.ping(message)
def pong(self, message='b'):
# unsolicited pong
if self._writer is None:
raise RuntimeError('Call .prepare() first')
self._writer.pong(message)
def send_str(self, data):
if self._writer is None:
raise RuntimeError('Call .prepare() first')
if not isinstance(data, str):
raise TypeError('data argument must be str (%r)' % type(data))
return self._writer.send(data, binary=False)
def send_bytes(self, data):
if self._writer is None:
raise RuntimeError('Call .prepare() first')
if not isinstance(data, (bytes, bytearray, memoryview)):
raise TypeError('data argument must be byte-ish (%r)' %
type(data))
return self._writer.send(data, binary=True)
def send_json(self, data, *, dumps=json.dumps):
return self.send_str(dumps(data))
@asyncio.coroutine
def write_eof(self):
if self._eof_sent:
return
if self._payload_writer is None:
raise RuntimeError("Response has not been started")
yield from self.close()
self._eof_sent = True
@asyncio.coroutine
def close(self, *, code=1000, message=b''):
if self._writer is None:
raise RuntimeError('Call .prepare() first')
# we need to break `receive()` cycle first,
# `close()` may be called from different task
if self._waiting is not None and not self._closed:
self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
yield from self._waiting
if not self._closed:
self._cancel_heartbeat()
self._closed = True
try:
self._writer.close(code, message)
yield from self.drain()
except (asyncio.CancelledError, asyncio.TimeoutError):
self._close_code = 1006
raise
except Exception as exc:
self._close_code = 1006
self._exception = exc
return True
if self._closing:
return True
try:
with Timeout(self._timeout, loop=self._loop):
msg = yield from self._reader.read()
except asyncio.CancelledError:
self._close_code = 1006
raise
except Exception as exc:
self._close_code = 1006
self._exception = exc
return True
if msg.type == WSMsgType.CLOSE:
self._close_code = msg.data
return True
self._close_code = 1006
self._exception = asyncio.TimeoutError()
return True
else:
return False
@asyncio.coroutine
def receive(self, timeout=None):
if self._reader is None:
raise RuntimeError('Call .prepare() first')
while True:
if self._waiting is not None:
raise RuntimeError(
'Concurrent call to receive() is not allowed')
if self._closed:
self._conn_lost += 1
if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS:
raise RuntimeError('WebSocket connection is closed.')
return WS_CLOSED_MESSAGE
elif self._closing:
return WS_CLOSING_MESSAGE
try:
self._waiting = create_future(self._loop)
try:
with Timeout(
timeout or self._receive_timeout, loop=self._loop):
msg = yield from self._reader.read()
finally:
self._reset_heartbeat()
waiter = self._waiting
self._waiting = None
waiter.set_result(True)
except (asyncio.CancelledError, asyncio.TimeoutError) as exc:
self._close_code = 1006
raise
except WebSocketError as exc:
self._close_code = exc.code
yield from self.close(code=exc.code)
return WSMessage(WSMsgType.ERROR, exc, None)
except Exception as exc:
self._exception = exc
self._closing = True
self._close_code = 1006
yield from self.close()
return WSMessage(WSMsgType.ERROR, exc, None)
if msg.type == WSMsgType.CLOSE:
self._closing = True
self._close_code = msg.data
if not self._closed and self._autoclose:
yield from self.close()
elif msg.type == WSMsgType.CLOSING:
self._closing = True
elif msg.type == WSMsgType.PING and self._autoping:
self.pong(msg.data)
continue
elif msg.type == WSMsgType.PONG and self._autoping:
continue
return msg
@asyncio.coroutine
def receive_str(self, *, timeout=None):
msg = yield from self.receive(timeout)
if msg.type != WSMsgType.TEXT:
raise TypeError(
"Received message {}:{!r} is not WSMsgType.TEXT".format(
msg.type, msg.data))
return msg.data
@asyncio.coroutine
def receive_bytes(self, *, timeout=None):
msg = yield from self.receive(timeout)
if msg.type != WSMsgType.BINARY:
raise TypeError(
"Received message {}:{!r} is not bytes".format(msg.type,
msg.data))
return msg.data
@asyncio.coroutine
def receive_json(self, *, loads=json.loads, timeout=None):
data = yield from self.receive_str(timeout=timeout)
return loads(data)
def write(self, data):
raise RuntimeError("Cannot call .write() for websocket")
if PY_35:
def __aiter__(self):
return self
if not PY_352: # pragma: no cover
__aiter__ = asyncio.coroutine(__aiter__)
@asyncio.coroutine
def __anext__(self):
msg = yield from self.receive()
if msg.type in (WSMsgType.CLOSE,
WSMsgType.CLOSING,
WSMsgType.CLOSED):
raise StopAsyncIteration # NOQA
return msg
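# --- Usage sketch (added; not part of the original module) -------------------
# A minimal handler showing how WebSocketResponse is typically driven: prepare
# the handshake, loop over receive(), echo text frames back, and stop on
# anything else. The handler name and how it gets routed are assumptions; only
# the API defined above (prepare/receive/send_str) is used.
@asyncio.coroutine
def _example_echo_handler(request):
    ws = WebSocketResponse()
    yield from ws.prepare(request)
    while True:
        msg = yield from ws.receive()
        if msg.type == WSMsgType.TEXT:
            ws.send_str(msg.data)
        else:
            break
    return ws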
|
|
import datetime
import json
import unittest
from enum import Enum
from rdr_service.model.bq_base import BQField, BQFieldModeEnum, BQFieldTypeEnum, BQRecord, BQRecordField, BQSchema, \
BQTable
from rdr_service.model.bq_questionnaires import BQPDRTheBasicsSchema
from tests.test_data import data_path
from tests.helpers.unittest_base import BaseTestCase
class BQTestEnum(Enum):
FIRST = 1
SECOND = 2
THIRD = 3
class BQTestNestedSchema(BQSchema):
int_field = BQField("int_field", BQFieldTypeEnum.INTEGER, BQFieldModeEnum.NULLABLE)
str_field = BQField("str_field", BQFieldTypeEnum.STRING, BQFieldModeEnum.NULLABLE)
enum_field = BQField("enum_field", BQFieldTypeEnum.INTEGER, BQFieldModeEnum.NULLABLE, fld_enum=BQTestEnum)
class BQTestSchema(BQSchema):
descr = BQField("descr", BQFieldTypeEnum.STRING, BQFieldModeEnum.REQUIRED)
timestamp = BQField("timestamp", BQFieldTypeEnum.DATETIME, BQFieldModeEnum.REQUIRED)
nested = BQRecordField("nested", BQTestNestedSchema)
# Simulated schema received from BQ. Keep identical to BQTestSchema.
schemaFromBQ = [
{"name": "descr", "type": "STRING", "mode": "REQUIRED"},
{"name": "timestamp", "type": "DATETIME", "mode": "REQUIRED"},
{
"name": "nested",
"type": "RECORD",
"mode": "REPEATED",
"description": "..tests.cloud_util_tests.bigquery_schema_test.BQTestNestedSchema",
"fields": [
{"name": "int_field", "type": "INTEGER", "mode": "NULLABLE"},
{"name": "str_field", "type": "STRING", "mode": "NULLABLE"},
{
"name": "enum_field",
"type": "INTEGER",
"mode": "NULLABLE",
"description": "..tests.cloud_util_tests.bigquery_schema_test.BQTestEnum",
},
],
},
]
class BQTestTable(BQTable):
__tablename__ = "test_table"
__schema__ = BQTestSchema
class BigQuerySchemaTest(unittest.TestCase):
""" test BigQuery schema structures """
def test_table_name(self):
table = BQTestTable()
name = table.get_name()
self.assertEqual(name, "test_table")
def test_schema_from_table_schema(self):
""" test that we can dump the schema and make an identical schema from the dump """
table = BQTestTable()
schema = table.get_schema()
struct_str = schema.to_json()
new_schema = BQSchema(json.loads(struct_str))
self.assertEqual(schema, new_schema)
@unittest.skip('Schema comparison operation needs to be more robust.')
def test_schema_from_dict(self):
""" test we can take a list of fields definitions and make an identical schema """
table = BQTestTable()
schema = table.get_schema()
new_schema = BQSchema(schemaFromBQ)
self.assertEqual(schema, new_schema)
def test_schema_getitem(self):
""" test getting a BQField/BQRecordField object from schema """
schema = BQTestSchema()
field = schema["timestamp"]
self.assertEqual(field, BQTestSchema.timestamp)
class BigQueryRecordTest(unittest.TestCase):
""" test BigQuery schema data """
partial_data = {"descr": "str_field data", "timestamp": datetime.datetime.utcnow()}
full_data = {
"descr": "str_field data",
"timestamp": datetime.datetime.utcnow(),
"nested": [
{"int_field": 10, "str_field": "first string data", "enum_field": BQTestEnum.FIRST},
{"int_field": 20, "str_field": "second string data", "enum_field": BQTestEnum.SECOND},
{"int_field": 30, "str_field": "third string data", "enum_field": BQTestEnum.THIRD},
],
}
bq_data = {
"descr": "str_field data",
"timestamp": "2019-06-26T19:26:42.015372",
"nested": [
{"int_field": 10, "enum_field": 1, "str_field": "first string data"},
{"int_field": 20, "enum_field": 2, "str_field": "second string data"},
{"int_field": 30, "enum_field": 3, "str_field": "third string data"},
],
}
def test_schema_no_data(self):
""" test a BQRecord object with only schema """
record = BQRecord(schema=BQTestSchema, data=None)
# add partial data
record.update_values(self.partial_data)
self.assertEqual(self.partial_data, record.to_dict())
def test_schema_with_data(self):
""" test a BQRecord object with schema and data """
record = BQRecord(schema=BQTestSchema, data=self.partial_data)
self.assertEqual(self.partial_data, record.to_dict())
def test_schema_nested_data(self):
""" test a BQRecord object with schema and nested data """
record = BQRecord(schema=BQTestSchema, data=self.full_data, convert_to_enum=False)
new_data = record.to_dict()
self.assertEqual(self.full_data, new_data)
# alter some data and verify we are not equal anymore.
new_data["nested"][0]["int_field"] = 55
self.assertNotEqual(self.full_data, new_data)
@unittest.skip("remove when value casting and constraint enforcement are in bq_base.BQRecord.update_values()")
def test_record_from_bq_data(self):
""" test receiving data from bigquery """
schema = BQSchema(schemaFromBQ)
record = BQRecord(schema=schema, data=self.bq_data)
new_data = record.to_dict()
self.assertEqual(self.full_data, new_data)
def test_schema_sql_field_list(self):
""" Test generating a sql field list from schema """
sql_fields = BQTestSchema.get_sql_field_names()
self.assertEqual('descr, timestamp, nested', sql_fields)
sql_fields = BQTestSchema.get_sql_field_names(exclude_fields=['timestamp'])
self.assertEqual('descr, nested', sql_fields)
def _questionnaire_response_url(participant_id):
return "Participant/%s/QuestionnaireResponse" % participant_id
class BigQuerySchemaDataTest(BaseTestCase):
participant_id = None
def setUp(self, with_data=True, with_consent_codes=False) -> None:
super().setUp(with_data=with_data, with_consent_codes=with_consent_codes)
# Create a participant.
self.participant_id = self.create_participant()
self.send_consent(self.participant_id)
# Load TheBasics questionnaire.
questionnaire_id = self.create_questionnaire("questionnaire_the_basics.json")
with open(data_path("questionnaire_the_basics_resp.json")) as f:
resource = json.load(f)
# Submit a TheBasics questionnaire response for the participant.
resource["subject"]["reference"] = resource["subject"]["reference"].format(participant_id=self.participant_id)
resource["questionnaire"]["reference"] = resource["questionnaire"]["reference"].format(
questionnaire_id=questionnaire_id
)
resource["authored"] = datetime.datetime.now().isoformat()
self.send_post(_questionnaire_response_url(self.participant_id), resource)
def test_included_fields_from_the_basics(self):
""" Ensure that when we generate a schema the included fields are in it."""
schema = BQPDRTheBasicsSchema()
schema_fields = schema.get_fields()
# Build a simple list of field names in the schema.
fields = list()
for schema_field in schema_fields:
fields.append(schema_field['name'])
included_fields = ['id', 'created', 'modified', 'authored', 'language', 'participant_id',
'questionnaire_response_id', 'Race_WhatRaceEthnicity']
for included_field in included_fields:
self.assertIn(included_field, fields)
def test_excluded_fields_from_the_basics(self):
""" Ensure that when we generate a schema the excluded fields are not in it."""
schema = BQPDRTheBasicsSchema()
schema_fields = schema.get_fields()
# Build a simple list of field names in the schema.
fields = list()
for schema_field in schema_fields:
fields.append(schema_field['name'])
for excluded_field in schema._excluded_fields:
self.assertNotIn(excluded_field, fields)
# TODO: Future: Test REQUIRED/NULLABLE BQ constraints when combining schema and data.
|
|
import sys
import re
from datetime import datetime
from functools import wraps
from django.conf import settings
from django.db import models
from django.db.models.sql import aggregates as sqlaggregates
from django.db.models.sql.compiler import SQLCompiler
from django.db.models.sql.constants import LOOKUP_SEP, MULTI, SINGLE
from django.db.models.sql.where import AND, OR
from django.db.utils import DatabaseError, IntegrityError
from django.db.models.sql.where import WhereNode
from django.db.models.fields import NOT_PROVIDED
from django.utils.tree import Node
from pyes import MatchAllQuery, FilteredQuery, BoolQuery, StringQuery, \
WildcardQuery, RegexTermQuery, RangeQuery, ESRange, \
TermQuery, ConstantScoreQuery, TermFilter, TermsFilter, NotFilter, RegexTermFilter
from djangotoolbox.db.basecompiler import NonrelQuery, NonrelCompiler, \
NonrelInsertCompiler, NonrelUpdateCompiler, NonrelDeleteCompiler
from django.db.models.fields import AutoField
import logging
TYPE_MAPPING_FROM_DB = {
'unicode': lambda val: unicode(val),
'int': lambda val: int(val),
'float': lambda val: float(val),
'bool': lambda val: bool(val),
}
TYPE_MAPPING_TO_DB = {
'unicode': lambda val: unicode(val),
'int': lambda val: int(val),
'float': lambda val: float(val),
'bool': lambda val: bool(val),
'date': lambda val: datetime(val.year, val.month, val.day),
'time': lambda val: datetime(2000, 1, 1, val.hour, val.minute,
val.second, val.microsecond),
}
OPERATORS_MAP = {
'exact': lambda val: val,
'iexact': lambda val: val, #tofix
'startswith': lambda val: r'^%s' % re.escape(val),
'istartswith': lambda val: r'^%s' % re.escape(val),
'endswith': lambda val: r'%s$' % re.escape(val),
'iendswith': lambda val: r'%s$' % re.escape(val),
'contains': lambda val: r'%s' % re.escape(val),
'icontains': lambda val: r'%s' % re.escape(val),
'regex': lambda val: val,
'iregex': lambda val: re.compile(val, re.IGNORECASE),
'gt': lambda val: {"_from" : val, "include_lower" : False},
'gte': lambda val: {"_from" : val, "include_lower" : True},
'lt': lambda val: {"_to" : val, "include_upper": False},
'lte': lambda val: {"_to" : val, "include_upper": True},
'range': lambda val: {"_from" : val[0], "_to" : val[1], "include_lower" : True, "include_upper": True},
'year': lambda val: {"_from" : val[0], "_to" : val[1], "include_lower" : True, "include_upper": False},
'isnull': lambda val: None if val else {'$ne': None},
'in': lambda val: val,
}
NEGATED_OPERATORS_MAP = {
'exact': lambda val: {'$ne': val},
'gt': lambda val: {"_to" : val, "include_upper": True},
'gte': lambda val: {"_to" : val, "include_upper": False},
'lt': lambda val: {"_from" : val, "include_lower" : True},
'lte': lambda val: {"_from" : val, "include_lower" : False},
'isnull': lambda val: {'$ne': None} if val else None,
'in': lambda val: {'$nin': val},
}
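# Illustration (added; worked examples, not from the original source): the
# maps above turn Django lookup values into the payloads used further down,
# e.g.
#   OPERATORS_MAP['gte'](5)          -> {"_from": 5, "include_lower": True}
#   OPERATORS_MAP['startswith']('a') -> '^a'  (regex fed to RegexTermFilter)
#   NEGATED_OPERATORS_MAP['gt'](5)   -> {"_to": 5, "include_upper": True}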
def _get_mapping(db_type, value, mapping):
# TODO - comments. lotsa comments
if value == NOT_PROVIDED:
return None
if value is None:
return None
if db_type in mapping:
_func = mapping[db_type]
else:
_func = lambda val: val
# TODO - what if the data is represented as list on the python side?
if isinstance(value, list):
return map(_func, value)
return _func(value)
def python2db(db_type, value):
return _get_mapping(db_type, value, TYPE_MAPPING_TO_DB)
def db2python(db_type, value):
return _get_mapping(db_type, value, TYPE_MAPPING_FROM_DB)
def safe_call(func):
@wraps(func)
def _func(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception, e:
import traceback
traceback.print_exc()
raise DatabaseError, DatabaseError(str(e)), sys.exc_info()[2]
return _func
class DBQuery(NonrelQuery):
# ----------------------------------------------
# Public API
# ----------------------------------------------
def __init__(self, compiler, fields):
super(DBQuery, self).__init__(compiler, fields)
self._connection = self.connection.db_connection
self._ordering = []
self.db_query = ConstantScoreQuery()
# This is needed for debugging
def __repr__(self):
return '<DBQuery: %r ORDER %r>' % (self.db_query, self._ordering)
@safe_call
def fetch(self, low_mark, high_mark):
results = self._get_results()
if low_mark > 0:
results = results[low_mark:]
if high_mark is not None:
results = results[low_mark:high_mark - low_mark]
for hit in results:
entity = hit.get_data()
entity['id'] = hit.meta.id
yield entity
@safe_call
def count(self, limit=None):
query = self.db_query
if self.db_query.is_empty():
query = MatchAllQuery()
res = self._connection.count(query, doc_types=self.query.model._meta.db_table)
return res["count"]
@safe_call
def delete(self):
self._collection.remove(self.db_query)
@safe_call
def order_by(self, ordering):
for order in ordering:
if order.startswith('-'):
order, direction = order[1:], {"reverse" : True}
else:
direction = 'desc'
self._ordering.append({order: direction})
# This function is used by the default add_filters() implementation
@safe_call
def add_filter(self, column, lookup_type, negated, db_type, value):
if column == self.query.get_meta().pk.column:
column = '_id'
# Emulated/converted lookups
if negated and lookup_type in NEGATED_OPERATORS_MAP:
op = NEGATED_OPERATORS_MAP[lookup_type]
negated = False
else:
op = OPERATORS_MAP[lookup_type]
value = op(self.convert_value_for_db(db_type, value))
queryf = self._get_query_type(column, lookup_type, db_type, value)
if negated:
self.db_query.add([NotFilter(queryf)])
else:
self.db_query.add([queryf])
def _get_query_type(self, column, lookup_type, db_type, value):
if db_type == "unicode":
if (lookup_type == "exact" or lookup_type == "iexact"):
q = TermQuery(column, value)
return q
if (lookup_type == "startswith" or lookup_type == "istartswith"):
return RegexTermFilter(column, value)
if (lookup_type == "endswith" or lookup_type == "iendswith"):
return RegexTermFilter(column, value)
if (lookup_type == "contains" or lookup_type == "icontains"):
return RegexTermFilter(column, value)
if (lookup_type == "regex" or lookup_type == "iregex"):
return RegexTermFilter(column, value)
if db_type == "datetime" or db_type == "date":
if (lookup_type == "exact" or lookup_type == "iexact"):
return TermFilter(column, value)
#TermFilter, TermsFilter
if lookup_type in ["gt", "gte", "lt", "lte", "range", "year"]:
value['field'] = column
return RangeQuery(ESRange(**value))
if lookup_type == "in":
# terms = [TermQuery(column, val) for val in value]
# if len(terms) == 1:
# return terms[0]
# return BoolQuery(should=terms)
return TermsFilter(field=column, values=value)
        raise NotImplementedError("Unsupported lookup type: %s" % lookup_type)
def _get_results(self):
"""
@returns: elasticsearch iterator over results
defined by self.query
"""
query = self.db_query
if self.db_query.is_empty():
query = MatchAllQuery()
if self._ordering:
query.sort = self._ordering
#print "query", self.query.tables, query
return self._connection.search(query, indices=[self.connection.db_name], doc_types=self.query.model._meta.db_table)
class SQLCompiler(NonrelCompiler):
"""
A simple query: no joins, no distinct, etc.
"""
query_class = DBQuery
def convert_value_from_db(self, db_type, value):
# Handle list types
if db_type is not None and \
isinstance(value, (list, tuple)) and len(value) and \
db_type.startswith('ListField:'):
db_sub_type = db_type.split(':', 1)[1]
value = [self.convert_value_from_db(db_sub_type, subvalue)
for subvalue in value]
else:
value = db2python(db_type, value)
return value
# This gets called for each field type when you insert() an entity.
# db_type is the string that you used in the DatabaseCreation mapping
def convert_value_for_db(self, db_type, value):
if db_type is not None and \
isinstance(value, (list, tuple)) and len(value) and \
db_type.startswith('ListField:'):
db_sub_type = db_type.split(':', 1)[1]
value = [self.convert_value_for_db(db_sub_type, subvalue)
for subvalue in value]
else:
value = python2db(db_type, value)
return value
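    # Worked example of the ListField handling above (added; illustrative
    # values only): convert_value_for_db('ListField:date', [date(2020, 1, 1)])
    # recurses with db_sub_type 'date' and returns
    # [datetime(2020, 1, 1, 0, 0)] via TYPE_MAPPING_TO_DB['date'].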
def insert_params(self):
conn = self.connection
params = {
'safe': conn.safe_inserts,
}
if conn.w:
params['w'] = conn.w
return params
def _get_ordering(self):
if not self.query.default_ordering:
ordering = self.query.order_by
else:
ordering = self.query.order_by or self.query.get_meta().ordering
result = []
for order in ordering:
if LOOKUP_SEP in order:
#raise DatabaseError("Ordering can't span tables on non-relational backends (%s)" % order)
print "Ordering can't span tables on non-relational backends (%s):skipping" % order
continue
if order == '?':
raise DatabaseError("Randomized ordering isn't supported by the backend")
order = order.lstrip('+')
descending = order.startswith('-')
name = order.lstrip('-')
if name == 'pk':
name = self.query.get_meta().pk.name
order = '-' + name if descending else name
if self.query.standard_ordering:
result.append(order)
else:
if descending:
result.append(name)
else:
result.append('-' + name)
return result
class SQLInsertCompiler(NonrelInsertCompiler, SQLCompiler):
@safe_call
def insert(self, data, return_id=False):
pk_column = self.query.get_meta().pk.column
pk = None
if pk_column in data:
pk = data[pk_column]
db_table = self.query.get_meta().db_table
logging.debug("Insert data %s: %s" % (db_table, data))
#print("Insert data %s: %s" % (db_table, data))
res = self.connection.db_connection.index(data, self.connection.db_name, db_table, id=pk)
#print "Insert result", res
return res['_id']
# TODO: Define a common nonrel API for updates and add it to the nonrel
# backend base classes and port this code to that API
class SQLUpdateCompiler(SQLCompiler):
def execute_sql(self, return_id=False):
"""
self.query - the data that should be inserted
"""
data = {}
for (field, value), column in zip(self.query.values, self.query.columns):
data[column] = python2db(field.db_type(connection=self.connection), value)
        # every object should have a unique pk
        pk_field = self.query.model._meta.pk
        pk = data.get(pk_field.column)
        db_table = self.query.get_meta().db_table
        res = self.connection.db_connection.index(data, self.connection.db_name, db_table, id=pk)
        return res['_id']
class SQLDeleteCompiler(NonrelDeleteCompiler, SQLCompiler):
def execute_sql(self, return_id=False):
"""
self.query - the data that should be inserted
"""
db_table = self.query.get_meta().db_table
if len(self.query.where.children) == 1 and isinstance(self.query.where.children[0][0].field, AutoField) and self.query.where.children[0][1] == "in":
for pk in self.query.where.children[0][3]:
self.connection.db_connection.delete(self.connection.db_name, db_table, pk)
return
|
|
#!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
import string
from optparse import OptionParser, make_option
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PKG_NAME = os.path.basename(SCRIPT_DIR)
PARAMETERS = None
#XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/5000/dbus/user_bus_socket"
SRC_DIR = ""
PKG_SRC_DIR = ""
def doCMD(cmd):
    # No need to handle timeouts in this short script; let the calling tool do it
print "-->> \"%s\"" % cmd
output = []
cmd_return_code = 1
cmd_proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
while True:
output_line = cmd_proc.stdout.readline().strip("\r\n")
cmd_return_code = cmd_proc.poll()
        if output_line == '' and cmd_return_code is not None:
break
sys.stdout.write("%s\n" % output_line)
sys.stdout.flush()
output.append(output_line)
return (cmd_return_code, output)
def updateCMD(cmd=None):
if "pkgcmd" in cmd:
cmd = "su - %s -c '%s;%s'" % (PARAMETERS.user, XW_ENV, cmd)
return cmd
def getUSERID():
if PARAMETERS.mode == "SDB":
cmd = "sdb -s %s shell id -u %s" % (
PARAMETERS.device, PARAMETERS.user)
else:
cmd = "ssh %s \"id -u %s\"" % (
PARAMETERS.device, PARAMETERS.user )
return doCMD(cmd)
def getPKGID(pkg_name=None):
if PARAMETERS.mode == "SDB":
cmd = "sdb -s %s shell %s" % (
PARAMETERS.device, updateCMD('pkgcmd -l'))
else:
cmd = "ssh %s \"%s\"" % (
PARAMETERS.device, updateCMD('pkgcmd -l'))
(return_code, output) = doCMD(cmd)
if return_code != 0:
return None
test_pkg_id = None
for line in output:
if line.find("[" + pkg_name + "]") != -1:
pkgidIndex = line.split().index("pkgid")
test_pkg_id = line.split()[pkgidIndex+1].strip("[]")
break
return test_pkg_id
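# Added note (assumed layout; exact columns vary between Tizen releases): the
# parsing above expects `pkgcmd -l` lines such as
#   pkg_type [xpk]   pkgid [abcdefghij]   pkg_name [example]   ...
# where getPKGID("example") returns "abcdefghij" from the token after "pkgid".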
def doRemoteCMD(cmd=None):
if PARAMETERS.mode == "SDB":
cmd = "sdb -s %s shell %s" % (PARAMETERS.device, updateCMD(cmd))
else:
cmd = "ssh %s \"%s\"" % (PARAMETERS.device, updateCMD(cmd))
return doCMD(cmd)
def doRemoteCopy(src=None, dest=None):
if PARAMETERS.mode == "SDB":
cmd_prefix = "sdb -s %s push" % PARAMETERS.device
cmd = "%s %s %s" % (cmd_prefix, src, dest)
else:
cmd = "scp -r %s %s:/%s" % (src, PARAMETERS.device, dest)
(return_code, output) = doCMD(cmd)
doRemoteCMD("sync")
if return_code != 0:
return True
else:
return False
def uninstPKGs():
action_status = True
for root, dirs, files in os.walk(SCRIPT_DIR):
if root.endswith("mediasrc"):
continue
for file in files:
if file.endswith(".xpk"):
pkg_id = getPKGID(os.path.basename(os.path.splitext(file)[0]))
if not pkg_id:
action_status = False
continue
(return_code, output) = doRemoteCMD(
"pkgcmd -u -t xpk -q -n %s" % pkg_id)
for line in output:
if "Failure" in line:
action_status = False
break
(return_code, output) = doRemoteCMD(
"rm -rf %s" % PKG_SRC_DIR)
if return_code != 0:
action_status = False
return action_status
def instPKGs():
action_status = True
(return_code, output) = doRemoteCMD(
"mkdir -p %s" % PKG_SRC_DIR)
if return_code != 0:
action_status = False
for root, dirs, files in os.walk(SCRIPT_DIR):
if root.endswith("mediasrc"):
continue
# for file in files:
# if file.endswith(".xpk"):
# if not doRemoteCopy(os.path.join(root, file), "%s/%s" % (SRC_DIR, file)):
# action_status = False
# (return_code, output) = doRemoteCMD(
# "pkgcmd -i -t xpk -q -p %s/%s" % (SRC_DIR, file))
# doRemoteCMD("rm -rf %s/%s" % (SRC_DIR, file))
# for line in output:
# if "Failure" in line:
# action_status = False
# break
# Do some special copy/delete... steps
'''
(return_code, output) = doRemoteCMD(
"mkdir -p %s/tests" % PKG_SRC_DIR)
if return_code != 0:
action_status = False
if not doRemoteCopy("specname/tests", "%s/tests" % PKG_SRC_DIR):
action_status = False
'''
return action_status
def main():
try:
usage = "usage: inst.py -i"
opts_parser = OptionParser(usage=usage)
opts_parser.add_option(
"-m", dest="mode", action="store", help="Specify mode")
opts_parser.add_option(
"-s", dest="device", action="store", help="Specify device")
opts_parser.add_option(
"-i", dest="binstpkg", action="store_true", help="Install package")
opts_parser.add_option(
"-u", dest="buninstpkg", action="store_true", help="Uninstall package")
opts_parser.add_option(
"-a", dest="user", action="store", help="User name")
global PARAMETERS
(PARAMETERS, args) = opts_parser.parse_args()
except Exception, e:
print "Got wrong option: %s, exit ..." % e
sys.exit(1)
if not PARAMETERS.user:
PARAMETERS.user = "app"
global SRC_DIR, PKG_SRC_DIR
SRC_DIR = "/home/%s/content" % PARAMETERS.user
PKG_SRC_DIR = "%s/tct/opt/%s" % (SRC_DIR, PKG_NAME)
if not PARAMETERS.mode:
PARAMETERS.mode = "SDB"
if PARAMETERS.mode == "SDB":
if not PARAMETERS.device:
(return_code, output) = doCMD("sdb devices")
for line in output:
if str.find(line, "\tdevice") != -1:
PARAMETERS.device = line.split("\t")[0]
break
else:
PARAMETERS.mode = "SSH"
if not PARAMETERS.device:
print "No device provided"
sys.exit(1)
user_info = getUSERID()
re_code = user_info[0]
    if re_code == 0:
global XW_ENV
userid = user_info[1][0]
XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/%s/dbus/user_bus_socket"%str(userid)
else:
print "[Error] cmd commands error : %s"%str(user_info[1])
sys.exit(1)
if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
print "-i and -u are conflict"
sys.exit(1)
if PARAMETERS.buninstpkg:
if not uninstPKGs():
sys.exit(1)
else:
if not instPKGs():
sys.exit(1)
if __name__ == "__main__":
main()
sys.exit(0)
|
|
import sys
import unittest
import numpy
import pytest
import cupy
from cupy import testing
from cupy._core import _accelerator
# Note that numpy.bincount does not support uint64 on 64-bit environment
# as it casts an input array to intp.
# And it does not support uint32, int64 and uint64 on 32-bit environment.
_all_types = (
numpy.float16, numpy.float32, numpy.float64,
numpy.int8, numpy.int16, numpy.int32,
numpy.uint8, numpy.uint16,
numpy.bool_)
_signed_types = (
numpy.int8, numpy.int16, numpy.int32,
numpy.bool_)
if sys.maxsize > 2 ** 32:
_all_types = _all_types + (numpy.int64, numpy.uint32)
_signed_types = _signed_types + (numpy.int64,)
def for_all_dtypes_bincount(name='dtype'):
return testing.for_dtypes(_all_types, name=name)
def for_signed_dtypes_bincount(name='dtype'):
return testing.for_dtypes(_signed_types, name=name)
def for_all_dtypes_combination_bincount(names):
return testing.for_dtypes_combination(_all_types, names=names)
@testing.gpu
class TestHistogram(unittest.TestCase):
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_array_equal()
def test_histogram(self, xp, dtype):
x = testing.shaped_arange((10,), xp, dtype)
y, bin_edges = xp.histogram(x)
return y, bin_edges
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_array_equal()
def test_histogram_same_value(self, xp, dtype):
x = xp.zeros(10, dtype)
y, bin_edges = xp.histogram(x, 3)
return y, bin_edges
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_array_equal()
def test_histogram_density(self, xp, dtype):
x = testing.shaped_arange((10,), xp, dtype)
y, bin_edges = xp.histogram(x, density=True)
# check normalization
area = xp.sum(y * xp.diff(bin_edges))
testing.assert_allclose(area, 1)
return y, bin_edges
@testing.for_float_dtypes()
@testing.numpy_cupy_array_equal()
def test_histogram_range_lower_outliers(self, xp, dtype):
# Check that lower outliers are not tallied
a = xp.arange(10, dtype=dtype) + .5
h, b = xp.histogram(a, range=[0, 9])
assert int(h.sum()) == 9
return h, b
@testing.for_float_dtypes()
@testing.numpy_cupy_array_equal()
def test_histogram_range_upper_outliers(self, xp, dtype):
# Check that upper outliers are not tallied
a = xp.arange(10, dtype=dtype) + .5
h, b = xp.histogram(a, range=[1, 10])
assert int(h.sum()) == 9
return h, b
@testing.for_float_dtypes()
@testing.numpy_cupy_allclose()
def test_histogram_range_with_density(self, xp, dtype):
a = xp.arange(10, dtype=dtype) + .5
h, b = xp.histogram(a, range=[1, 9], density=True)
# check normalization
testing.assert_allclose(float((h * xp.diff(b)).sum()), 1)
return h
@testing.for_float_dtypes()
@testing.numpy_cupy_allclose()
def test_histogram_range_with_weights_and_density(self, xp, dtype):
a = xp.arange(10, dtype=dtype) + .5
w = xp.arange(10, dtype=dtype) + .5
h, b = xp.histogram(a, range=[1, 9], weights=w, density=True)
testing.assert_allclose(float((h * xp.diff(b)).sum()), 1)
return h
def test_histogram_invalid_range(self):
for xp in (numpy, cupy):
with pytest.raises(ValueError):
# range must be None or have two elements
xp.histogram(xp.arange(10), range=[1, 9, 15])
def test_histogram_invalid_range2(self):
for xp in (numpy, cupy):
with pytest.raises(TypeError):
xp.histogram(xp.arange(10), range=10)
@testing.for_all_dtypes(no_bool=True, no_complex=True)
def test_histogram_weights_mismatch(self, dtype):
for xp in (numpy, cupy):
a = xp.arange(10, dtype=dtype) + .5
w = xp.arange(11, dtype=dtype) + .5
with pytest.raises(ValueError):
xp.histogram(a, range=[1, 9], weights=w, density=True)
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_allclose()
def test_histogram_int_weights_dtype(self, xp, dtype):
# Check the type of the returned histogram
a = xp.arange(10, dtype=dtype)
h, b = xp.histogram(a, weights=xp.ones(10, int))
assert xp.issubdtype(h.dtype, xp.integer)
return h
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_allclose()
def test_histogram_float_weights_dtype(self, xp, dtype):
# Check the type of the returned histogram
a = xp.arange(10, dtype=dtype)
h, b = xp.histogram(a, weights=xp.ones(10, float))
assert xp.issubdtype(h.dtype, xp.floating)
return h
def test_histogram_weights_basic(self):
v = cupy.random.rand(100)
w = cupy.ones(100) * 5
a, b = cupy.histogram(v)
na, nb = cupy.histogram(v, density=True)
wa, wb = cupy.histogram(v, weights=w)
nwa, nwb = cupy.histogram(v, weights=w, density=True)
testing.assert_array_almost_equal(a * 5, wa)
testing.assert_array_almost_equal(na, nwa)
@testing.for_float_dtypes()
@testing.numpy_cupy_allclose()
def test_histogram_float_weights(self, xp, dtype):
# Check weights are properly applied.
v = xp.linspace(0, 10, 10, dtype=dtype)
w = xp.concatenate((xp.zeros(5, dtype=dtype), xp.ones(5, dtype=dtype)))
wa, wb = xp.histogram(v, bins=xp.arange(11), weights=w)
testing.assert_array_almost_equal(wa, w)
return wb
@testing.for_int_dtypes(no_bool=True)
@testing.numpy_cupy_array_equal(type_check=False)
def test_histogram_int_weights(self, xp, dtype):
# Check with integer weights
v = xp.asarray([1, 2, 2, 4], dtype=dtype)
w = xp.asarray([4, 3, 2, 1], dtype=dtype)
wa, wb = xp.histogram(v, bins=4, weights=w)
testing.assert_array_equal(wa, [4, 5, 0, 1])
return wa, wb
@testing.for_int_dtypes(no_bool=True)
@testing.numpy_cupy_allclose()
def test_histogram_int_weights_normalized(self, xp, dtype):
v = xp.asarray([1, 2, 2, 4], dtype=dtype)
w = xp.asarray([4, 3, 2, 1], dtype=dtype)
wa, wb = xp.histogram(v, bins=4, weights=w, density=True)
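        # Added note: with v=[1, 2, 2, 4] and 4 equal-width bins over [1, 4]
        # (width 3/4), the weighted counts are [4, 5, 0, 1]; density divides
        # by the total weight (10) and the bin width, hence /10./3.*4 below.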
testing.assert_array_almost_equal(
wa, xp.asarray([4, 5, 0, 1]) / 10. / 3. * 4)
return wb
@testing.for_int_dtypes(no_bool=True)
@testing.numpy_cupy_array_equal()
def test_histogram_int_weights_nonuniform_bins(self, xp, dtype):
# Check weights with non-uniform bin widths
a, b = xp.histogram(
xp.arange(9, dtype=dtype),
xp.asarray([0, 1, 3, 6, 10], dtype=dtype),
weights=xp.asarray([2, 1, 1, 1, 1, 1, 1, 1, 1], dtype=dtype),
density=True)
testing.assert_array_almost_equal(a, [.2, .1, .1, .075])
return a, b
@testing.for_complex_dtypes()
@testing.numpy_cupy_array_equal(type_check=False)
def test_histogram_complex_weights(self, xp, dtype):
values = xp.asarray([1.3, 2.5, 2.3])
weights = xp.asarray([1, -1, 2]) + 1j * xp.asarray([2, 1, 2])
weights = weights.astype(dtype)
a, b = xp.histogram(
values, bins=2, weights=weights)
return a, b
@testing.for_complex_dtypes()
@testing.numpy_cupy_array_equal(type_check=False)
def test_histogram_complex_weights_uneven_bins(self, xp, dtype):
values = xp.asarray([1.3, 2.5, 2.3])
weights = xp.asarray([1, -1, 2]) + 1j * xp.asarray([2, 1, 2])
weights = weights.astype(dtype)
a, b = xp.histogram(
values, bins=xp.asarray([0, 2, 3]), weights=weights)
return a, b
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_array_equal()
def test_histogram_empty(self, xp, dtype):
x = xp.array([], dtype)
y, bin_edges = xp.histogram(x)
return y, bin_edges
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_array_equal()
def test_histogram_int_bins(self, xp, dtype):
x = testing.shaped_arange((10,), xp, dtype)
y, bin_edges = xp.histogram(x, 4)
return y, bin_edges
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_array_equal()
def test_histogram_array_bins(self, xp, dtype):
x = testing.shaped_arange((10,), xp, dtype)
bins = testing.shaped_arange((3,), xp, dtype)
y, bin_edges = xp.histogram(x, bins)
return y, bin_edges
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_array_equal()
def test_histogram_numpy_bins(self, xp, dtype):
x = testing.shaped_arange((10,), xp, dtype)
bins = testing.shaped_arange((3,), numpy, dtype)
y, bin_edges = xp.histogram(x, bins)
return y, bin_edges
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_array_equal()
def test_histogram_list_bins(self, xp, dtype):
x = testing.shaped_arange((10,), xp, dtype)
bins = list(testing.shaped_arange((3,), numpy, dtype))
y, bin_edges = xp.histogram(x, bins)
return y, bin_edges
# numpy 1.13.1 does not check this error correctly with unsigned int.
@testing.for_all_dtypes(no_bool=True, no_complex=True)
def test_histogram_bins_not_ordered(self, dtype):
for xp in (numpy, cupy):
x = testing.shaped_arange((10,), xp, dtype)
bins = xp.array([1, 3, 2], dtype)
with pytest.raises(ValueError):
xp.histogram(x, bins)
@for_all_dtypes_bincount()
@testing.numpy_cupy_allclose(accept_error=TypeError)
def test_bincount(self, xp, dtype):
x = testing.shaped_arange((3,), xp, dtype)
return xp.bincount(x)
@for_all_dtypes_bincount()
@testing.numpy_cupy_allclose(accept_error=TypeError)
def test_bincount_duplicated_value(self, xp, dtype):
x = xp.array([1, 2, 2, 1, 2, 4], dtype)
return xp.bincount(x)
@for_all_dtypes_combination_bincount(names=['x_type', 'w_type'])
@testing.numpy_cupy_allclose(accept_error=TypeError)
def test_bincount_with_weight(self, xp, x_type, w_type):
x = testing.shaped_arange((3,), xp, x_type)
w = testing.shaped_arange((3,), xp, w_type)
return xp.bincount(x, weights=w)
@for_all_dtypes_bincount()
@testing.numpy_cupy_allclose(accept_error=TypeError)
def test_bincount_with_minlength(self, xp, dtype):
x = testing.shaped_arange((3,), xp, dtype)
return xp.bincount(x, minlength=5)
@for_all_dtypes_combination_bincount(names=['x_type', 'w_type'])
def test_bincount_invalid_weight_length(self, x_type, w_type):
for xp in (numpy, cupy):
x = testing.shaped_arange((1,), xp, x_type)
w = testing.shaped_arange((2,), xp, w_type)
# TODO(imanishi): Split this test into a test for ValueError and
# a test for TypeError.
with pytest.raises((ValueError, TypeError)):
xp.bincount(x, weights=w)
@for_signed_dtypes_bincount()
def test_bincount_negative(self, dtype):
for xp in (numpy, cupy):
x = testing.shaped_arange((3,), xp, dtype) - 2
with pytest.raises(ValueError):
xp.bincount(x)
@for_all_dtypes_bincount()
def test_bincount_too_deep(self, dtype):
for xp in (numpy, cupy):
x = xp.array([[1]], dtype)
with pytest.raises(ValueError):
xp.bincount(x)
@for_all_dtypes_bincount()
def test_bincount_too_small(self, dtype):
for xp in (numpy, cupy):
x = xp.zeros((), dtype)
with pytest.raises(ValueError):
xp.bincount(x)
@for_all_dtypes_bincount()
@testing.numpy_cupy_allclose(accept_error=TypeError)
def test_bincount_zero(self, xp, dtype):
x = testing.shaped_arange((3,), xp, dtype)
return xp.bincount(x, minlength=0)
@for_all_dtypes_bincount()
def test_bincount_too_small_minlength(self, dtype):
for xp in (numpy, cupy):
x = testing.shaped_arange((3,), xp, dtype)
# TODO(imanishi): Split this test into a test for ValueError and
# a test for TypeError.
with pytest.raises((ValueError, TypeError)):
xp.bincount(x, minlength=-1)
# This class compares CUB results against NumPy's
@testing.gpu
@unittest.skipUnless(cupy.cuda.cub.available, 'The CUB routine is not enabled')
class TestCubHistogram(unittest.TestCase):
def setUp(self):
self.old_accelerators = _accelerator.get_routine_accelerators()
_accelerator.set_routine_accelerators(['cub'])
def tearDown(self):
_accelerator.set_routine_accelerators(self.old_accelerators)
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_array_equal()
def test_histogram(self, xp, dtype):
x = testing.shaped_arange((10,), xp, dtype)
if xp is numpy:
return xp.histogram(x)
# xp is cupy, first ensure we really use CUB
cub_func = 'cupy._statistics.histogram.cub.device_histogram'
with testing.AssertFunctionIsCalled(cub_func):
xp.histogram(x)
# ...then perform the actual computation
return xp.histogram(x)
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_array_equal()
def test_histogram_range_float(self, xp, dtype):
a = testing.shaped_arange((10,), xp, dtype)
h, b = xp.histogram(a, testing.shaped_arange((10,), xp, numpy.float64))
assert int(h.sum()) == 10
return h, b
@testing.for_all_dtypes_combination(['dtype_a', 'dtype_b'],
no_bool=True, no_complex=True)
@testing.numpy_cupy_array_equal()
def test_histogram_with_bins(self, xp, dtype_a, dtype_b):
x = testing.shaped_arange((10,), xp, dtype_a)
bins = testing.shaped_arange((4,), xp, dtype_b)
if xp is numpy:
return xp.histogram(x, bins)[0]
# xp is cupy, first ensure we really use CUB
cub_func = 'cupy._statistics.histogram.cub.device_histogram'
with testing.AssertFunctionIsCalled(cub_func):
xp.histogram(x, bins)
# ...then perform the actual computation
return xp.histogram(x, bins)[0]
@testing.for_all_dtypes_combination(['dtype_a', 'dtype_b'],
no_bool=True, no_complex=True)
@testing.numpy_cupy_array_equal()
def test_histogram_with_bins2(self, xp, dtype_a, dtype_b):
x = testing.shaped_arange((10,), xp, dtype_a)
bins = testing.shaped_arange((4,), xp, dtype_b)
if xp is numpy:
return xp.histogram(x, bins)[1]
# xp is cupy, first ensure we really use CUB
cub_func = 'cupy._statistics.histogram.cub.device_histogram'
with testing.AssertFunctionIsCalled(cub_func):
xp.histogram(x, bins)
# ...then perform the actual computation
return xp.histogram(x, bins)[1]
@testing.gpu
@testing.parameterize(*testing.product(
{'bins': [
# Test monotonically increasing with in-bounds values
[1.5, 2.5, 4.0, 6.0],
# Explicit out-of-bounds for x values
[-1.0, 1.0, 2.5, 4.0, 20.0],
# Repeated values should yield right-most or left-most indexes
[0.0, 1.0, 1.0, 4.0, 4.0, 10.0],
],
'increasing': [True, False],
'right': [True, False],
'shape': [(), (10,), (6, 3, 3)]})
)
class TestDigitize:
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_array_equal()
def test_digitize(self, xp, dtype):
x = testing.shaped_arange(self.shape, xp, dtype)
bins = self.bins
if not self.increasing:
bins = bins[::-1]
bins = xp.array(bins)
y = xp.digitize(x, bins, right=self.right)
return y,
@testing.gpu
@testing.parameterize(
{'right': True},
{'right': False})
class TestDigitizeNanInf(unittest.TestCase):
@testing.numpy_cupy_array_equal()
def test_digitize_nan(self, xp):
x = testing.shaped_arange((14,), xp, xp.float32)
x[5] = float('nan')
bins = xp.array([1.0, 3.0, 5.0, 8.0, 12.0], xp.float32)
y = xp.digitize(x, bins, right=self.right)
return y,
@testing.numpy_cupy_array_equal()
def test_digitize_nan_bins(self, xp):
x = testing.shaped_arange((14,), xp, xp.float32)
bins = xp.array([1.0, 3.0, 5.0, 8.0, float('nan')], xp.float32)
y = xp.digitize(x, bins, right=self.right)
return y,
@testing.numpy_cupy_array_equal()
def test_digitize_nan_bins_repeated(self, xp):
x = testing.shaped_arange((14,), xp, xp.float32)
x[5] = float('nan')
bins = [1.0, 3.0, 5.0, 8.0, float('nan'), float('nan')]
bins = xp.array(bins, xp.float32)
y = xp.digitize(x, bins, right=self.right)
return y,
@testing.numpy_cupy_array_equal()
def test_digitize_nan_bins_decreasing(self, xp):
x = testing.shaped_arange((14,), xp, xp.float32)
x[5] = float('nan')
bins = [float('nan'), 8.0, 5.0, 3.0, 1.0]
bins = xp.array(bins, xp.float32)
y = xp.digitize(x, bins, right=self.right)
return y,
@testing.numpy_cupy_array_equal()
def test_digitize_nan_bins_decreasing_repeated(self, xp):
x = testing.shaped_arange((14,), xp, xp.float32)
x[5] = float('nan')
bins = [float('nan'), float('nan'), float('nan'), 5.0, 3.0, 1.0]
bins = xp.array(bins, xp.float32)
y = xp.digitize(x, bins, right=self.right)
return y,
@testing.numpy_cupy_array_equal()
def test_digitize_all_nan_bins(self, xp):
x = testing.shaped_arange((14,), xp, xp.float32)
x[5] = float('nan')
bins = [float('nan'), float('nan'), float('nan'), float('nan')]
bins = xp.array(bins, xp.float32)
y = xp.digitize(x, bins, right=self.right)
return y,
@testing.numpy_cupy_array_equal()
def test_searchsorted_inf(self, xp):
x = testing.shaped_arange((14,), xp, xp.float64)
x[5] = float('inf')
bins = xp.array([0, 1, 2, 4, 10])
y = xp.digitize(x, bins, right=self.right)
return y,
@testing.numpy_cupy_array_equal()
def test_searchsorted_minf(self, xp):
x = testing.shaped_arange((14,), xp, xp.float64)
x[5] = float('-inf')
bins = xp.array([0, 1, 2, 4, 10])
y = xp.digitize(x, bins, right=self.right)
return y,
@testing.gpu
class TestDigitizeInvalid(unittest.TestCase):
def test_digitize_complex(self):
for xp in (numpy, cupy):
x = testing.shaped_arange((14,), xp, complex)
bins = xp.array([1.0, 3.0, 5.0, 8.0, 12.0], complex)
with pytest.raises(TypeError):
xp.digitize(x, bins)
def test_digitize_nd_bins(self):
for xp in (numpy, cupy):
x = testing.shaped_arange((14,), xp, xp.float64)
bins = xp.array([[1], [2]])
with pytest.raises(ValueError):
xp.digitize(x, bins)
@testing.parameterize(
*testing.product(
{'weights': [None, 1, 2],
'weights_dtype': [numpy.int32, numpy.float64],
'density': [True, False],
'bins': [10, (8, 16, 12), (16, 8, 12), (16, 12, 8), (12, 8, 16),
'array_list'],
'range': [None, ((20, 50), (10, 100), (0, 40))]}
)
)
@testing.gpu
class TestHistogramdd:
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_allclose(atol=1e-7, rtol=1e-7)
def test_histogramdd(self, xp, dtype):
x = testing.shaped_random((100, 3), xp, dtype, scale=100)
if self.bins == 'array_list':
bins = [xp.arange(0, 100, 4),
xp.arange(0, 100, 10),
xp.arange(25)]
else:
bins = self.bins
if self.weights is not None:
weights = xp.ones((x.shape[0],), dtype=self.weights_dtype)
else:
weights = None
y, bin_edges = xp.histogramdd(x, bins=bins, range=self.range,
weights=weights, density=self.density)
return [y, ] + [e for e in bin_edges]
@testing.gpu
class TestHistogramddErrors(unittest.TestCase):
def test_histogramdd_invalid_bins(self):
for xp in (numpy, cupy):
x = testing.shaped_random((16, 2), xp, scale=100)
bins = [xp.arange(0, 100, 10), ] * 3
with pytest.raises(ValueError):
y, bin_edges = xp.histogramdd(x, bins)
def test_histogramdd_invalid_bins2(self):
for xp in (numpy, cupy):
x = testing.shaped_random((16, 2), xp, scale=100)
with pytest.raises(ValueError):
y, bin_edges = xp.histogramdd(x, bins=0)
def test_histogramdd_invalid_bins3(self):
for xp in (numpy, cupy):
x = testing.shaped_random((16, 2), xp, scale=100)
bins = xp.arange(100)
bins[30] = 99 # non-ascending bins
with pytest.raises(ValueError):
y, bin_edges = xp.histogramdd(x, bins=bins)
def test_histogramdd_invalid_bins4(self):
for xp in (numpy, cupy):
x = testing.shaped_random((16, 2), xp, scale=100)
bins = xp.arange(64).reshape((8, 8)) # too many dimensions
with pytest.raises(ValueError):
y, bin_edges = xp.histogramdd(x, bins=bins)
def test_histogramdd_invalid_range(self):
for xp in (numpy, cupy):
x = testing.shaped_random((16, 2), xp, scale=100)
r = ((0, 100),) * 3
with pytest.raises(ValueError):
y, bin_edges = xp.histogramdd(x, range=r)
def test_histogramdd_disallow_arraylike_bins(self):
x = testing.shaped_random((16, 2), cupy, scale=100)
bins = [[0, 10, 20, 50, 90]] * 2 # too many dimensions
with pytest.raises(ValueError):
y, bin_edges = cupy.histogramdd(x, bins=bins)
@testing.parameterize(
*testing.product(
{'weights': [None, 1, 2],
'weights_dtype': [numpy.int32, numpy.float64],
'density': [True, False],
'bins': [10, (8, 16), (16, 8), 'array_list', 'array'],
'range': [None, ((20, 50), (10, 100))]}
)
)
@testing.gpu
class TestHistogram2d:
@testing.for_all_dtypes(no_bool=True, no_complex=True)
@testing.numpy_cupy_allclose(atol=1e-7, rtol=1e-7)
def test_histogram2d(self, xp, dtype):
x = testing.shaped_random((100, ), xp, dtype, scale=100)
y = testing.shaped_random((100, ), xp, dtype, scale=100)
if self.bins == 'array_list':
bins = [xp.arange(0, 100, 4), xp.arange(0, 100, 10)]
elif self.bins == 'array':
bins = xp.arange(0, 100, 4)
else:
bins = self.bins
if self.weights is not None:
weights = xp.ones((x.shape[0],), dtype=self.weights_dtype)
else:
weights = None
y, edges0, edges1 = xp.histogram2d(x, y, bins=bins,
range=self.range, weights=weights,
density=self.density)
return y, edges0, edges1
@testing.gpu
class TestHistogram2dErrors(unittest.TestCase):
def test_histogram2d_disallow_arraylike_bins(self):
x = testing.shaped_random((16, ), cupy, scale=100)
y = testing.shaped_random((16, ), cupy, scale=100)
bins = [0, 10, 20, 50, 90]
with pytest.raises(ValueError):
y, bin_edges = cupy.histogram2d(x, y, bins=bins)
|
|
from django import template
from django.conf import settings
from django.core import urlresolvers
from django.utils.safestring import mark_safe
from l10n.l10n_settings import get_l10n_setting
from product.models import Category
from satchmo_utils.numbers import trunc_decimal
from satchmo_utils.json import json_encode
from threaded_multihost import threadlocals
import logging
import math
log = logging.getLogger('shop.templatetags.satchmo_util')
register = template.Library()
def debug_mode(value):
"""Return true if site is in debug mode"""
if settings.DEBUG:
return "true"
return ""
register.filter('debug_mode', debug_mode)
def template_range(value):
"""Return a range 1..value"""
try:
value = int(value)
except:
value = 0
return range(1, value + 1)
register.filter('template_range', template_range)
def in_list(value, val=None):
"""returns "true" if the value is in the list"""
if val in value:
return "true"
return ""
register.filter('in_list', in_list)
def as_json(value):
"""Return the value as a json encoded object"""
return mark_safe(json_encode(value))
register.filter('as_json', as_json)
def blackbird_logging(context):
return {
'debug': settings.DEBUG,
'form': context.get('form', None),
'STATIC_URL': context.get('STATIC_URL', None),
}
register.inclusion_tag('shop/_blackbird_logging.html', takes_context=True)(blackbird_logging)
def truncate_decimal(val, places=2):
return trunc_decimal(val, places)
register.filter('truncate_decimal', truncate_decimal)
def tag_attr(obj, arg1):
att, value = arg1.split("=")
obj.field.widget.attrs[att] = value
return obj
register.filter('tag_attr', tag_attr)
def shuffle(l):
"""
Returns the shuffled list.
"""
import random
l = list(l)
random.shuffle(l)
return l
register.filter('shuffle', shuffle)
def remove_tags(value):
"""
    Returns the text with all tags removed.
    This can fail if given invalid input. It is only intended for use on safe
    HTML markup and should not be used to clean unsafe input data. For example,
    the malformed input below is not fully stripped.
    Example::
>> remove_tags('<<> <test></test>')
'<test></test>'
"""
i = value.find('<')
last = -1
out = []
if i == -1:
return value
while i > -1:
out.append(value[last + 1:i])
last = value.find(">", i)
if last > -1:
i = value.find("<", last)
else:
break
if last > -1:
out.append(value[last + 1:])
ret = " ".join(out)
ret = ret.replace(" ", " ")
ret = ret.replace(" ", " ")
if ret.endswith(" "):
ret = ret[:-1]
return ret
register.filter('remove_tags', remove_tags)
def lookup(value, key):
"""
Return a dictionary lookup of key in value
"""
try:
return value[key]
except KeyError:
return ""
register.filter('lookup', lookup)
def is_mod(value, args=""):
try:
val = int(value)
mod = int(args)
if val % mod == 0:
return "true"
except:
pass
return ""
register.filter('is_mod', is_mod)
def more_than(value, args=""):
try:
val = int(value)
more = int(args)
if val > more:
return "true"
except:
pass
return ""
register.filter('more_than', more_than)
def satchmo_category_search_form(category=None):
"""
Display the form for customer to specify category to search.
"""
try:
url = urlresolvers.reverse('satchmo_search')
except urlresolvers.NoReverseMatch:
url = ""
log.warning('No url found for satchmo_search (OK if running tests)')
cats = Category.objects.root_categories()
return {
'satchmo_search_url': url,
'categories': cats,
'category': category,
}
register.inclusion_tag("shop/_search.html", takes_context=False)(satchmo_category_search_form)
def satchmo_language_selection_form(context):
"""
Display the set language form, if enabled in shop settings.
"""
request = threadlocals.get_current_request()
enabled = get_l10n_setting('allow_translation_choice')
languages = []
if enabled:
try:
url = urlresolvers.reverse('satchmo_set_language')
languages = settings.LANGUAGES
print "111"
except urlresolvers.NoReverseMatch:
url = ""
log.warning('No url found for satchmo_set_language (OK if running tests)')
print "112"
else:
url = ""
return {
'enabled': enabled,
'set_language_url': url,
'languages': languages,
'STATIC_URL': context.get('STATIC_URL', ''), # for easy flag images
'django_language': request.session.get('django_language', 'en'),
}
register.inclusion_tag("l10n/_language_selection_form.html", takes_context=True)(satchmo_language_selection_form)
def satchmo_search_form():
"""
Display the search form.
"""
try:
url = urlresolvers.reverse('satchmo_search')
except urlresolvers.NoReverseMatch:
url = ""
log.warning('No url found for satchmo_search (OK if running tests)')
return {
'satchmo_search_url': url,
'categories': None,
}
register.inclusion_tag("shop/_search.html", takes_context=False)(satchmo_search_form)
def pounds(weight):
"""
Finds the weight of a cart item, taking into consideration the quantity in
the order.
"""
return int(weight)
register.filter('pounds', pounds)
def ounces(weight):
fract = weight - pounds(weight)
return int(math.ceil(fract * 16))
register.filter('ounces', ounces)
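# Template usage sketch (added; illustrative only -- the variable names are
# assumptions, the filters are the ones registered above):
#   {{ item.weight|pounds }} lb {{ item.weight|ounces }} oz
#   {% if forloop.counter|is_mod:"3" %}<br/>{% endif %}
#   {{ products|shuffle }}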
|
|
# -*- coding: utf-8 -*-
from six import text_type
from zerver.lib.test_classes import WebhookTestCase
class TaigaHookTests(WebhookTestCase):
STREAM_NAME = 'taiga'
TOPIC = "subject"
URL_TEMPLATE = u"/api/v1/external/taiga?stream={stream}&api_key={api_key}&topic={topic}"
FIXTURE_DIR_NAME = 'taiga'
def build_webhook_url(self):
# type: () -> text_type
api_key = self.get_api_key(self.TEST_USER_EMAIL)
return self.URL_TEMPLATE.format(stream=self.STREAM_NAME, api_key=api_key, topic=self.TOPIC)
def test_taiga_userstory_deleted(self):
# type: () -> None
message = u':x: Antek deleted user story **A newer hope**.\n'
self.send_and_test_stream_message("userstory_deleted", u'subject', message)
def test_taiga_userstory_created(self):
# type: () -> None
message = u':package: Antek created user story **A new hope**.\n'
self.send_and_test_stream_message("userstory_created", u'subject', message)
def test_taiga_userstory_changed_unblocked(self):
# type: () -> None
message = u':unlock: Antek unblocked user story **A newer hope**.\n'
self.send_and_test_stream_message("userstory_changed_unblocked", u'subject', message)
def test_taiga_userstory_changed_subject(self):
# type: () -> None
message = u':notebook: Antek renamed user story from A new hope to **A newer hope**.\n'
self.send_and_test_stream_message("userstory_changed_subject", u'subject', message)
def test_taiga_userstory_changed_status(self):
# type: () -> None
message = u':chart_with_upwards_trend: Antek changed status of user story **A new hope** from New to Done.\n'
self.send_and_test_stream_message("userstory_changed_status", u'subject', message)
def test_taiga_userstory_changed_reassigned(self):
# type: () -> None
message = u':busts_in_silhouette: Antek reassigned user story **Great US** from Antek to Han Solo.\n'
self.send_and_test_stream_message("userstory_changed_reassigned", u'subject', message)
def test_taiga_userstory_changed_points(self):
# type: () -> None
message = u':game_die: Antek changed estimation of user story **A new hope**.\n'
self.send_and_test_stream_message("userstory_changed_points", u'subject', message)
def test_taiga_userstory_changed_new_milestone(self):
# type: () -> None
message = u':calendar: Antek added user story **A newer hope** to sprint New sprint.\n'
self.send_and_test_stream_message("userstory_changed_new_milestone", u'subject', message)
def test_taiga_userstory_changed_milestone(self):
# type: () -> None
message = u':calendar: Antek changed sprint of user story **A newer hope** from Old sprint to New sprint.\n'
self.send_and_test_stream_message("userstory_changed_milestone", u'subject', message)
def test_taiga_userstory_changed_description(self):
# type: () -> None
message = u':notebook: Antek updated description of user story **A newer hope**.\n'
self.send_and_test_stream_message("userstory_changed_description", u'subject', message)
def test_taiga_userstory_changed_closed(self):
# type: () -> None
message = u':chart_with_upwards_trend: Antek changed status of user story **A newer hope** from New to Done.\n:checkered_flag: Antek closed user story **A newer hope**.\n'
self.send_and_test_stream_message("userstory_changed_closed", u'subject', message)
def test_taiga_userstory_changed_reopened(self):
# type: () -> None
message = u':chart_with_upwards_trend: Antek changed status of user story **A newer hope** from Done to New.\n:package: Antek reopened user story **A newer hope**.\n'
self.send_and_test_stream_message("userstory_changed_reopened", u'subject', message)
def test_taiga_userstory_changed_blocked(self):
# type: () -> None
message = u':lock: Antek blocked user story **A newer hope**.\n'
self.send_and_test_stream_message("userstory_changed_blocked", u'subject', message)
def test_taiga_userstory_changed_assigned(self):
# type: () -> None
message = u':busts_in_silhouette: Antek assigned user story **Great US** to Antek.\n'
self.send_and_test_stream_message("userstory_changed_assigned", u'subject', message)
def test_taiga_task_created(self):
# type: () -> None
message = u':clipboard: Antek created task **New task assigned and in progress**.\n'
self.send_and_test_stream_message("task_created", u'subject', message)
def test_taiga_task_changed_status(self):
# type: () -> None
message = u':chart_with_upwards_trend: Antek changed status of task **New task assigned and in progress** from Ready for test to New.\n'
self.send_and_test_stream_message("task_changed_status", u'subject', message)
def test_taiga_task_changed_blocked(self):
# type: () -> None
message = u':lock: Antek blocked task **A new task**.\n'
self.send_and_test_stream_message("task_changed_blocked", u'subject', message)
def test_taiga_task_changed_unblocked(self):
# type: () -> None
message = u':unlock: Antek unblocked task **A new task**.\n'
self.send_and_test_stream_message("task_changed_unblocked", u'subject', message)
def test_taiga_task_changed_assigned(self):
# type: () -> None
message = u':busts_in_silhouette: Antek assigned task **Aaaa** to Antek.\n'
self.send_and_test_stream_message("task_changed_assigned", u'subject', message)
def test_taiga_task_changed_reassigned(self):
# type: () -> None
message = u':busts_in_silhouette: Antek reassigned task **Aaaa** from Han Solo to Antek.\n'
self.send_and_test_stream_message("task_changed_reassigned", u'subject', message)
def test_taiga_task_changed_subject(self):
# type: () -> None
message = u':notebook: Antek renamed task New task to **Even newer task**.\n'
self.send_and_test_stream_message("task_changed_subject", u'subject', message)
def test_taiga_task_changed_description(self):
# type: () -> None
message = u':notebook: Antek updated description of task **Even newer task.**.\n'
self.send_and_test_stream_message("task_changed_description", u'subject', message)
def test_taiga_task_changed_us(self):
# type: () -> None
message = u':clipboard: Antek moved task **A new task** from user story #3 Great US to #6 Greater US.\n'
self.send_and_test_stream_message("task_changed_us", u'subject', message)
def test_taiga_task_deleted(self):
# type: () -> None
message = u':x: Antek deleted task **hhh**.\n'
self.send_and_test_stream_message("task_deleted", u'subject', message)
def test_taiga_milestone_created(self):
# type: () -> None
message = u':calendar: Antek created sprint **New sprint**.\n'
self.send_and_test_stream_message("milestone_created", u'subject', message)
def test_taiga_milestone_deleted(self):
# type: () -> None
message = u':x: Antek deleted sprint **Newer sprint**.\n'
self.send_and_test_stream_message("milestone_deleted", u'subject', message)
def test_taiga_milestone_changed_time(self):
# type: () -> None
message = u':calendar: Antek changed estimated finish of sprint **New sprint** from 2016-04-27 to 2016-04-30.\n'
self.send_and_test_stream_message("milestone_changed_time", u'subject', message)
def test_taiga_milestone_changed_name(self):
# type: () -> None
message = u':notebook: Antek renamed sprint from New sprint to **Newer sprint**.\n'
self.send_and_test_stream_message("milestone_changed_name", u'subject', message)
def test_taiga_issue_created(self):
# type: () -> None
message = u':bulb: Antek created issue **A new issue**.\n'
self.send_and_test_stream_message("issue_created", u'subject', message)
def test_taiga_issue_deleted(self):
# type: () -> None
message = u':x: Antek deleted issue **Aaaa**.\n'
self.send_and_test_stream_message("issue_deleted", u'subject', message)
def test_taiga_issue_changed_assigned(self):
# type: () -> None
message = u':busts_in_silhouette: Antek assigned issue **Aaaa** to Antek.\n'
self.send_and_test_stream_message("issue_changed_assigned", u'subject', message)
def test_taiga_issue_changed_reassigned(self):
# type: () -> None
message = u':busts_in_silhouette: Antek reassigned issue **Aaaa** from Antek to Han Solo.\n'
self.send_and_test_stream_message("issue_changed_reassigned", u'subject', message)
def test_taiga_issue_changed_subject(self):
# type: () -> None
message = u':notebook: Antek renamed issue Aaaa to **More descriptive name**.\n'
self.send_and_test_stream_message("issue_changed_subject", u'subject', message)
def test_taiga_issue_changed_description(self):
# type: () -> None
message = u':notebook: Antek updated description of issue **More descriptive name**.\n'
self.send_and_test_stream_message("issue_changed_description", u'subject', message)
def test_taiga_issue_changed_type(self):
# type: () -> None
message = u':bulb: Antek changed type of issue **A new issue** from Bug to Enhancement.\n'
self.send_and_test_stream_message("issue_changed_type", u'subject', message)
def test_taiga_issue_changed_status(self):
# type: () -> None
message = u':chart_with_upwards_trend: Antek changed status of issue **A new issue** from New to Rejected.\n'
self.send_and_test_stream_message("issue_changed_status", u'subject', message)
def test_taiga_issue_changed_severity(self):
# type: () -> None
message = u':warning: Antek changed severity of issue **A new issue** from Important to Critical.\n'
self.send_and_test_stream_message("issue_changed_severity", u'subject', message)
def test_taiga_issue_changed_priority(self):
# type: () -> None
message = u':rocket: Antek changed priority of issue **A new issue** from Normal to High.\n'
self.send_and_test_stream_message("issue_changed_priority", u'subject', message)
def test_taiga_userstory_comment_added(self):
# type: () -> None
message = u':thought_balloon: Han Solo commented on user story **Great US**.\n'
self.send_and_test_stream_message("userstory_changed_comment_added", u'subject', message)
def test_taiga_task_changed_comment_added(self):
# type: () -> None
message = u':thought_balloon: Antek commented on task **New task assigned and in progress**.\n'
self.send_and_test_stream_message("task_changed_comment_added", u'subject', message)
def test_taiga_issue_changed_comment_added(self):
# type: () -> None
message = u':thought_balloon: Antek commented on issue **Aaaa**.\n'
self.send_and_test_stream_message("issue_changed_comment_added", u'subject', message)
|
|
import os
import shutil
import hashlib
from django.contrib.auth.models import User
from django.core import mail
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.conf import settings
from tagging.utils import edit_string_for_tags
from djangopeople.djangopeople.models import DjangoPerson, Country
from djangopeople.machinetags.utils import tagdict
class EditViewTest(TestCase):
fixtures = ['test_data']
def setUp(self): # noqa
super(EditViewTest, self).setUp()
self.client.login(username='daveb', password='123456')
img_content = open(os.path.join(settings.OUR_ROOT,
'djangopeople/fixtures/pony.gif'),
'rb').read()
sha1sum = hashlib.sha1(img_content).hexdigest()
self.hashed_upload_img_file_name = os.path.join(sha1sum[:1],
sha1sum[1:2], sha1sum)
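# e.g. for a sha1 digest 'ab12...', the stored name is 'a/b/ab12...'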
# make sure the profile upload folder exists
self.profile_img_path = os.path.join(settings.MEDIA_ROOT, 'profiles')
if not os.path.exists(self.profile_img_path):
os.makedirs(self.profile_img_path)
def tearDown(self): # noqa
# remove uploaded profile picture
if os.path.exists(self.profile_img_path):
shutil.rmtree(self.profile_img_path)
def test_edit_finding_permissions(self):
'''
logged in user can only edit his own finding/search settings
'''
url = reverse('edit_finding', args=['daveb'])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
response = self.client.post(url)
self.assertEqual(response.status_code, 200)
url = reverse('edit_finding', args=['satchmo'])
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
response = self.client.post(url)
self.assertEqual(response.status_code, 403)
def test_edit_finding_initial_data(self):
url_edit_finding = reverse('edit_finding', args=['daveb'])
p = DjangoPerson.objects.get(user__username='daveb')
mtags = tagdict(p.machinetags.all())
response = self.client.get(url_edit_finding)
self.assertContains(response, mtags['profile']['looking_for_work'])
self.assertContains(response, mtags['im']['django'])
self.assertContains(response, p.user.email)
def test_edit_finding_email(self):
url_edit_finding = reverse('edit_finding', args=['daveb'])
url_profile = reverse('user_profile', args=['daveb'])
new_email = '[email protected]'
data = {'email': new_email,
'first_name': 'Test',
'last_name': 'User',
'privacy_search': 'public',
'privacy_email': 'private',
'privacy_im': 'private',
'privacy_irctrack': 'public'}
u = User.objects.get(username='daveb')
self.assertNotEqual(u.first_name, 'Test')
self.assertNotEqual(u.last_name, 'User')
response = self.client.post(url_edit_finding, data, follow=True)
self.assertRedirects(response, url_profile)
self.assertContains(response, new_email)
u = User.objects.get(username='daveb')
self.assertEqual(u.email, new_email)
self.assertEqual(u.first_name, 'Test')
self.assertEqual(u.last_name, 'User')
def test_edit_finding_looking_for_work(self):
url_edit_finding = reverse('edit_finding', args=['daveb'])
url_profile = reverse('user_profile', args=['daveb'])
new_email = '[email protected]'
looking_for_work = 'freelance'
data = {'looking_for_work': looking_for_work,
'email': new_email,
'first_name': 'Hello',
'last_name': 'World',
'privacy_search': 'public',
'privacy_email': 'private',
'privacy_im': 'private',
'privacy_irctrack': 'public'}
p = DjangoPerson.objects.get(user__username='daveb')
mtags = tagdict(p.machinetags.all())
self.assertEqual(mtags['profile']['looking_for_work'], 'full-time')
response = self.client.post(url_edit_finding, data, follow=True)
self.assertRedirects(response, url_profile)
p = DjangoPerson.objects.get(user__username='daveb')
mtags = tagdict(p.machinetags.all())
self.assertEqual(mtags['profile']['looking_for_work'], 'freelance')
# check initial value
response = self.client.get(url_edit_finding)
self.assertContains(response, looking_for_work)
def test_edit_finding_im(self):
url_edit_finding = reverse('edit_finding', args=['daveb'])
url_profile = reverse('user_profile', args=['daveb'])
new_email = '[email protected]'
im_jabber = '[email protected]'
data = {'im_jabber': im_jabber,
'email': new_email,
'first_name': 'Hello',
'last_name': 'World',
'privacy_search': 'public',
'privacy_email': 'private',
'privacy_im': 'private',
'privacy_irctrack': 'public'}
p = DjangoPerson.objects.get(user__username='daveb')
mtags = tagdict(p.machinetags.all())
self.assertEqual(mtags['im']['jabber'], '')
response = self.client.post(url_edit_finding, data, follow=True)
self.assertRedirects(response, url_profile)
p = DjangoPerson.objects.get(user__username='daveb')
mtags = tagdict(p.machinetags.all())
self.assertEqual(mtags['im']['jabber'], im_jabber)
# check initial value
response = self.client.get(url_edit_finding)
self.assertContains(response, im_jabber)
def test_edit_finding_services(self):
url_edit_finding = reverse('edit_finding', args=['daveb'])
url_profile = reverse('user_profile', args=['daveb'])
service_twitter = 'https://twitter.com/davebbar'
data = {'service_twitter': service_twitter,
'email': '[email protected]',
'first_name': 'Hello',
'last_name': 'World',
'privacy_search': 'public',
'privacy_email': 'private',
'privacy_im': 'private',
'privacy_irctrack': 'public'}
p = DjangoPerson.objects.get(user__username='daveb')
mtags = tagdict(p.machinetags.all())
self.assertEqual(mtags['services']['twitter'], '')
response = self.client.post(url_edit_finding, data, follow=True)
self.assertRedirects(response, url_profile)
p = DjangoPerson.objects.get(user__username='daveb')
mtags = tagdict(p.machinetags.all())
self.assertEqual(mtags['services']['twitter'], service_twitter)
# check initial value
response = self.client.get(url_edit_finding)
self.assertContains(response, service_twitter)
def test_edit_finding_form_error_email_validation(self):
url_edit_finding = reverse('edit_finding', args=['daveb'])
u = User.objects.get(username='daveb')
old_email = u.email
other_user = User.objects.get(username='satchmo')
# set new email for daveb to existing email of user satchmo
data = {'email': other_user.email,
'privacy_search': 'public',
'privacy_email': 'private',
'privacy_im': 'private',
'privacy_irctrack': 'public'}
u = User.objects.get(username='daveb')
self.assertEqual(u.email, old_email)
response = self.client.post(url_edit_finding, data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'email',
'That e-mail is already in use')
u = User.objects.get(username='daveb')
self.assertEqual(u.email, old_email)
def test_edit_finding_form_error_fields_required(self):
url_edit_finding = reverse('edit_finding', args=['daveb'])
url_profile = reverse('user_profile', args=['daveb'])
data = {'email': '[email protected]',
'first_name': 'Hello',
'last_name': 'World',
'privacy_search': 'public',
'privacy_email': 'private',
'privacy_im': 'private',
'privacy_irctrack': 'public'}
response = self.client.post(url_edit_finding, data, follow=True)
self.assertRedirects(response, url_profile)
data.pop('email')
response = self.client.post(url_edit_finding, data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'email',
'This field is required.')
data.pop('privacy_search')
response = self.client.post(url_edit_finding, data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'email',
'This field is required.')
self.assertFormError(response, 'form', 'privacy_search',
'This field is required.')
data.pop('privacy_email')
response = self.client.post(url_edit_finding, data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'email',
'This field is required.')
self.assertFormError(response, 'form', 'privacy_search',
'This field is required.')
self.assertFormError(response, 'form', 'privacy_email',
'This field is required.')
data.pop('privacy_im')
response = self.client.post(url_edit_finding, data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'email',
'This field is required.')
self.assertFormError(response, 'form', 'privacy_search',
'This field is required.')
self.assertFormError(response, 'form', 'privacy_email',
'This field is required.')
self.assertFormError(response, 'form', 'privacy_im',
'This field is required.')
data.pop('privacy_irctrack')
response = self.client.post(url_edit_finding, data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'email',
'This field is required.')
self.assertFormError(response, 'form', 'privacy_search',
'This field is required.')
self.assertFormError(response, 'form', 'privacy_email',
'This field is required.')
self.assertFormError(response, 'form', 'privacy_irctrack',
'This field is required.')
def test_edit_skill_permission(self):
'''
logged in user can only edit his own skills
'''
url_edit_skills = reverse('edit_skills', args=['daveb'])
response = self.client.get(url_edit_skills)
self.assertEqual(response.status_code, 200)
response = self.client.post(url_edit_skills)
self.assertEqual(response.status_code, 302)
url_edit_skills = reverse('edit_skills', args=['satchmo'])
response = self.client.get(url_edit_skills)
self.assertEqual(response.status_code, 403)
response = self.client.post(url_edit_skills)
self.assertEqual(response.status_code, 403)
def test_add_skills(self):
'''
test adding skills
'''
url_edit_skills = reverse('edit_skills', args=['daveb'])
p = DjangoPerson.objects.get(user__username='daveb')
self.assertEqual(len(p.skilltags), 3)
self.assertTrue('jazz' in edit_string_for_tags(p.skilltags))
self.assertTrue('linux' in edit_string_for_tags(p.skilltags))
self.assertTrue('python' in edit_string_for_tags(p.skilltags))
skills = '%s django' % (edit_string_for_tags(p.skilltags))
self.client.post(url_edit_skills, {'skills': skills})
p = DjangoPerson.objects.get(user__username='daveb')
self.assertEqual(len(p.skilltags), 4)
self.assertTrue('jazz' in edit_string_for_tags(p.skilltags))
self.assertTrue('linux' in edit_string_for_tags(p.skilltags))
self.assertTrue('python' in edit_string_for_tags(p.skilltags))
self.assertTrue('django' in edit_string_for_tags(p.skilltags))
def test_delete_skill(self):
'''
test deleting skills
'''
url_edit_skills = reverse('edit_skills', args=['daveb'])
p = DjangoPerson.objects.get(user__username='daveb')
self.assertEqual(len(p.skilltags), 3)
self.assertTrue('jazz' in edit_string_for_tags(p.skilltags))
self.assertTrue('linux' in edit_string_for_tags(p.skilltags))
self.assertTrue('python' in edit_string_for_tags(p.skilltags))
# delete jazz skill
skills = 'linux python'
self.client.post(url_edit_skills, {'skills': skills})
p = DjangoPerson.objects.get(user__username='daveb')
self.assertEqual(len(p.skilltags), 2)
self.assertTrue('linux' in edit_string_for_tags(p.skilltags))
self.assertTrue('python' in edit_string_for_tags(p.skilltags))
self.assertFalse('jazz' in edit_string_for_tags(p.skilltags))
# delete all skills
self.client.post(url_edit_skills, {'skills': ''})
p = DjangoPerson.objects.get(user__username='daveb')
self.assertEqual(len(p.skilltags), 0)
self.assertEqual(edit_string_for_tags(p.skilltags), '')
def test_edit_account_permission(self):
'''
logged in user can only edit his own account
'''
url_edit_account = reverse('edit_account', args=['daveb'])
response = self.client.get(url_edit_account)
self.assertEqual(response.status_code, 200)
url_edit_account = reverse('edit_account', args=['satchmo'])
response = self.client.get(url_edit_account)
self.assertEqual(response.status_code, 403)
def test_edit_account(self):
'''
add and change openid
'''
url_profile = reverse('user_profile', args=['daveb'])
url_edit_account = reverse('edit_account', args=['daveb'])
p = DjangoPerson.objects.get(user__username='daveb')
self.assertEqual(p.openid_server, u'')
self.assertEqual(p.openid_delegate, u'')
response = self.client.post(url_edit_account,
{'openid_server': 'http://example.com',
'openid_delegate': 'http://google.com'})
self.assertRedirects(response, url_profile)
p = DjangoPerson.objects.get(user__username='daveb')
self.assertEqual(p.openid_server, 'http://example.com/')
self.assertEqual(p.openid_delegate, 'http://google.com/')
# test display openid change form (with initial data)
response = self.client.get(url_edit_account)
self.assertHTMLEqual(
response.content.split(
'<label for="id_openid_server">OpenID server:</label>'
)[1].split('</div>')[0],
(
'<input id="id_openid_server" type="url" '
'name="openid_server" value="http://example.com/" '
'maxlength="255" />')
)
self.assertHTMLEqual(
response.content.split(
'<label for="id_openid_delegate">OpenID delegate:</label>'
)[1].split('</div>')[0],
(
'<input id="id_openid_delegate" '
'type="url" name="openid_delegate" '
'value="http://google.com/" '
'maxlength="255" />'
)
)
# test change openid settings
response = self.client.post(url_edit_account,
{'openid_server': 'http://test.com',
'openid_delegate': 'http://yahoo.com'})
p = DjangoPerson.objects.get(user__username='daveb')
self.assertEqual(p.openid_server, 'http://test.com/')
self.assertEqual(p.openid_delegate, 'http://yahoo.com/')
def test_edit_account_form_error(self):
'''
check AccountForm error messages
'''
p = DjangoPerson.objects.get(user__username='daveb')
self.assertEqual(p.openid_server, u'')
self.assertEqual(p.openid_delegate, u'')
url_edit_account = reverse('edit_account', args=['daveb'])
response = self.client.post(url_edit_account,
{'openid_server': 'example',
'openid_delegate': 'fooBar'})
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'openid_server',
'Enter a valid URL.')
self.assertFormError(response, 'form', 'openid_delegate',
'Enter a valid URL.')
p = DjangoPerson.objects.get(user__username='daveb')
self.assertEqual(p.openid_server, u'')
self.assertEqual(p.openid_delegate, u'')
def test_change_portfolio_entry(self):
url_profile = reverse('user_profile', args=['daveb'])
url_edit_portfolio = reverse('edit_portfolio', args=['daveb'])
response = self.client.get(url_profile)
self.assertContains(response, '<li><a href="http://example.org/" '
'class="url" rel="nofollow"><cite>'
'cheese-shop</cite></a></li>')
# test change existing portfolio entry
response = self.client.post(url_edit_portfolio,
{'title_1': 'chocolate shop',
'url_1': 'cs.org'}, follow=True)
self.assertRedirects(response, url_profile)
self.assertNotContains(response, '<li><a href="http://example.org/" '
'class="url" rel="nofollow"><cite>'
'cheese-shop</cite></a></li>')
self.assertContains(response, '<li><a href="http://cs.org/" class="url'
'" rel="nofollow"><cite>chocolate shop'
'</cite></a></li>')
def test_remove_portfolio_entry(self):
# test remove existing portfolio entry
url_profile = reverse('user_profile', args=['daveb'])
url_edit_portfolio = reverse('edit_portfolio', args=['daveb'])
response = self.client.post(url_edit_portfolio,
{'title_1': '', 'url_1': ''}, follow=True)
self.assertRedirects(response, url_profile)
self.assertNotContains(response, '<li><a href="http://example.org/" '
'class="url" rel="nofollow"><cite>'
'cheese-shop</cite></a></li>')
self.assertNotContains(response, '<li><a href="cs.org/" class="url" '
'rel="nofollow"><cite>chocolate shop'
'</cite></a></li>')
self.assertContains(response, 'Add some sites')
def test_add_portfolio_entry(self):
# test add new portfolio entry
url_profile = reverse('user_profile', args=['daveb'])
url_edit_portfolio = reverse('edit_portfolio', args=['daveb'])
response = self.client.post(url_edit_portfolio,
{'title_1': 'chocolate shop',
'url_1': 'cs.org'},
follow=True)
self.assertRedirects(response, url_profile)
self.assertNotContains(response, 'Add some sites')
self.assertContains(response, '<li><a href="http://cs.org/" class="url'
'" rel="nofollow"><cite>chocolate shop'
'</cite></a></li>')
def test_portfolio_form_url_error(self):
# test portfolio edit form
url_edit_portfolio = reverse('edit_portfolio', args=['daveb'])
response = self.client.get(url_edit_portfolio)
self.assertHTMLEqual(
response.content.split(
'<label for="id_title_1">Title 1:</label>'
)[1].split('</div>')[0],
(
'<input id="id_title_1" type="text" '
'name="title_1" value="cheese-shop" '
'maxlength="100" />'
)
)
self.assertHTMLEqual(
response.content.split(
'<label for="id_url_1">URL 1:</label>'
)[1].split('</div>')[0],
(
'<input id="id_url_1" type="url" '
'name="url_1" value="http://example.org/'
'" maxlength="255" />'
)
)
self.assertHTMLEqual(
response.content.split(
'<label for="id_title_2">Title 2:</label>'
)[1].split('</div>')[0],
(
'<input id="id_title_2" type="text" '
'name="title_2" maxlength="100" />'
)
)
self.assertHTMLEqual(
response.content.split(
'<label for="id_url_2">URL 2:</label>'
)[1].split('</div>')[0],
(
'<input id="id_url_2" type="url" '
'name="url_2" maxlength="255" />'
)
)
# test form error messages
response = self.client.post(url_edit_portfolio,
{'title_1': 'chocolate shop',
'url_1': 'no url'},
follow=True)
self.assertFormError(response, 'form', 'url_1', 'Enter a valid URL.')
def test_edit_other_user(self):
# test editing another users portfolio
# add new user
user = User.objects.create_user('testuser', '[email protected]', 'pass')
DjangoPerson.objects.create(
user=user,
country=Country.objects.get(pk=1),
latitude=44,
longitude=2,
location_description='Somewhere',
)
url_profile = reverse('user_profile', args=['testuser'])
url_edit_portfolio = reverse('edit_portfolio', args=['testuser'])
# no Add some sites link for user daveb on testuser's profile page
response = self.client.get(url_profile)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'Add some sites')
# daveb can't add sites to testuser's portfolio
response = self.client.post(url_edit_portfolio,
{'title_1': 'chocolate shop',
'url_1': 'cs.org'}, follow=True)
self.assertEqual(response.status_code, 403)
response = self.client.get(url_profile)
self.assertNotContains(response, '<li><a href="http://cs.org/" class="'
'url" rel="nofollow"><cite>chocolate '
'shop </cite></a></li>')
def test_edit_password_permission(self):
'''
logged in user can only edit his own password
'''
url_edit_password = reverse('edit_password', args=['daveb'])
# user can edit his own password
response = self.client.get(url_edit_password)
self.assertEqual(response.status_code, 200)
response = self.client.post(url_edit_password)
self.assertEqual(response.status_code, 200)
# user can't edit passwords of other users
url_edit_password = reverse('edit_password', args=['satchmo'])
response = self.client.get(url_edit_password)
self.assertEqual(response.status_code, 403)
response = self.client.post(url_edit_password)
self.assertEqual(response.status_code, 403)
def test_edit_password(self):
'''
test editing passwords
'''
url_edit_password = reverse('edit_password', args=['daveb'])
url_profile = reverse('user_profile', args=['daveb'])
response = self.client.get(url_edit_password)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'edit_password.html')
u = User.objects.get(username='daveb')
self.assertTrue(u.check_password('123456'))
response = self.client.post(url_edit_password,
{'current_password': '123456',
'password1': 'foo',
'password2': 'foo'})
self.assertRedirects(response, url_profile)
u = User.objects.get(username='daveb')
self.assertTrue(u.check_password('foo'))
def test_edit_password_form_current_password_error(self):
'''
test form error messages when current password is invalid
'''
url_edit_password = reverse('edit_password', args=['daveb'])
response = self.client.post(url_edit_password,
{'current_password': 'invalid pw',
'password1': 'foo1',
'password2': 'foo'})
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'current_password',
'Please submit your current password.')
def test_edit_password_form_error_fields_required(self):
'''
test form error messages when form fields are empty
'''
url_edit_password = reverse('edit_password', args=['daveb'])
response = self.client.post(url_edit_password, {'password1': 'foo1'})
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'password2',
'This field is required.')
response = self.client.post(url_edit_password, {'password2': 'foo1'})
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'password1',
'This field is required.')
response = self.client.post(url_edit_password, {})
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'password1',
'This field is required.')
self.assertFormError(response, 'form', 'password2',
'This field is required.')
def test_edit_password_form_error_different_passwords(self):
'''
test form error message when user submits two different
passwords
'''
url_edit_password = reverse('edit_password', args=['daveb'])
u = User.objects.get(username='daveb')
self.assertTrue(u.check_password('123456'))
# two passwords aren't the same
response = self.client.post(url_edit_password, {'password1': 'foo1',
'password2': 'foo'})
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', None,
'The passwords did not match.')
u = User.objects.get(username='daveb')
self.assertTrue(u.check_password('123456'))
def test_edit_bio_permission(self):
'''
logged in user can only edit his own bio
'''
url = reverse('edit_bio', args=['daveb'])
# user can edit his own bio
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
response = self.client.post(url)
self.assertEqual(response.status_code, 302)
# user can't edit bios of other users
url = reverse('edit_bio', args=['satchmo'])
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
response = self.client.post(url)
self.assertEqual(response.status_code, 403)
def test_edit_bio(self):
'''
test changing the bio
'''
url_edit_bio = reverse('edit_bio', args=['daveb'])
url_profile = reverse('user_profile', args=['daveb'])
response = self.client.get(url_edit_bio)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'edit_bio.html')
p = DjangoPerson.objects.get(user__username='daveb')
self.assertEqual(p.bio, 'ad')
bio_string = 'I do a lot of Django stuff'
response = self.client.post(url_edit_bio,
{'bio': bio_string}, follow=True)
self.assertRedirects(response, url_profile)
self.assertContains(response, bio_string)
self.assertContains(response, 'edit bio')
p = DjangoPerson.objects.get(user__username='daveb')
self.assertEqual(p.bio, bio_string)
def test_delete_bio(self):
url_edit_bio = reverse('edit_bio', args=['daveb'])
url_profile = reverse('user_profile', args=['daveb'])
response = self.client.post(url_edit_bio,
{'bio': ''}, follow=True)
self.assertRedirects(response, url_profile)
self.assertContains(response, 'Create your bio')
p = DjangoPerson.objects.get(user__username='daveb')
self.assertEqual(p.bio, '')
def test_edit_location_permission(self):
'''
logged in user can only edit his own location
'''
url = reverse('edit_location', args=['daveb'])
# user can edit his own location
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
response = self.client.post(url)
self.assertEqual(response.status_code, 200)
# user can't edit locations of other users
url = reverse('edit_location', args=['satchmo'])
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
response = self.client.post(url)
self.assertEqual(response.status_code, 403)
def test_edit_location(self):
'''
test changing the location
'''
longitude = 14.9853515625
latitude = 50.0359736721955
location_description = 'Vienna, Austria'
country = 12 # id of Austria
url_edit_location = reverse('edit_location', args=['daveb'])
url_profile = reverse('user_profile', args=['daveb'])
response = self.client.get(url_profile)
self.assertContains(response, 'Austria')
self.assertContains(response, 'data-shrinklat="%d' % latitude)
self.assertContains(response, 'data-shrinklon="%d' % longitude)
p = DjangoPerson.objects.get(user__username='daveb')
self.assertTrue(abs(p.latitude - latitude) < 0.01)
self.assertTrue(abs(p.longitude - longitude) < 0.01)
self.assertEqual(p.location_description, location_description)
self.assertEqual(p.country.pk, country)
response = self.client.get(url_edit_location)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'edit_location.html')
new_longitude = 153.023071289
new_latitude = -27.5411533739
new_location_description = 'Brisbane'
new_country = 'AU' # iso code of Australia
location_dict = {'longitude': new_longitude,
'latitude': new_latitude,
'location_description': new_location_description,
'country': new_country,
'region': 'AL'}
response = self.client.post(url_edit_location, location_dict)
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'region',
('The region you selected does not match the '
'country'))
del location_dict['region']
response = self.client.post(url_edit_location, data=location_dict,
follow=True)
self.assertRedirects(response, url_profile)
self.assertNotContains(response, 'Austria')
self.assertNotContains(response, 'data-shrinklat="%d' % latitude)
self.assertNotContains(response, 'data-shrinklon="%d' % longitude)
self.assertContains(response, 'Australia')
self.assertContains(response, 'data-shrinklat="%d' % new_latitude)
self.assertContains(response, 'data-shrinklon="%d' % new_longitude)
p = DjangoPerson.objects.get(user__username='daveb')
self.assertTrue(abs(p.latitude - new_latitude) < 0.01)
self.assertTrue(abs(p.longitude - new_longitude) < 0.01)
self.assertEqual(p.location_description, new_location_description)
self.assertEqual(p.country.iso_code, new_country)
def test_location_bug_24(self):
# https://github.com/brutasse/djangopeople/issues/24
url = reverse('edit_location', args=['daveb'])
data = {
'location_description': 'Rapid City, South Dakota',
'country': 'US',
'latitude': '44.07883004975277',
'longitude': '-103.28332901005193',
'region': 'SD',
}
response = self.client.post(url, data)
self.assertEqual(response.status_code, 302)
def test_edit_location_form_error_fields_required(self):
url_edit_location = reverse('edit_location', args=['daveb'])
new_longitude = 153.023071289
new_latitude = -27.5411533739
new_location_description = 'Brisbane'
new_country = 'AU' # iso code of Australia
location_dict = {'longitude': new_longitude,
'latitude': new_latitude,
'location_description': new_location_description,
'country': new_country}
response = self.client.post(url_edit_location, data=location_dict)
self.assertEqual(response.status_code, 302)
# remove longitude
location_dict.pop('longitude')
response = self.client.post(url_edit_location, data=location_dict)
self.assertFormError(response, 'form', 'longitude',
'This field is required.')
# remove latitude
location_dict.pop('latitude')
response = self.client.post(url_edit_location, data=location_dict)
self.assertFormError(response, 'form', 'longitude',
'This field is required.')
self.assertFormError(response, 'form', 'latitude',
'This field is required.')
# remove location_description
location_dict.pop('location_description')
response = self.client.post(url_edit_location, data=location_dict)
self.assertFormError(response, 'form', 'longitude',
'This field is required.')
self.assertFormError(response, 'form', 'latitude',
'This field is required.')
self.assertFormError(response, 'form', 'location_description',
'This field is required.')
# remove country
location_dict.pop('country')
response = self.client.post(url_edit_location, data=location_dict)
self.assertFormError(response, 'form', 'longitude',
'This field is required.')
self.assertFormError(response, 'form', 'latitude',
'This field is required.')
self.assertFormError(response, 'form', 'location_description',
'This field is required.')
self.assertFormError(response, 'form', 'country',
'This field is required.')
def test_edit_location_form_error_invalid_iso_code(self):
url_edit_location = reverse('edit_location', args=['daveb'])
new_longitude = 153.023071289
new_latitude = -27.5411533739
new_location_description = 'Brisbane'
new_country = 'XXX' # invalid iso code
location_dict = {'longitude': new_longitude,
'latitude': new_latitude,
'location_description': new_location_description,
'country': new_country}
response = self.client.post(url_edit_location, data=location_dict)
self.assertFormError(
response, 'form', 'country',
'Select a valid choice. XXX is not one of the available choices.'
)
def test_edit_location_not_in_the_atlantic(self):
'''
test form error message when 43 < lat < 45 and -39 < lon < -33
'''
url_edit_location = reverse('edit_location', args=['daveb'])
new_longitude = -35
new_latitude = 44
new_location_description = 'Brisbane'
new_country = 13 # id of Australia
location_dict = {'longitude': new_longitude,
'latitude': new_latitude,
'location_description': new_location_description,
'country': new_country}
response = self.client.post(url_edit_location, data=location_dict)
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'location_description',
('Drag and zoom the map until the crosshair '
'matches your location'))
def test_delete_account(self):
url = reverse('delete_account_request', args=['daveb'])
response = self.client.get(url)
self.assertContains(response, "Account deletion")
response = self.client.post(url, {})
url = reverse('delete_account_next', args=['daveb'])
self.assertRedirects(response, url)
self.assertEqual(len(mail.outbox), 1)
response = self.client.get(url)
self.assertContains(response, 'An email was just sent')
url = mail.outbox[0].body.split('testserver')[2].split('\n')[0]
response = self.client.get(url)
self.assertContains(response, 'Account deletion')
target = response.content.split('action="')[1].split('"', 1)[0]
self.assertEqual(target, url)
data = {'password': 'example'}
response = self.client.post(url, data)
self.assertContains(response, 'Your password was invalid')
self.assertEqual(User.objects.count(), 3)
response = self.client.post(url, {'password': '123456'})
self.assertEqual(User.objects.count(), 2)
with self.assertRaises(User.DoesNotExist):
User.objects.get(username='daveb')
url = reverse('delete_account_done', args=['daveb'])
self.assertRedirects(response, url)
response = self.client.get(url)
self.assertContains(response, 'Account deleted')
def test_failing_deletion(self):
# expired link: redirect to form
url = reverse('delete_account',
args=['daveb', 'Mg:1Sd7hl:RoSbkTsuqHVUjChAwoB5HZumgCg'])
response = self.client.get(url, follow=True)
self.assertEqual(len(response.redirect_chain), 1)
self.assertContains(response, 'Account deletion')
# invalid link: 404
url = reverse('delete_account', args=['daveb', 'test_some_data'])
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
# delete confirmation page only shown if account does not exist
url = reverse('delete_account_done',
args=[User.objects.all()[0].username])
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
|
|
"""
flickr.py
Copyright 2004-2006 James Clarke <[email protected]>
Portions Copyright 2007-2008 Joshua Henderson <[email protected]>
Portions Copyright 2015 Jeroen Arnoldus <[email protected]>
THIS SOFTWARE IS SUPPLIED WITHOUT WARRANTY OF ANY KIND, AND MAY BE
COPIED, MODIFIED OR DISTRIBUTED IN ANY WAY, AS LONG AS THIS NOTICE
AND ACKNOWLEDGEMENT OF AUTHORSHIP REMAIN.
2015-01-10
-Additional fields for photo search
2007-12-17
For an up-to-date TODO list, please see:
http://code.google.com/p/flickrpy/wiki/TodoList
For information on how to use the Authentication
module, please see:
http://code.google.com/p/flickrpy/wiki/UserAuthentication
2006-12-19
Applied patches from Berco Beute and Wolfram Kriesing.
"""
__author__ = "James Clarke <[email protected]>"
__version__ = "$Rev$"
__date__ = "$Date$"
__copyright__ = "Copyright: 2004-2010 James Clarke; Portions: 2007-2008 Joshua Henderson; Portions: 2011 Andrei Vlad Vacariu; Portions: 2015 Jeroen Arnoldus;"
from urllib.parse import urlencode
from urllib.request import urlopen
from xml.dom import minidom
import hashlib
import os
HOST = 'http://flickr.com'
API = '/services/rest'
# set these here or using flickr.API_KEY in your application
API_KEY = ''
API_SECRET = ''
email = None
password = None
AUTH = False
debug = False
# The next 2 variables are only important if authentication is used
# this can be set here or using flickr.tokenPath in your application
# this is the path to the folder containing tokenFile (default: token.txt)
tokenPath = ''
# this can be set here or using flickr.tokenFile in your application
# this is the name of the file containing the stored token.
tokenFile = 'token.txt'
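# Example configuration (illustrative sketch; the key and photo id below are
# placeholders, not part of the original module):
#
#   import flickr
#   flickr.API_KEY = 'your-api-key'
#   flickr.API_SECRET = 'your-api-secret'
#   photo = flickr.Photo('1234567890')  # any valid Flickr photo id
#   print(photo.title)                  # lazily loaded via flickr.photos.getInfo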
class FlickrError(Exception): pass
class Photo(object):
"""Represents a Flickr Photo."""
__readonly = ['id', 'secret', 'server', 'farm', 'isfavorite', 'license', 'rotation',
'owner', 'dateposted', 'datetaken', 'takengranularity',
'title', 'description', 'ispublic', 'isfriend', 'isfamily',
'cancomment', 'canaddmeta', 'comments', 'tags', 'permcomment',
'permaddmeta', 'url', 'views']
#XXX: Hopefully None won't cause problems
def __init__(self, id, owner=None, dateuploaded=None, \
title=None, description=None, ispublic=None, \
isfriend=None, isfamily=None, cancomment=None, \
canaddmeta=None, comments=None, tags=None, secret=None, \
isfavorite=None, server=None, farm=None, license=None, \
rotation=None, url=None, views=None):
"""Must specify id, rest is optional."""
self.__loaded = False
self.__cancomment = cancomment
self.__canaddmeta = canaddmeta
self.__comments = comments
self.__dateuploaded = dateuploaded
self.__description = description
self.__id = id
self.__license = license
self.__isfamily = isfamily
self.__isfavorite = isfavorite
self.__isfriend = isfriend
self.__ispublic = ispublic
self.__owner = owner
self.__rotation = rotation
self.__secret = secret
self.__server = server
self.__farm = farm
self.__tags = tags
self.__title = title
self.__dateposted = None
self.__datetaken = None
self.__takengranularity = None
self.__permcomment = None
self.__permaddmeta = None
self.__url = None
self.__views = None
def __setattr__(self, key, value):
if key in self.__class__.__readonly:
raise AttributeError("The attribute %s is read-only." % key)
else:
super(Photo, self).__setattr__(key, value)
def _val(self, key):
if key in self.__class__.__readonly:
return super(Photo, self).__getattribute__("_%s__%s" % (self.__class__.__name__, key))
else:
return super(Photo, self).__getattribute__(key)
def __getattr__(self, key):
val = self._val(key)
if val is None and not self.__loaded:
self._load_properties()
val = self._val(key)
return val
def _load_properties(self):
"""Loads the properties from Flickr."""
self.__loaded = True
method = 'flickr.photos.getInfo'
data = _doget(method, photo_id=self.id)
photo = data.rsp.photo
self.__secret = photo.secret
self.__server = photo.server
self.__farm = photo.farm
self.__isfavorite = photo.isfavorite
self.__license = photo.license
self.__rotation = photo.rotation
owner = photo.owner
self.__owner = User(owner.nsid, username=owner.username,\
realname=owner.realname,\
location=owner.location)
self.__title = photo.title.text
self.__description = photo.description.text
self.__ispublic = photo.visibility.ispublic
self.__isfriend = photo.visibility.isfriend
self.__isfamily = photo.visibility.isfamily
self.__dateposted = photo.dates.posted
self.__datetaken = photo.dates.taken
self.__takengranularity = photo.dates.takengranularity
self.__cancomment = photo.editability.cancomment
self.__canaddmeta = photo.editability.canaddmeta
self.__comments = photo.comments.text
self.__url = photo.urls.url.text
self.__views = photo.views
try:
self.__permcomment = photo.permissions.permcomment
self.__permaddmeta = photo.permissions.permaddmeta
except AttributeError:
self.__permcomment = None
self.__permaddmeta = None
#TODO: Implement Notes?
if hasattr(photo.tags, "tag"):
if isinstance(photo.tags.tag, list):
self.__tags = [Tag(tag.id, User(tag.author), tag.raw, tag.text) \
for tag in photo.tags.tag]
else:
tag = photo.tags.tag
self.__tags = [Tag(tag.id, User(tag.author), tag.raw, tag.text)]
def __str__(self):
return '<Flickr Photo %s>' % self.id
def setTags(self, tags):
"""Set the tags for current photo to list tags.
(flickr.photos.settags)
"""
method = 'flickr.photos.setTags'
tags = uniq(tags)
_dopost(method, auth=True, photo_id=self.id, tags=tags)
self._load_properties()
def addTags(self, tags):
"""Adds the list of tags to current tags. (flickr.photos.addtags)
"""
method = 'flickr.photos.addTags'
if isinstance(tags, list):
tags = uniq(tags)
_dopost(method, auth=True, photo_id=self.id, tags=tags)
#load properties again
self._load_properties()
def removeTag(self, tag):
"""Remove the tag from the photo must be a Tag object.
(flickr.photos.removeTag)
"""
method = 'flickr.photos.removeTag'
tag_id = ''
try:
tag_id = tag.id
except AttributeError:
raise FlickrError("Tag object expected")
_dopost(method, auth=True, photo_id=self.id, tag_id=tag_id)
self._load_properties()
def setMeta(self, title=None, description=None):
"""Set metadata for photo. (flickr.photos.setMeta)"""
method = 'flickr.photos.setMeta'
if title is None:
title = self.title
if description is None:
description = self.description
_dopost(method, auth=True, title=title, \
description=description, photo_id=self.id)
self.__title = title
self.__description = description
def getAllContexts(self):
"""Retrieves lists of the pools/sets the photo is in"""
method = 'flickr.photos.getAllContexts'
data = _doget(method, photo_id=self.id)
d = {'pools': [], 'sets': []}
if hasattr(data.rsp, "pool"):
if isinstance(data.rsp.pool, list):
for pool in data.rsp.pool:
d["pools"].append({"id": pool.id, "title": pool.title})
else:
d["pools"].append({"id": data.rsp.pool.id, "title": data.rsp.pool.title})
if hasattr(data.rsp, "set"):
if isinstance(data.rsp.set, list):
for theset in data.rsp.set:
d["sets"].append({"id": theset.id, "title": theset.title})
else:
d["sets"].append({"id": data.rsp.set.id, "title": data.rsp.set.title})
return d
def getPoolCount(self):
"""Retrieves a count of the pools the photo is in"""
d = self.getAllContexts()
return len( d["pools"] )
def getSetCount(self):
"""Retrieves a count of the pools the photo is in"""
d = self.getAllContexts()
return len( d["sets"] )
def getURL(self, size='Medium', urlType='url'):
"""Retrieves a url for the photo. (flickr.photos.getSizes)
urlType - 'url' or 'source'
'url' - flickr page of photo
'source' - image file
"""
method = 'flickr.photos.getSizes'
data = _doget(method, photo_id=self.id)
for psize in data.rsp.sizes.size:
if psize.label == size:
return getattr(psize, urlType)
raise FlickrError("No URL found")
def getSizes(self):
"""
Get all the available sizes of the current image, and all available
data about them.
Returns: A list of dicts with the size data.
"""
method = 'flickr.photos.getSizes'
data = _doget(method, photo_id=self.id)
ret = []
# The given props are those that we return, along with their corresponding
# types; returning width and height as strings would make "75" > "100"
# evaluate to True, which is error prone.
props = {'url':str,'width':int,'height':int,'label':str,'source':str,'text':str}
for psize in data.rsp.sizes.size:
d = {}
for prop,convert_to_type in list(props.items()):
d[prop] = convert_to_type(getattr(psize, prop))
ret.append(d)
return ret
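# Usage sketch (photo id is a placeholder):
#   for size in Photo('1234567890').getSizes():
#       print(size['label'], size['width'], size['height'], size['source'])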
def getExif(self):
"""Retrieves EXIF metadata for the photo.
Example usage:
>>> exif = photo.getExif()
>>> print(exif.camera)
>>> for t in exif.tags:
...     print('%s: %s' % (t.label, t.raw))
"""
return Exif.getExif(self.id)
def getLocation(self):
"""
Return the latitude and longitude of the picture.
Returns None if no location given for this pic.
"""
method = 'flickr.photos.geo.getLocation'
try:
data = _doget(method, photo_id=self.id)
except FlickrError: # Some other error might have occured too!?
return None
loc = data.rsp.photo.location
return [loc.latitude, loc.longitude]
def getComments(self):
""""
get list of comments for photo
returns a list of comment objects
comment text is in return [item].text
"""
method = "flickr.photos.comments.getList"
try:
data = _doget(method, photo_id=self.id)
except FlickrError: # ???? what errors might there be????
return None
return data.rsp.comments
def _getDirectURL(self, size):
return "http://farm%s.static.flickr.com/%s/%s_%s_%s.jpg" % \
(self.farm, self.server, self.id, self.secret, size)
def getThumbnail(self):
"""
Return a string representation of the URL to the thumbnail
image (not the thumbnail image page).
"""
return self._getDirectURL('t')
def getSmallSquare(self):
"""
Return a string representation of the URL to the small square
image (not the small square image page).
"""
return self._getDirectURL('s')
def getSmall(self):
"""
Return a string representation of the URL to the small
image (not the small image page).
"""
return self._getDirectURL('m')
def getMedium(self):
"""
Return a string representation of the URL to the medium
image (not the medium image page).
"""
return self._getDirectURL('z')
def getLarge(self):
"""
Return a string representation of the URL to the large
image (not the large image page).
"""
return self._getDirectURL('b')
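# The single-letter size suffixes used above follow Flickr's static URL
# convention: 't' thumbnail, 's' small square, 'm' small, 'z' medium, 'b' large.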
def getGalleryList(self, per_page='', page=''):
"""
get list of galleries which
contain the photo.
Galleries are returned sorted by
date which the photo was added
to the gallery
"""
if per_page and int(per_page) > 500: # Max is 500
per_page = 500
method = "flickr.galleries.getListForPhoto"
try:
data = _doget(method, photo_id=self.id, per_page=per_page, \
page=page)
except FlickrError:
return None
return data.rsp.galleries.gallery
def getFavoriteCount(self):
"""
Return the number of favorites for the specific photo
"""
method = 'flickr.photos.getFavorites'
data = _doget(method, photo_id=self.id)
return data.rsp.photo.total
def getFavoriteUsers(self):
"""
Return the list of users who marked the specific photo as a favorite.
Each entry is a dict with keys 'id', 'username' and 'favedate'.
"""
method = 'flickr.photos.getFavorites'
data = _doget(method, photo_id=self.id)
u = []
try:
users = data.rsp.photo.person
except AttributeError:
return u # there are no favorites of this photo
try:
iter(users)
except TypeError:
users = [users] # there is only one favorite, so make it a list
for user in users:
u.append({"id": user.nsid, "username": user.username, "favedate": user.favedate})
return u
class Photoset(object):
"""A Flickr photoset.
If constructed with just an ID, the rest of the data about the Photoset is
fetched from the API.
"""
def __init__(self, id, title=None, primary=None, photos=0, description='', \
secret='', server=''):
self.__id = id
if not title and not primary:
method = 'flickr.photosets.getInfo'
data = _doget(method, photoset_id=self.id)
title = data.rsp.photoset.title.text
primary = Photo(data.rsp.photoset.primary)
description = data.rsp.photoset.description.text
photos = data.rsp.photoset.photos
self.__title = title
self.__primary = primary
self.__description = description
self.__count = photos
self.__secret = secret
self.__server = server
id = property(lambda self: self.__id)
title = property(lambda self: self.__title)
description = property(lambda self: self.__description)
primary = property(lambda self: self.__primary)
def __len__(self):
return self.__count
def __str__(self):
return '<Flickr Photoset %s>' % self.id
def getPhotos(self):
"""Returns list of Photos."""
method = 'flickr.photosets.getPhotos'
data = _doget(method, photoset_id=self.id)
photos = data.rsp.photoset.photo
p = []
# If there's only one photo in the set, the API returns a single photo,
# not a list
try:
iter(photos)
except TypeError:
photos = [photos]
for photo in photos:
p.append(Photo(photo.id, title=photo.title, secret=photo.secret, \
server=photo.server))
return p
def editPhotos(self, photos, primary=None):
"""Edit the photos in this set.
photos - photos for set
primary - primary photo (if None, the current primary is used)
"""
method = 'flickr.photosets.editPhotos'
if primary is None:
primary = self.primary
ids = [photo.id for photo in photos]
if primary.id not in ids:
ids.append(primary.id)
_dopost(method, auth=True, photoset_id=self.id,\
primary_photo_id=primary.id,
photo_ids=ids)
self.__count = len(ids)
return True
def addPhoto(self, photo):
"""Add a photo to this set.
photo - the photo
"""
method = 'flickr.photosets.addPhoto'
_dopost(method, auth=True, photoset_id=self.id, photo_id=photo.id)
self.__count += 1
return True
def removePhoto(self, photo):
"""Remove the photo from this set.
photo - the photo
"""
method = 'flickr.photosets.removePhoto'
_dopost(method, auth=True, photoset_id=self.id, photo_id=photo.id)
self.__count = self.__count - 1
return True
def editMeta(self, title=None, description=None):
"""Set metadata for photo. (flickr.photos.setMeta)"""
method = 'flickr.photosets.editMeta'
if title is None:
title = self.title
if description is None:
description = self.description
_dopost(method, auth=True, title=title, \
description=description, photoset_id=self.id)
self.__title = title
self.__description = description
return True
#XXX: Delete isn't handled well as the python object will still exist
def delete(self):
"""Deletes the photoset.
"""
method = 'flickr.photosets.delete'
_dopost(method, auth=True, photoset_id=self.id)
return True
def create(cls, photo, title, description=''):
"""Create a new photoset.
photo - primary photo
"""
if not isinstance(photo, Photo):
raise TypeError("Photo expected")
method = 'flickr.photosets.create'
data = _dopost(method, auth=True, title=title,\
description=description,\
primary_photo_id=photo.id)
pset = Photoset(data.rsp.photoset.id, title, Photo(photo.id),
photos=1, description=description)
return pset
create = classmethod(create)
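# Usage sketch (assumes authentication is configured; ids are placeholders):
#   primary = Photo('1234567890')
#   album = Photoset.create(primary, title='Holiday', description='Trip photos')
#   album.addPhoto(Photo('2345678901'))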
class User(object):
"""A Flickr user."""
def __init__(self, id, username=None, isadmin=None, ispro=None, \
realname=None, location=None, firstdate=None, count=None):
"""id required, rest optional."""
self.__loaded = False #so we don't keep loading data
self.__id = id
self.__username = username
self.__isadmin = isadmin
self.__ispro = ispro
self.__realname = realname
self.__location = location
self.__photos_firstdate = firstdate
self.__photos_count = count
#property fu
id = property(lambda self: self._general_getattr('id'))
username = property(lambda self: self._general_getattr('username'))
isadmin = property(lambda self: self._general_getattr('isadmin'))
ispro = property(lambda self: self._general_getattr('ispro'))
realname = property(lambda self: self._general_getattr('realname'))
location = property(lambda self: self._general_getattr('location'))
photos_firstdate = property(lambda self: \
self._general_getattr('photos_firstdate'))
photos_firstdatetaken = property(lambda self: \
self._general_getattr\
('photos_firstdatetaken'))
photos_count = property(lambda self: \
self._general_getattr('photos_count'))
icon_server= property(lambda self: self._general_getattr('icon_server'))
icon_url= property(lambda self: self._general_getattr('icon_url'))
def _general_getattr(self, var):
"""Generic get attribute function."""
if getattr(self, "_%s__%s" % (self.__class__.__name__, var)) is None \
and not self.__loaded:
self._load_properties()
return getattr(self, "_%s__%s" % (self.__class__.__name__, var))
def _load_properties(self):
"""Load User properties from Flickr."""
method = 'flickr.people.getInfo'
data = _doget(method, user_id=self.__id)
self.__loaded = True
person = data.rsp.person
self.__isadmin = person.isadmin
self.__ispro = person.ispro
self.__icon_server = person.iconserver
if int(person.iconserver) > 0:
self.__icon_url = 'http://photos%s.flickr.com/buddyicons/%s.jpg' \
% (person.iconserver, self.__id)
else:
self.__icon_url = 'http://www.flickr.com/images/buddyicon.jpg'
self.__username = person.username.text
self.__realname = getattr((getattr(person, 'realname', '')), 'text', '')
self.__location = getattr((getattr(person, 'location', '')), 'text', '')
self.__photos_count = getattr((getattr(getattr(person, 'photos', None), 'count', '')), 'text', '')
if self.__photos_count:
self.__photos_firstdate = person.photos.firstdate.text
self.__photos_firstdatetaken = person.photos.firstdatetaken.text
else:
self.__photos_firstdate = None
self.__photos_firstdatetaken = None
def __str__(self):
return '<Flickr User %s>' % self.id
def getPhotosets(self):
"""Returns a list of Photosets."""
method = 'flickr.photosets.getList'
data = _doget(method, user_id=self.id)
sets = []
if not getattr(data.rsp.photosets, 'photoset',None):
return sets #N.B. returns an empty set
if isinstance(data.rsp.photosets.photoset, list):
for photoset in data.rsp.photosets.photoset:
sets.append(Photoset(photoset.id, photoset.title.text,\
Photo(photoset.primary),\
secret=photoset.secret, \
server=photoset.server, \
description=photoset.description.text,
photos=photoset.photos))
else:
photoset = data.rsp.photosets.photoset
sets.append(Photoset(photoset.id, photoset.title.text,\
Photo(photoset.primary),\
secret=photoset.secret, \
server=photoset.server, \
description=photoset.description.text,
photos=photoset.photos))
return sets
def getPublicFavorites(self, per_page='', page=''):
return favorites_getPublicList(user_id=self.id, per_page=per_page, \
page=page)
def getFavorites(self, per_page='', page=''):
return favorites_getList(user_id=self.id, per_page=per_page, \
page=page)
def getGalleries(self, per_page='', page=''):
return galleries_getList(user_id=self.id, per_page=per_page, \
page=page)
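# Usage sketch (illustrative; 'user-nsid' is a placeholder Flickr user id):
#   user = User('user-nsid')
#   for pset in user.getPhotosets():
#       print(pset.title, len(pset))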
class Group(object):
"""Flickr Group Pool"""
def __init__(self, id, name=None, members=None, online=None,\
privacy=None, chatid=None, chatcount=None):
self.__loaded = False
self.__id = id
self.__name = name
self.__members = members
self.__online = online
self.__privacy = privacy
self.__chatid = chatid
self.__chatcount = chatcount
self.__url = None
id = property(lambda self: self._general_getattr('id'))
name = property(lambda self: self._general_getattr('name'))
members = property(lambda self: self._general_getattr('members'))
online = property(lambda self: self._general_getattr('online'))
privacy = property(lambda self: self._general_getattr('privacy'))
chatid = property(lambda self: self._general_getattr('chatid'))
chatcount = property(lambda self: self._general_getattr('chatcount'))
def _general_getattr(self, var):
"""Generic get attribute function."""
if getattr(self, "_%s__%s" % (self.__class__.__name__, var)) is None \
and not self.__loaded:
self._load_properties()
return getattr(self, "_%s__%s" % (self.__class__.__name__, var))
def _load_properties(self):
"""Loads the properties from Flickr."""
method = 'flickr.groups.getInfo'
data = _doget(method, group_id=self.id)
self.__loaded = True
group = data.rsp.group
self.__name = group.name.text
self.__description = group.description.text
self.__members = group.members.text
self.__privacy = group.privacy.text
def __str__(self):
return '<Flickr Group %s>' % self.id
def getPhotos(self, tags='', per_page='', page=''):
"""Get a list of photo objects for this group"""
method = 'flickr.groups.pools.getPhotos'
data = _doget(method, group_id=self.id, tags=tags,\
per_page=per_page, page=page)
photos = []
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
return photos
def add(self, photo):
"""Adds a Photo to the group"""
method = 'flickr.groups.pools.add'
_dopost(method, auth=True, photo_id=photo.id, group_id=self.id)
return True
def remove(self, photo):
"""Remove a Photo from the group"""
method = 'flickr.groups.pools.remove'
_dopost(method, auth=True, photo_id=photo.id, group_id=self.id)
return True
class Tag(object):
def __init__(self, id, author, raw, text):
self.id = id
self.author = author
self.raw = raw
self.text = text
def __str__(self):
return '<Flickr Tag %s (%s)>' % (self.id, self.text)
class Exif(object):
def __init__(self, camera, tags):
self.camera = camera
self.tags = tags
def __str__(self):
return '<Flickr Exif>'
@staticmethod
def getExif(photo_id_):
method = 'flickr.photos.getExif'
data = _doget(method, photo_id=photo_id_)
return Exif.parse(data.rsp.photo)
@staticmethod
def parse(photo):
camera = getattr(photo, 'camera', '')
tags = []
if hasattr(photo, 'exif'):
if isinstance(photo.exif, list):
tags = [ExifTag.parse(e) for e in photo.exif]
else:
tags = [ExifTag.parse(photo.exif)]
return Exif(camera, tags)
class ExifTag(object):
def __init__(self, tagspace, tagspaceid, tag, label, raw, clean):
self.tagspace = tagspace
self.tagspaceid = tagspaceid
self.tag = tag
self.label = label
self.raw = raw
self.clean = clean
def __str__(self):
return '<Flickr ExifTag %s (%s)>' % (self.tag, self.label)
@staticmethod
def parse(exif):
raw = ''
if hasattr(exif, 'raw'):
raw = exif.raw.text
clean = ''
if hasattr(exif, 'clean'):
clean = exif.clean.text
return ExifTag(exif.tagspace, exif.tagspaceid, exif.tag, exif.label,
raw, clean)
class Gallery(object):
"""Represents a Flickr Gallery.
Takes gallery_id as argument.
"""
# There are other attributes a Gallery could have,
# but defining them here might create errors.
# Might be useful to define them here, though,
# if the user wants to change them when creating
# an instance.
def __init__(self, id, owner=None, title=None, description=None, \
date_create=None, date_update=None, count_photos=None, \
count_videos=None, primary_photo_id=None, \
primary_photo_server=None, primary_photo_farm=None, \
primary_photo_secret=None):
self.__loaded = False
self.__url = None
self.__id = id
self.__owner = owner
self.__title = title
self.__description = description
self.__date_create = date_create
self.__date_update = date_update
self.__count_photos = count_photos
self.__count_videos = count_videos
self.__primary_photo_id = primary_photo_id
self.__primary_photo_server = primary_photo_server
self.__primary_photo_farm = primary_photo_farm
self.__primary_photo_secret = primary_photo_secret
id = property(lambda self: self._general_getattr('id'))
url = property(lambda self: self._general_getattr('url'))
owner = property(lambda self: self._general_getattr('owner'))
title = property(lambda self: self._general_getattr('title'))
description = property(lambda self: self._general_getattr('description'))
date_create = property(lambda self: self._general_getattr('date_create'))
date_update = property(lambda self: self._general_getattr('date_update'))
count_photos = property(lambda self: self._general_getattr('count_photos'))
count_videos = property(lambda self: self._general_getattr('count_videos'))
primary_photo_id = property(lambda self: self._general_getattr('primary_photo_id'))
primary_photo_server = property(lambda self: self._general_getattr('primary_photo_server'))
primary_photo_farm = property(lambda self: self._general_getattr('primary_photo_farm'))
primary_photo_secret = property(lambda self: self._general_getattr('primary_photo_secret'))
def _general_getattr(self, var):
"""Generic get attribute function."""
if getattr(self, "_%s__%s" % (self.__class__.__name__, var)) is None \
and not self.__loaded:
self._load_properties()
return getattr(self, "_%s__%s" % (self.__class__.__name__, var))
def _load_properties(self):
"""Loads the properties from Flickr."""
method = 'flickr.galleries.getInfo'
data = _doget(method, gallery_id=self.id)
self.__loaded = True
gallery = data.rsp.gallery
self.__url = gallery.url
self.__owner = gallery.owner
self.__title = gallery.title.text
self.__description = gallery.description.text
self.__date_create = gallery.date_create
self.__date_update = gallery.date_update
self.__count_photos = gallery.count_photos
self.__count_videos = gallery.count_videos
self.__primary_photo_id = gallery.primary_photo_id
self.__primary_photo_server = gallery.primary_photo_server
self.__primary_photo_farm = gallery.primary_photo_farm
self.__primary_photo_secret = gallery.primary_photo_secret
def __str__(self):
return '<Flickr Gallery %s>' % self.id
def addPhoto(self, photo, comment=''):
"""Add a new Photo to the Gallery."""
method = 'flickr.galleries.addPhoto'
_dopost(method, auth=True, photo_id=photo.id, gallery_id=self.id, \
comment=comment)
return True
def editMeta(self, title='', description=''):
"""Modify the meta-data for a gallery.
In original API, title is required, but here, if not
specified, it will use the current title. (So it's optional)
Calling this function without any parameters will blank out the description.
"""
method = 'flickr.galleries.editMeta'
if title == '':
title = self.title
_dopost(method, auth=True, gallery_id=self.id, title=title, \
description=description)
return True
def editPhoto(self, photo, comment):
"""Change the comment for the given Photo."""
method = 'flickr.galleries.editPhoto'
_dopost(method, auth=True, gallery_id=self.id, photo_id=photo.id, \
comment=comment)
return True
def editPhotos(self, primary_photo, *photos):
"""Modify the photos in a gallery. Use this method to add,
remove and re-order photos."""
method = 'flickr.galleries.editPhotos'
photo_ids = ','.join([photo.id for photo in photos])
_dopost(method, auth=True, gallery_id=self.id, \
primary_photo_id=primary_photo.id, photo_ids=photo_ids)
return True
def getPhotos(self, per_page='', page='', **extras):
"""Return the list of photos for a gallery.
*extras (optional): A comma-delimited list of extra information
to fetch for each returned record. Currently supported fields are:
description, license, date_upload, date_taken, owner_name,
icon_server, original_format, last_update, geo, tags, machine_tags,
o_dims, views, media, path_alias, url_sq, url_t, url_s, url_m, url_o
"""
method = 'flickr.galleries.getPhotos'
extras = ','.join('%s=%s' % (i, v) for i, v in list(dict(extras).items()))
data = _doget(method, gallery_id=self.id, per_page=per_page, \
page=page, extras=extras)
photos = {} # dict with photo instance as key and comment as value.
# if there's no comment, '' will be assigned.
for photo in data.rsp.photos.photo:
if photo.has_comment == '1':
photos[_parse_photo(photo)] = photo.comment.text
elif photo.has_comment == '0':
photos[_parse_photo(photo)] = ''
else: # Shouldn't EVER get here
raise FlickrError
return photos
#Flickr API methods
#see api docs http://www.flickr.com/services/api/
#for details of each param
#XXX: Could be Photo.search(cls)
def photos_search(user_id='', auth=False, tags='', tag_mode='', text='',\
min_upload_date='', max_upload_date='',\
min_taken_date='', max_taken_date='', \
license='', per_page='', page='', sort='',\
safe_search='', content_type='',in_gallery='', **kwargs):
"""Returns a list of Photo objects.
If auth=True then will auth the user. Can see private etc
"""
method = 'flickr.photos.search'
data = _doget(method, auth=auth, user_id=user_id, tags=tags, text=text,\
min_upload_date=min_upload_date,\
max_upload_date=max_upload_date, \
min_taken_date=min_taken_date, \
max_taken_date=max_taken_date, \
license=license, per_page=per_page,\
page=page, sort=sort, safe_search=safe_search, \
in_gallery=in_gallery, \
content_type=content_type, \
tag_mode=tag_mode, **kwargs)
photos = []
if 'photo' in data.rsp.photos.__dict__:
if isinstance(data.rsp.photos.photo, list):
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
else:
photos = [_parse_photo(data.rsp.photos.photo)]
return photos
def photos_search_pages(user_id='', auth=False, tags='', tag_mode='', text='',\
min_upload_date='', max_upload_date='',\
min_taken_date='', max_taken_date='', \
license='', per_page='', page='', sort='',\
safe_search='', content_type='',in_gallery='', **kwargs):
"""Returns the number of pages for the previous function (photos_search())
"""
method = 'flickr.photos.search'
data = _doget(method, auth=auth, user_id=user_id, tags=tags, text=text,\
min_upload_date=min_upload_date,\
max_upload_date=max_upload_date, \
min_taken_date=min_taken_date, \
max_taken_date=max_taken_date, \
license=license, per_page=per_page,\
page=page, sort=sort, safe_search=safe_search, \
in_gallery=in_gallery, \
content_type=content_type, \
tag_mode=tag_mode, **kwargs)
return data.rsp.photos.pages
def photos_get_recent(extras='', per_page='', page=''):
"""http://www.flickr.com/services/api/flickr.photos.getRecent.html
"""
method = 'flickr.photos.getRecent'
data = _doget(method, extras=extras, per_page=per_page, page=page)
photos = []
if 'photo' in data.rsp.photos.__dict__:
if isinstance(data.rsp.photos.photo, list):
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
else:
photos = [_parse_photo(data.rsp.photos.photo)]
return photos
#XXX: Could be class method in User
def people_findByEmail(email):
"""Returns User object."""
method = 'flickr.people.findByEmail'
data = _doget(method, find_email=email)
user = User(data.rsp.user.id, username=data.rsp.user.username.text)
return user
def people_findByUsername(username):
"""Returns User object."""
method = 'flickr.people.findByUsername'
data = _doget(method, username=username)
user = User(data.rsp.user.id, username=data.rsp.user.username.text)
return user
#XXX: Should probably be in User as a list User.public
def people_getPublicPhotos(user_id, per_page='', page=''):
"""Returns list of Photo objects."""
method = 'flickr.people.getPublicPhotos'
data = _doget(method, user_id=user_id, per_page=per_page, page=page)
photos = []
    if hasattr(data.rsp.photos, "photo"):  # Check if there are photos at all (may have been paging too far).
if isinstance(data.rsp.photos.photo, list):
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
else:
photos = [_parse_photo(data.rsp.photos.photo)]
return photos
#XXX: These are also called from User
def favorites_getList(user_id='', per_page='', page=''):
"""Returns list of Photo objects."""
method = 'flickr.favorites.getList'
data = _doget(method, auth=True, user_id=user_id, per_page=per_page,\
page=page)
photos = []
if isinstance(data.rsp.photos.photo, list):
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
else:
photos = [_parse_photo(data.rsp.photos.photo)]
return photos
def favorites_getPublicList(user_id, per_page='', page=''):
"""Returns list of Photo objects."""
method = 'flickr.favorites.getPublicList'
data = _doget(method, auth=False, user_id=user_id, per_page=per_page,\
page=page)
photos = []
if isinstance(data.rsp.photos.photo, list):
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
else:
photos = [_parse_photo(data.rsp.photos.photo)]
return photos
def favorites_add(photo_id):
"""Add a photo to the user's favorites."""
method = 'flickr.favorites.add'
_dopost(method, auth=True, photo_id=photo_id)
return True
def favorites_remove(photo_id):
"""Remove a photo from the user's favorites."""
method = 'flickr.favorites.remove'
_dopost(method, auth=True, photo_id=photo_id)
return True
def groups_getPublicGroups():
"""Get a list of groups the auth'd user is a member of."""
method = 'flickr.groups.getPublicGroups'
data = _doget(method, auth=True)
groups = []
if isinstance(data.rsp.groups.group, list):
for group in data.rsp.groups.group:
groups.append(Group(group.id, name=group.name))
else:
group = data.rsp.groups.group
groups = [Group(group.id, name=group.name)]
return groups
def groups_pools_getGroups():
"""Get a list of groups the auth'd user can post photos to."""
method = 'flickr.groups.pools.getGroups'
data = _doget(method, auth=True)
groups = []
if isinstance(data.rsp.groups.group, list):
for group in data.rsp.groups.group:
groups.append(Group(group.id, name=group.name, \
privacy=group.privacy))
else:
group = data.rsp.groups.group
groups = [Group(group.id, name=group.name, privacy=group.privacy)]
return groups
def tags_getListUser(user_id=''):
"""Returns a list of tags for the given user (in string format)"""
method = 'flickr.tags.getListUser'
auth = user_id == ''
data = _doget(method, auth=auth, user_id=user_id)
if isinstance(data.rsp.tags.tag, list):
return [tag.text for tag in data.rsp.tags.tag]
else:
return [data.rsp.tags.tag.text]
def tags_getListUserPopular(user_id='', count=''):
"""Gets the popular tags for a user in dictionary form tag=>count"""
method = 'flickr.tags.getListUserPopular'
auth = user_id == ''
data = _doget(method, auth=auth, user_id=user_id)
result = {}
if isinstance(data.rsp.tags.tag, list):
for tag in data.rsp.tags.tag:
result[tag.text] = tag.count
else:
result[data.rsp.tags.tag.text] = data.rsp.tags.tag.count
return result
def tags_getrelated(tag):
"""Gets the related tags for given tag."""
method = 'flickr.tags.getRelated'
data = _doget(method, auth=False, tag=tag)
if isinstance(data.rsp.tags.tag, list):
return [tag.text for tag in data.rsp.tags.tag]
else:
return [data.rsp.tags.tag.text]
def contacts_getPublicList(user_id):
"""Gets the contacts (Users) for the user_id"""
method = 'flickr.contacts.getPublicList'
data = _doget(method, auth=False, user_id=user_id)
    try:
        if isinstance(data.rsp.contacts.contact, list):
            return [User(user.nsid, username=user.username) \
                    for user in data.rsp.contacts.contact]
        else:
            # A single contact is returned as a bare element rather than a list.
            user = data.rsp.contacts.contact
            return [User(user.nsid, username=user.username)]
    except AttributeError:
        return "No users in the list"
    except:
        return "Unknown error"
def interestingness():
method = 'flickr.interestingness.getList'
data = _doget(method)
photos = []
if isinstance(data.rsp.photos.photo , list):
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
else:
photos = [_parse_photo(data.rsp.photos.photo)]
return photos
def galleries_create(title, description, primary_photo_id=None):
"""Create a new gallery."""
method = 'flickr.galleries.create'
    if primary_photo_id is None:
        _dopost(method, auth=True, title=title, description=description)
    else:
        _dopost(method, auth=True, title=title, description=description,
                primary_photo_id=primary_photo_id)
def galleries_getList(user_id='', per_page='', page=''):
"""Returns list of Gallery objects."""
method = 'flickr.galleries.getList'
data = _doget(method, auth=False, user_id=user_id, per_page=per_page, \
page=page)
galleries = []
if isinstance(data.rsp.galleries.gallery, list):
for gallery in data.rsp.galleries.gallery:
galleries.append(_parse_gallery(gallery))
else:
galleries = [_parse_gallery(data.rsp.galleries.gallery)]
return galleries
def test_login():
method = 'flickr.test.login'
data = _doget(method, auth=True)
user = User(data.rsp.user.id, username=data.rsp.user.username.text)
return user
def test_echo():
method = 'flickr.test.echo'
data = _doget(method)
return data.rsp.stat
#useful methods
def _doget(method, auth=False, **params):
#uncomment to check you aren't killing the flickr server
#print "***** do get %s" % method
params = _prepare_params(params)
url = '%s%s/?api_key=%s&method=%s&%s%s'% \
(HOST, API, API_KEY, method, urlencode(params),
_get_auth_url_suffix(method, auth, params))
#another useful debug print statement
if debug:
print("_doget", url)
return _get_data(minidom.parse(urlopen(url)))
def _dopost(method, auth=False, **params):
#uncomment to check you aren't killing the flickr server
#print "***** do post %s" % method
params = _prepare_params(params)
url = '%s%s/?api_key=%s%s'% \
(HOST, API, API_KEY, _get_auth_url_suffix(method, auth, params))
# There's no reason this can't be str(urlencode(params)). I just wanted to
# have it the same as the rest.
payload = '%s' % (urlencode(params))
#another useful debug print statement
if debug:
print("_dopost url", url)
print("_dopost payload", payload)
    return _get_data(minidom.parse(urlopen(url, payload.encode('utf-8'))))
def _prepare_params(params):
"""Convert lists to strings with ',' between items."""
for (key, value) in list(params.items()):
if isinstance(value, list):
params[key] = ','.join([item for item in value])
return params
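# Example of the conversion performed above (illustrative values):
#   _prepare_params({'tags': ['cat', 'dog']}) returns {'tags': 'cat,dog'};
# non-list values pass through unchanged.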
def _get_data(xml):
"""Given a bunch of XML back from Flickr, we turn it into a data structure
we can deal with (after checking for errors)."""
data = unmarshal(xml)
if not data.rsp.stat == 'ok':
msg = "ERROR [%s]: %s" % (data.rsp.err.code, data.rsp.err.msg)
raise FlickrError(msg)
return data
def _get_api_sig(params):
"""Generate API signature."""
token = userToken()
parameters = ['api_key', 'auth_token']
for item in list(params.items()):
parameters.append(item[0])
parameters.sort()
api_string = [API_SECRET]
for item in parameters:
for chocolate in list(params.items()):
if item == chocolate[0]:
api_string.append(item)
api_string.append(str(chocolate[1]))
if item == 'api_key':
api_string.append('api_key')
api_string.append(API_KEY)
if item == 'auth_token':
api_string.append('auth_token')
api_string.append(token)
    api_signature = hashlib.md5(''.join(api_string).encode('utf-8')).hexdigest()
return api_signature
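# Illustrative signing string built above (made-up parameter values): for
# params {'method': 'flickr.test.echo'} the md5 input is
#   API_SECRET + 'api_key' + API_KEY + 'auth_token' + token + 'method' + 'flickr.test.echo'
# i.e. the shared secret followed by the sorted parameter names and values.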
def _get_auth_url_suffix(method, auth, params):
"""Figure out whether we want to authorize, and if so, construct a suitable
URL suffix to pass to the Flickr API."""
authentication = False
# auth may be passed in via the API, AUTH may be set globally (in the same
# manner as API_KEY, etc). We do a few more checks than may seem necessary
# because we allow the 'auth' parameter to actually contain the
# authentication token, not just True/False.
if auth or AUTH:
token = userToken()
authentication = True
elif auth != False:
token = auth
authentication = True
elif AUTH != False:
token = AUTH
authentication = True
# If we're not authenticating, no suffix is required.
if not authentication:
return ''
full_params = params
full_params['method'] = method
return '&auth_token=%s&api_sig=%s' % (token, _get_api_sig(full_params))
def _parse_photo(photo):
"""Create a Photo object from photo data."""
owner = User(photo.owner)
title = photo.title
ispublic = photo.ispublic
isfriend = photo.isfriend
isfamily = photo.isfamily
secret = photo.secret
server = photo.server
farm = photo.farm
p = Photo(photo.id, owner=owner, title=title, ispublic=ispublic,\
isfriend=isfriend, isfamily=isfamily, secret=secret, \
server=server, farm=farm)
return p
def _parse_gallery(gallery):
"""Create a Gallery object from gallery data."""
# This might not work!! NEEDS TESTING
url = gallery.url
owner = User(gallery.owner)
title = gallery.title.text
description = gallery.description.text
date_create = gallery.date_create
date_update = gallery.date_update
count_photos = gallery.count_photos
count_videos = gallery.count_videos
primary_photo_id = gallery.primary_photo_id
primary_photo_server = gallery.primary_photo_server
primary_photo_farm = gallery.primary_photo_farm
primary_photo_secret = gallery.primary_photo_secret
g = Gallery(gallery.id, owner=owner, title=title, description=description, \
date_create=date_create, date_update=date_update, \
count_photos=count_photos, count_videos=count_videos, \
primary_photo_id=primary_photo_id, \
primary_photo_server=primary_photo_server, \
primary_photo_farm=primary_photo_farm, \
primary_photo_secret=primary_photo_secret)
return g
#stolen methods
class Bag: pass
#unmarshal taken and modified from pyamazon.py
#makes the xml easy to work with
def unmarshal(element):
rc = Bag()
if isinstance(element, minidom.Element):
for key in list(element.attributes.keys()):
setattr(rc, key, element.attributes[key].value)
childElements = [e for e in element.childNodes \
if isinstance(e, minidom.Element)]
if childElements:
for child in childElements:
key = child.tagName
if hasattr(rc, key):
if type(getattr(rc, key)) != type([]):
setattr(rc, key, [getattr(rc, key)])
setattr(rc, key, getattr(rc, key) + [unmarshal(child)])
elif isinstance(child, minidom.Element) and \
(child.tagName == 'Details'):
# make the first Details element a key
setattr(rc,key,[unmarshal(child)])
#dbg: because otherwise 'hasattr' only tests
#dbg: on the second occurence: if there's a
#dbg: single return to a query, it's not a
#dbg: list. This module should always
#dbg: return a list of Details objects.
else:
setattr(rc, key, unmarshal(child))
else:
#jec: we'll have the main part of the element stored in .text
#jec: will break if tag <text> is also present
text = "".join([e.data for e in element.childNodes \
if isinstance(e, minidom.Text)])
setattr(rc, 'text', text)
return rc
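# Illustrative Bag structure produced by unmarshal() above: parsing
#   <rsp stat="ok"><user id="123"><username>alice</username></user></rsp>
# gives rc.rsp.stat == 'ok', rc.rsp.user.id == '123' and
# rc.rsp.user.username.text == 'alice' (values are made up for the example).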
#unique items from a list from the cookbook
def uniq(alist):
    """Return the unique items from a list (order not preserved)."""
    # The old map(dict.__setitem__, alist, []) trick silently does nothing on
    # Python 3 (map stops at the shortest iterable), so build the dict explicitly.
    seen = {}
    for item in alist:
        seen[item] = None
    return list(seen.keys())
## Only the "getList" module is complete.
## Work in Progress; Nearly Finished
class Blogs():
def getList(self,auth=True):
"""blogs.getList requires READ authentication"""
# please read documentation on how to use this
method = 'flickr.blogs.getList'
        if auth:
            data = _doget(method, auth=True)
        else:
            data = _doget(method, auth=False)
bID = []
bName = []
bNeedsPword = []
bURL = []
try:
for plog in data.rsp.blogs.blog:
bID.append(plog.id)
bName.append(plog.name)
bNeedsPword.append(plog.needspassword)
bURL.append(plog.url)
except TypeError:
try:
bID.append(data.rsp.blogs.blog.id)
bName.append(data.rsp.blogs.blog.name)
bNeedsPword.append(data.rsp.blogs.blog.needspassword)
bURL.append(data.rsp.blogs.blog.url)
except AttributeError:
return "AttributeError, unexplained!"
except:
return "Unknown error!"
except AttributeError:
return "There are no blogs!"
myReturn = [bID,bName,bNeedsPword,bURL]
return myReturn
def postPhoto(self, blogID, photoID, title, description, bpassword):
"""blogs.postPhoto requires WRITE authentication"""
method = 'flickr.blogs.postPhoto'
return None
class Urls():
def getUserPhotosURL(self,userid):
"""Returns user URL in an array (to access, use array[1])"""
method = 'flickr.urls.getUserPhotos'
data = _doget(method, user_id=userid)
return [data.rsp.user.nsid,data.rsp.user.url]
class Auth():
def getFrob(self):
"""Returns a frob that is used in authentication"""
method = 'flickr.auth.getFrob'
sig_str = API_SECRET + 'api_key' + API_KEY + 'method' + method
        signature_hash = hashlib.md5(sig_str.encode('utf-8')).hexdigest()
data = _doget(method, auth=False, api_sig=signature_hash)
return data.rsp.frob.text
def loginLink(self, permission, frob):
"""Generates a link that the user should be sent to"""
myAuth = Auth()
sig_str = API_SECRET + 'api_key' + API_KEY + 'frob' + frob + 'perms' + permission
        signature_hash = hashlib.md5(sig_str.encode('utf-8')).hexdigest()
perms = permission
link = "http://flickr.com/services/auth/?api_key=%s&perms=%s&frob=%s&api_sig=%s" % (API_KEY, perms, frob, signature_hash)
return link
def getToken(self, frob):
"""This token is what needs to be used in future API calls"""
method = 'flickr.auth.getToken'
sig_str = API_SECRET + 'api_key' + API_KEY + 'frob' + frob + 'method' + method
        signature_hash = hashlib.md5(sig_str.encode('utf-8')).hexdigest()
data = _doget(method, auth=False, api_sig=signature_hash,
api_key=API_KEY, frob=frob)
return data.rsp.auth.token.text
def userToken():
    # This method allows flickr.py to retrieve the saved token. Once a token
    # for a program has been obtained from flickr it cannot be obtained again,
    # so flickr.py saves it in a file called token.txt (default) somewhere.
#if not tokenPath == '':
# f = file(os.path.join(tokenPath,tokenFile),'r')
#else:
# f = file(tokenFile,'r')
#token = f.read()
#f.close()
token = ""
return token
def getUserPhotosURL(userid):
"""Returns user URL in an array (to access, use array[1])"""
# This addition has been added upon request of
# nsteinmetz. It will be "cleaned up" at another
# time.
method = 'flickr.urls.getUserPhotos'
data = _doget(method, user_id=userid)
userurl = [data.rsp.user.nsid,data.rsp.user.url]
return userurl
if __name__ == '__main__':
print(test_echo())
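# Hedged usage sketch (kept commented out, not executed): it assumes this
# module's API_KEY/API_SECRET globals have been filled in and that the Flickr
# endpoints are reachable; 'someusername' is a made-up placeholder.
#
#   user = people_findByUsername('someusername')
#   for photo in photos_search(user_id=user.id, per_page='10'):
#       print(photo)
#   for gallery in galleries_getList(user_id=user.id):
#       print(gallery.title)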
|
|
# tictactoevariation.py
# author: Vivi Huang
from random import randrange
import math
# global constants
num_square=16
O="O"
X="X"
EMPTY=" "
TIE="TIE"
callnum=18
lastcall=""
def ask_yes_no(question):
"""Ask a yes or no question."""
response = None
while response not in ("y", "n"):
response = input(question).lower()
return response
def ask_number(question, low, high):
"""Ask for a number within a range."""
response = None
while response not in range(low, high):
response = int(input(question))
return response
"""
def make_board():
random_board=[]
for x in range(num_square):
random=randrange(2,2*num_square+1)
random_board.append(random)
print(random_board)
#for a in range(num_square):
#checkrandom(a,random_board)
for num in range(num_square):
j=0
while j<number:
while random_board[num]==random_board[j] and num!=j:
random_board[num]=randrange(2,2*num_square+1)
j=0
j=j+1
print(random_board)
return random_board
"""
def make_board():
random_board=[]
for i in range(num_square):
r=randrange(2,2*num_square+1)
while r in random_board:
r=randrange(2,2*num_square+1)
random_board.append(r)
print(random_board)
return random_board
random_board=make_board()
"""
def alternate_display_board(random_board):
print("")
for i in range(number):
# if len(str(random_board[i]))==1:
# random_board[i]=" "+str(+random_board[i])
if (i%4)==0:
if len(str(random_board[i]))==1:
print('\t'+" "+str(random_board[i]),end=" ")
else:
print("\t"+str(random_board[i]),end=" ")
else:
if len(str(random_board[i]))==1:
if i%4==2:
print(str(random_board[i])+" ",end=" ")
elif i%4==3:
print(str(random_board[i]),end=" ")
else:
print(" "+str(random_board[i]),end=" ")
else:
print (random_board[i], end = " ")
if (i +1 < 16):
if(((i+1) % 4 )== 0):
print("\n\t-----------------")
else:
print("|", end=" ")
print("\n")
"""
def alternate_display_board(random_board):
print("")
for i in range(num_square):
if (i%4)==0:
if len(str(random_board[i]))==1:
print('\t'+" "+str(random_board[i]),end=" ")
else:
print("\t"+str(random_board[i]),end=" ")
else:
if len(str(random_board[i]))==1:
print(" "+str(random_board[i]),end=" ")
else:
print (random_board[i], end = " ")
if (i +1 < 16):
if(((i+1) % 4 )== 0):
print("\n\t-----------------")
else:
print("|", end=" ")
print("\n")
def display_instruct():
"""Display game instructions."""
print (
"""
Welcome to the greatest intellectual challenge of all time: Tic-Tac-Toe variation.
This will be a showdown between your human brain and my silicon processor.
Here is a 4 x 4 board with each square randomly given a different number between
2 and 32. You will make your move known by entering a number, 1 - 18.
""")
alternate_display_board(random_board)
print(
"""
There are two players X and O. Play goes as follows:
step 1: Player X calls out a number from 1 to 18.
step 2: Player O calls out a number from 1 to 18 that player O has not called out before.
step 3: Player O adds that number to the last number called out by
X, and if that square is on the board and unmarked, that square is marked O.
step 4: Player X calls out a number from 1 to 18 that player X has not called out before.
step 5: Player X adds that number to the last number called out by
O, and if that square is on the board and unmarked, that square is marked X.
step 6: Repeat from step 2.
Play ends when either X or O has four in a row, or a column, or a diagonal,
and is declared a winner; or when no more moves are possible by either player.
Prepare yourself, human. The ultimate battle is about to begin.
""")
display_instruct()
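# Worked example of the calling rule above (illustrative numbers only): if X's
# last call was 7 and O now calls 5, O attempts to claim the square numbered
# 7 + 5 = 12; the move only counts if 12 is on the board and still unmarked.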
def pieces():
"""Determine if player or computer goes first."""
go_first = ask_yes_no("Do you require the first call? (y/n): ")
if go_first == "y":
print ("\nThen call the first number.")
hum = O
com = X
foundit=1
else:
print ("\nI will call first.")
com = O
hum = X
foundit=0
return com, hum,foundit
computer,human,foundit=pieces()
def record_board():
"""Create new game board."""
OXboard = []
for square in range(num_square):
OXboard.append(EMPTY)
return OXboard
OXboard=record_board()
def legal_move(proposecall,alreadycall,lastcall,random_board):
# OXboard and random_board can be together
if proposecall in range(1,callnum+1):
a=True
else:
a=False
if proposecall not in alreadycall[:-1]:
b=True
else:
b=False
if (proposecall+lastcall) in random_board:
c=True
else:
c=False
if a==b==c==True:
legal=True
else:
legal=False
return legal
def findsquarenum(move):
for i in range(num_square):
if random_board[i]==move:
return i
num_row=int(math.sqrt(num_square))
def winner(board,OXboard,lastcall,already):
ylist=[]
# get coordinates of each square
for y in range(num_row):
ylist.append([])
for x in range(num_row):
ylist[y].append(x+y*num_row)
for j in range(num_row):
i=0
        # check each column
while (i <= num_row-2) and (board[ylist[i][j]]==board[ylist[i+1][j]]!=EMPTY):
i=i+1
if i==num_row-1:
winner=board[ylist[i][j]]
return winner
for j in range(num_row):
# check each row
i=0
while (i<=num_row-2) and board[ylist[j][i]]==board[ylist[j][i+1]]!=EMPTY:
i=i+1
if i==num_row-1:
winner=board[ylist[j][i]]
return winner
#check each diagonal
i=0
while (i<=num_row-2) and board[ylist[i][i]]==board[ylist[i+1][i+1]]!=EMPTY:
i=i+1
if i==num_row-1:
winner=board[ylist[i][i]]
return winner
i=0
while (i<=num_row-2) and board[ylist[i][num_row-1-i]]==board[ylist[i+1][num_row-1-(i+1)]]!=EMPTY:
i=i+1
if i==num_row-1:
winner=board[ylist[i][num_row-1-i]]
return winner
if EMPTY not in OXboard:
return TIE
for k in range(1,callnum+1):
if k not in already:
if k+lastcall in board:
break
else:
if k==callnum:
return TIE
return None
lastcall=0
pick=[]
comalready=[]
humalready=[]
check=0
def human_move(human,foundit,lastcall):
"""Get human move."""
propose=""
while propose not in range(1,callnum+1):
propose = ask_number("Call out a number from 1 to 18: ", 1, callnum+1)
humalready.append(propose)
if foundit==1:
lastcall=propose
foundit=2
legal=False
print("You called out",propose)
check=1
return(lastcall,propose+0,foundit,legal,check)
if foundit==2:
move=lastcall+propose
legal=legal_move(propose,humalready,lastcall,random_board)
lastcall=propose
check=0
return (lastcall,move,foundit,legal,check)
def computer_move(OXboard, computer, human,foundit,comalready,lastcall):
"""Make computer move."""
# make a copy to work with since function will be changing list
tryboard = OXboard[:]
print ("I shall call out number",)
# computer makes the first move. Choose a random number(1-18), no need to check
if foundit==0:
lastcall=0
foundit=2
notapplicable="madeuptofillthespot"
propose=randrange(1,callnum+1)
print ("\t\t\t",propose,"!!!!!!")
comalready.append(propose)
lastcall=propose
legal=False
return (notapplicable,lastcall,foundit,legal,comalready)
# not the first move
if foundit==2:
# if computer can win, take that move
pick=[]
for j in range(num_square):
if OXboard[j]==EMPTY:
pick.append(j)
for square in pick:
tryboard[square] = computer
if winner(tryboard,OXboard,lastcall,comalready) == computer:
propose=random_board[square]-lastcall
if propose in range(1,callnum+1):
print("\t\t\t",propose,"!!!!!!")
comalready.append(propose)
lastcall=propose
legal=True
return (random_board[square],lastcall,foundit,legal,comalready)
# done checking this move, undo it
tryboard[square]=EMPTY
# if human can win, block that move
tryboard[square] = human
if winner(tryboard,OXboard,lastcall,comalready) == human:
propose=random_board[square]-lastcall
if propose in range(1,callnum+1):
print("\t\t\t",propose,"!!!!!!")
lastcall=propose
comalready.append(lastcall)
legal=True
return (random_board[square],lastcall,foundit,legal,comalready)
            # done checking this move, undo it
tryboard[square] = EMPTY
# since no one can win on next move, pick best open square
propose=randrange(1,callnum+1)
legal=legal_move(propose,comalready,lastcall,random_board)
while legal==False:
propose=randrange(1,callnum+1)
legal=legal_move(propose,comalready,lastcall,random_board)
print("\t\t\t",propose,"!!!!!!")
comalready.append(propose)
move=propose+lastcall
lastcall=propose
return (move,lastcall,foundit,legal,comalready)
def next_turn(turn):
"""Switch turns."""
if turn == X:
return O
else:
return X
#main
turn=O
lastcall=0
propose=0
already=[]
notonboard=0
while not winner(random_board,OXboard,lastcall,already):
print("turn:",turn)
if turn == human:
print("Oh it's your turn! Go ahead!")
propose,move,foundit,legal,check = human_move(OXboard, foundit,lastcall)
if foundit!=1:
if legal==True:
squarenum=findsquarenum(move)
random_board[squarenum]=human
OXboard[squarenum] = human
print("You called out",propose,"! And that will make a",move)
if foundit==2:
if legal==False:
move=lastcall+propose
if move not in random_board:
notonboard=1
if propose in humalready[:-1]:
notonboard=0
while propose in humalready[:-1]:
print("Foolish human! You have already called",propose)
print("Here're all the numbers you have called out:")
print(humalready[:-1])
print("I'll give you another chance to call out a legal number.")
propose = ask_number("Call out a number from 1 to 18: ", 1, callnum+1)
move=lastcall+propose
squarenum=findsquarenum(move)
if squarenum!=None:
random_board[squarenum]=human
OXboard[squarenum]=human
else:
notonboard=1
if check!=1:
print("You called out",propose,"! And that will make a",move)
if notonboard==1 and check!=1:
print("And",move,"is not on board! HAHA foolish human!")
already=comalready
lastcall=propose
if turn == computer:
print("Oh it's my turn! Sorry!")
move,propose,foundit,legal,notreallyuseful=computer_move(OXboard, computer, human,foundit,comalready,lastcall)
if foundit!=0:
if legal==True:
squarenum=findsquarenum(move)
random_board[squarenum]=computer
print("And that will make a",move)
OXboard[squarenum] = computer
already=humalready
lastcall=propose
alternate_display_board(random_board)
turn=next_turn(turn)
the_winner = winner(random_board,OXboard,lastcall,already)
if the_winner==TIE:
if EMPTY in OXboard:
print("Turn:",turn)
if turn==human:
print("Because there does not exist a number from 1-18 that has not been")
print("called by you that can make a legal move on the board")
print("(The human has called out:",humalready,")")
if turn==computer:
print("Because there does not exist a number from 1-18 that has not been")
print("called by me that can make a legal move on the board")
print("(The computer has called out:",comalready,")")
def congrat_winner(the_winner, computer, human):
"""Congratulate the winner."""
if the_winner != TIE:
print (the_winner, "won!\n" )
else:
print ("It's a tie!\n")
if the_winner == computer:
print ("As I predicted, human, I am triumphant once more. \n" \
"Proof that computers are superior to humans in all regards.")
elif the_winner == human:
print ("No, no! It cannot be! Somehow you tricked me, human. \n" \
"But never again! I, the computer, so swears it!")
elif the_winner == TIE:
print ("You were most lucky, human, and somehow managed to tie me. \n" \
"Celebrate today... for this is the best you will ever achieve.")
congrat_winner(the_winner, computer, human)
|
|
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from .... import core
from ....framework import IrGraph
from ....framework import IrNode
__all__ = ['FakeQAT2MkldnnINT8KernelPass', 'FakeQAT2MkldnnINT8PerfPass']
class FakeQAT2MkldnnINT8KernelPass(object):
"""
Convert QuantizationFreezePass generated IrGraph to MKL-DNN supported INT8
    IrGraph. The following transformations are done in this pass:
1. Convert int8 range weights with float32 data type, which are generated by
the QuantizationFreezePass, to float32 range weights with float32 data type
by using the corresponding scales. This conversion is because MKL-DNN INT8
conv2d kernel and mul kernel now only support float32 weights input, hence
weights quantization will happen inside the conv2d and mul INT8 kernel.
    2. Create the new conv2d or mul op with the converted weights, link its output
    to the fake_dequantize_abs_max op's output, and set the conv2d attribute
    "force_fp32_output" to true
3. Transform fake_quantize_xx op to quantize op
4. Remove fake_dequantize_abs_max op
"""
def __init__(self, _scope=None, _place=None):
"""
Args:
scope(fluid.Scope): scope is used to initialize the new parameters.
place(fluid.CPUPlace): place is used to initialize the new parameters.
Examples:
.. code-block:: python
            # The original graph will be rewritten.
import paddle.fluid as fluid
from paddle.fluid.contrib.slim.quantization \
import FakeQAT2MkldnnINT8KernelPass
from paddle.fluid.framework import IrGraph
from paddle.fluid import core
graph = IrGraph(core.Graph(fluid.Program().desc), for_test=False)
place = fluid.CPUPlace()
mkldnn_pass = FakeQAT2MkldnnINT8KernelPass(fluid.global_scope(),
place)
mkldnn_pass.apply(graph)
"""
self._scope = _scope
self._place = _place
self._quantize_type = [
'fake_quantize_moving_average_abs_max',
'fake_quantize_range_abs_max'
]
self._dequantize_type = ['fake_dequantize_max_abs']
self._quantize_dequantize_type = [
'fake_quantize_dequantize_moving_average_abs_max'
]
self._quantizable_ops = ['conv2d', 'depthwise_conv2d', 'mul']
self._conv_ops = ['conv2d', 'depthwise_conv2d']
self._pool_ops = ['pool2d']
self._in_scale = {}
self._max_range = {}
self._new_output = {}
self._s8_max = 127
def apply(self, graph):
"""
Quantize the graph for running MKL-DNN INT8 inference. According
to activation quantization type, the graph will transform fake
quantize ops to quantize ops and remove the fake dequantize ops.
Args:
graph(IrGraph): the applied graph.
"""
assert isinstance(graph,
IrGraph), 'graph must be the instance of IrGraph.'
ops = graph.all_op_nodes()
persistable_vars = [p.name() for p in graph.all_persistable_nodes()]
# Collect the _in_scales and _max_range to calculate the new scales for MKL-DNN
# INT8 conv2d and mul
for op_node in ops:
if op_node.name() in self._dequantize_type:
input_name = op_node.input("X")[0]
scale_name = op_node.input("Scale")[0]
self._in_scale[input_name] = self._load_param(self._scope,
scale_name)[0]
self._max_range[input_name] = op_node.op().attr("max_range")
self._new_output[input_name] = op_node.output("Out")[0]
if op_node.name() in self._quantize_dequantize_type:
inputs = op_node.op().input_names()
attrs = op_node.op().attr_names()
input_name = op_node.input("X")[0]
scale_name = op_node.input("InScale")[0]
self._in_scale[input_name] = self._load_param(self._scope,
scale_name)[0]
# self._max_range[input_name] = op_node.op().attr("max_range")
self._new_output[input_name] = op_node.output("Out")[0]
for op_node in ops:
if op_node.name() in self._quantizable_ops:
if op_node.name() in self._conv_ops:
self._transform_to_conv_mkldnn(graph, op_node)
elif op_node.name() in self._pool_ops:
self._transform_to_pool_mkldnn(graph, op_node)
else:
self._transform_to_mul_mkldnn(graph, op_node)
elif op_node.name() in self._quantize_type:
self._transform_to_quantize_mkldnn(graph, op_node)
elif op_node.name() in self._dequantize_type:
self._remove_fake_dequantize_op(graph, op_node)
self._remove_unused_var_nodes(graph)
return graph
def _transform_to_pool_mkldnn(self, graph, op):
output_name = op.output("Out")[0]
input_name = op.input("X")[0]
def _transform_to_conv_mkldnn(self, graph, op_node):
weight_name = op_node.input("Filter")[0]
output_name = op_node.output("Output")[0]
# Convert int8 range weights to fp32 range weights
weight = self._load_param(self._scope, weight_name)
w_fp32 = np.divide(
np.multiply(weight, self._s8_max), self._max_range[output_name])
w_fp32 = w_fp32.reshape(weight.shape)
self._restore_var(weight_name, w_fp32)
input_var_node = graph._find_node_by_name(op_node.inputs,
op_node.input("Input")[0])
weight_var_node = graph._find_node_by_name(op_node.inputs, weight_name)
# Set fake_dequantize_abs_max's output as new output of conv2d
output_var_node = graph._find_node_by_name(
graph.all_var_nodes(), self._new_output[output_name])
attrs = {
name: op_node.op().attr(name)
for name in op_node.op().attr_names()
}
conv_op_node = graph.create_op_node(
op_type='conv2d',
attrs=attrs,
inputs={'Input': input_var_node,
'Filter': weight_var_node},
outputs={'Output': output_var_node})
# Based on the QAT's scales to calculate the scales of MKL-DNN INT8 conv2d
scale_in = self._s8_max / self._in_scale[output_name]
        scale_w = [self._max_range[output_name] / self._s8_max]
conv_op_node.set_attr("Scale_weights", scale_w)
conv_op_node.set_attr("Scale_in", scale_in)
conv_op_node.set_attr("Scale_out", 1.0)
conv_op_node.set_attr("use_mkldnn", 1)
conv_op_node.set_attr("force_fp32_output", 1)
graph.link_to(input_var_node, conv_op_node)
graph.link_to(weight_var_node, conv_op_node)
graph.link_to(conv_op_node, output_var_node)
graph.safe_remove_nodes(op_node)
def _transform_to_mul_mkldnn(self, graph, op_node):
# For MKL-DNN INT8 mul, input Y should be the weights
weight_name = op_node.input("Y")[0]
output_name = op_node.output("Out")[0]
# Convert int8 range weights to fp32 range weights
weight = self._load_param(self._scope, weight_name)
w_fp32 = np.divide(
np.multiply(weight, self._s8_max), self._max_range[output_name])
w_fp32 = w_fp32.reshape(weight.shape)
self._restore_var(weight_name, w_fp32)
input_var_node = graph._find_node_by_name(op_node.inputs,
op_node.input("X")[0])
weight_var_node = graph._find_node_by_name(op_node.inputs, weight_name)
# Set fake_dequantize_abs_max's output as new output of mul
output_var_node = graph._find_node_by_name(
graph.all_var_nodes(), self._new_output[output_name])
attrs = {
name: op_node.op().attr(name)
for name in op_node.op().attr_names()
}
mul_op_node = graph.create_op_node(
op_type='mul',
attrs=attrs,
inputs={'X': input_var_node,
'Y': weight_var_node},
outputs={'Out': output_var_node})
# Based on the QAT's scales to calculate MKL-DNN INT8 mul's scales
scale_in = self._s8_max / self._in_scale[output_name]
        scale_w = [self._max_range[output_name] / self._s8_max]
mul_op_node.set_attr("scale_y", scale_w)
mul_op_node.set_attr("scale_x", scale_in)
mul_op_node.set_attr("scale_out", 1.0)
mul_op_node.set_attr("use_mkldnn", 1)
mul_op_node.set_attr("force_fp32_output", 1)
graph.link_to(input_var_node, mul_op_node)
graph.link_to(weight_var_node, mul_op_node)
graph.link_to(mul_op_node, output_var_node)
graph.safe_remove_nodes(op_node)
def _transform_to_quantize_mkldnn(self, graph, op_node):
"""
Transform fake_quantize_xx op to quantize mkldnn op in the graph.
"""
input_var_node = graph._find_node_by_name(op_node.inputs,
op_node.input("X")[0])
output_var_node = graph._find_node_by_name(op_node.outputs,
op_node.output("Out")[0])
scale_in = self._s8_max / self._load_param(
self._scope, op_node.input("InScale")[0])[0]
quant_op_node = graph.create_op_node(
op_type='quantize',
attrs={
'data_format': 'MKLDNNLAYOUT',
'use_mkldnn': 1,
'Scale': scale_in,
'is_negative_input': 1
},
inputs={'Input': input_var_node},
outputs={'Output': output_var_node})
graph.link_to(input_var_node, quant_op_node)
graph.link_to(quant_op_node, output_var_node)
graph.safe_remove_nodes(op_node)
def _remove_fake_dequantize_op(self, graph, op_node):
input_var_node = graph._find_node_by_name(op_node.inputs,
op_node.input("X")[0])
graph.safe_remove_nodes(op_node)
def _load_param(self, scope, param_name):
return np.array(scope.find_var(param_name).get_tensor())
def _restore_var(self, name, array):
tensor = self._scope.find_var(name).get_tensor()
tensor.set(array, self._place)
def _remove_unused_var_nodes(self, graph):
all_used_vars = set()
ops = graph.all_op_nodes()
for op_node in ops:
for input_node in op_node.inputs:
all_used_vars.add(input_node)
for output_node in op_node.outputs:
all_used_vars.add(output_node)
all_used_vars = {n.node for n in all_used_vars}
all_unused_vars = {
n
for n in filter(lambda node: node.node not in all_used_vars,
graph.all_var_nodes())
}
graph.safe_remove_nodes(all_unused_vars)
class FakeQAT2MkldnnINT8PerfPass(object):
"""
Transform a QAT model IrGraph into MKL-DNN supported INT8 IrGraph.
The pass consists of the following transformations:
1. gather scale values from fake quantize/dequantize operators,
2. extract FP32 inference model graph from the QAT graph, i.e.
a. remove fake quantize/dequantize operators,
b. dequantize conv2d and mul's weights,
3. optimize the FP32 graph using standard FP32 optimization fuses
(e.g. `conv2d`+`bn` -> `conv2d`),
4. quantize the optimized FP32 graph using standard INT8v2 quantization
passes (`cpu_quantize_pass`, `cpu_quantize_squash_pass`).
"""
def __init__(self, _scope=None, _place=None, _core=None, _debug=False):
self._scope = _scope
self._place = _place
self._core = _core
self._debug = _debug
self._quantize_types = [
'fake_quantize_moving_average_abs_max',
'fake_quantize_range_abs_max',
'fake_quantize_dequantize_moving_average_abs_max'
]
self._fake_quantize_types = [
'fake_quantize_moving_average_abs_max',
'fake_quantize_dequantize_moving_average_abs_max'
]
self._fake_dequantize_types = ['fake_dequantize_max_abs']
self._conv_ops = ['conv2d', 'depthwise_conv2d']
self._pool_ops = ['pool2d']
self._mul_ops = ['mul']
self._fc_ops = ['fc']
self._weight_scales = {}
        # Collect the input and output scales from the fake QAT model
self._var_quant_scales = {}
self._max_range = {}
self._s8_max = 127
def apply(self, graph):
assert isinstance(graph,
IrGraph), 'graph must be the instance of IrGraph.'
graph = self._gather_scales(graph)
graph = self._remove_fake_ops(graph)
graph = self._dequantize_weights(graph)
graph = self._optimize_fp32_graph(graph)
graph = self._compute_weight_scales(graph)
graph = self._update_conv_relu_scales(graph)
graph = self._update_pooling_scales(graph)
graph = self._quantize_fp32_graph(graph)
graph = self._remove_unused_var_nodes(graph)
return graph
def _convert_scale2tensor(self, scale):
tensor = core.LoDTensor()
tensor.set(scale, core.CPUPlace())
return tensor
def _gather_scales(self, graph):
for op in graph.all_op_nodes():
if op.name() in self._quantize_types:
bit_length = op.op().attr("bit_length")
                assert bit_length == 8, 'Unsupported number of quantization bits ({}). Only 8 is supported now.'.format(
bit_length)
input_name = op.input("X")[0]
scale_name = op.input("InScale")[0]
# Gather new weights scale after folding batchnorm in convolution
scale = np.array(1.0 / self._load_param(
self._scope, scale_name)[0]).astype(np.float64)
lod_tensor = self._convert_scale2tensor(scale)
use_unsigned_int = False
self._var_quant_scales[input_name] = (use_unsigned_int,
lod_tensor)
self._var_quant_scales[scale_name.replace(".scale", "")] = (
use_unsigned_int, lod_tensor)
if op.name() in self._fake_dequantize_types:
input_name = op.input("X")[0]
_max_range = op.op().attr("max_range")
self._weight_scales[input_name] = _max_range
return graph
def _update_pooling_scales(self, graph):
for op in graph.all_op_nodes():
if op.name() in self._pool_ops:
input_name = op.input("X")[0]
output_name = op.output("Out")[0]
if input_name in self._var_quant_scales:
self._var_quant_scales[
output_name] = self._var_quant_scales[input_name]
return graph
def _load_param(self, scope, param_name):
return np.array(scope.find_var(param_name).get_tensor())
def _remove_fake_ops(self, graph):
for op in graph.all_op_nodes():
if op.name() in self._fake_quantize_types:
op_out = graph._find_node_by_name(op.outputs,
op.output("Out")[0])
next_op = op_out.outputs[0]
if next_op.name() not in self._mul_ops:
self._remove_fake_quantize(graph, op)
for op in graph.all_op_nodes():
if op.name() in self._fake_dequantize_types:
op_in = graph._find_node_by_name(op.inputs, op.input("X")[0])
prev_op = op_in.inputs[0]
if prev_op.name() not in self._mul_ops:
self._remove_fake_dequantize(graph, op)
return graph
def _remove_fake_quantize(self, graph, op):
fake_quant_in = graph._find_node_by_name(op.inputs, op.input("X")[0])
fake_quant_in_scale = graph._find_node_by_name(op.inputs,
op.input("InScale")[0])
fake_quant_out = graph._find_node_by_name(op.outputs,
op.output("Out")[0])
fake_quant_out_scale = graph._find_node_by_name(
op.outputs, op.output("OutScale")[0])
next_ops = fake_quant_out.outputs
for next_op in next_ops:
self._swap_inputs(next_op, fake_quant_out, fake_quant_in)
graph.link_to(fake_quant_in, next_op)
graph.safe_remove_nodes(
{op, fake_quant_in_scale, fake_quant_out, fake_quant_out_scale})
return graph
def _remove_fake_dequantize(self, graph, op):
fake_dequant_in = graph._find_node_by_name(op.inputs, op.input("X")[0])
fake_dequant_out = graph._find_node_by_name(op.outputs,
op.output("Out")[0])
next_ops = fake_dequant_out.outputs
for next_op in next_ops:
self._swap_inputs(next_op, fake_dequant_out, fake_dequant_in)
graph.link_to(fake_dequant_in, next_op)
graph.safe_remove_nodes({op, fake_dequant_out})
return graph
def _swap_inputs(self, op, old_input, new_input):
for input_name in op.op().input_names():
if old_input.name() in op.input(input_name):
op.op().set_input(input_name, [
new_input.name() if x == old_input.name() else x
for x in op.input(input_name)
])
def _dequantize_weights(self, graph):
for op in graph.all_op_nodes():
if op.name() in self._conv_ops:
self._dequantize_conv_weights(graph, op)
return graph
def _dequantize_conv_weights(self, graph, op_node):
weight_name = op_node.input("Filter")[0]
output_name = op_node.output("Output")[0]
# Convert int8 range weights to fp32 range weights
scales = self._weight_scales[output_name]
weight = self._load_param(self._scope, weight_name)
w_fp32 = np.divide(np.multiply(weight, self._s8_max), scales)
w_fp32 = w_fp32.reshape(weight.shape)
self._restore_var(weight_name, w_fp32)
def _dequantize_mul_weights(self, graph, op_node):
weight_name = op_node.input("Y")[0]
output_name = op_node.output("Out")[0]
scales = self._weight_scales[output_name]
weight = self._load_param(self._scope, weight_name)
w_fp32 = np.divide(np.multiply(weight, self._s8_max), scales)
w_fp32 = w_fp32.reshape(weight.shape)
self._restore_var(weight_name, w_fp32)
def _restore_var(self, name, array):
tensor = self._scope.find_var(name).get_tensor()
tensor.set(array, self._place)
def _optimize_fp32_graph(self, graph):
graph = self._apply_pass(graph, 'mkldnn_placement_pass',
['mkldnn_enabled_op_types'], [set()])
graph = self._apply_pass(graph, 'depthwise_conv_mkldnn_pass')
graph = self._apply_pass(graph, 'conv_bn_fuse_pass')
graph = self._apply_pass(graph, 'conv_eltwiseadd_bn_fuse_pass')
graph = self._apply_pass(graph, 'conv_bias_mkldnn_fuse_pass')
graph = self._apply_pass(graph, 'conv_elementwise_add_mkldnn_fuse_pass')
graph = self._apply_pass(graph, 'conv_relu_mkldnn_fuse_pass')
graph = self._apply_pass(graph, 'conv_relu6_mkldnn_fuse_pass')
return graph
def _apply_pass(self, graph, pass_name, attrs=None, attr_values=None):
ir_pass = core.get_pass(pass_name)
cpp_graph = graph.graph
if not cpp_graph.has('__param_scope__'):
cpp_graph.set_not_owned('__param_scope__', self._scope)
if attrs:
assert attr_values and len(attrs) == len(
attr_values
), "Different number of pass attributes and their values."
for attr, value in zip(attrs, attr_values):
ir_pass.set(attr, value)
ir_pass.apply(cpp_graph)
if self._debug:
graph.draw('.', 'qat_fp32_{}'.format(pass_name),
graph.all_op_nodes())
self._remove_unused_var_nodes(graph)
return graph
def _remove_unused_var_nodes(self, graph):
all_used_vars = set()
ops = graph.all_op_nodes()
for op_node in ops:
for input_node in op_node.inputs:
all_used_vars.add(input_node)
for output_node in op_node.outputs:
all_used_vars.add(output_node)
all_used_vars = {n.node for n in all_used_vars}
all_unused_vars = {
n
for n in filter(lambda node: node.node not in all_used_vars,
graph.all_var_nodes())
}
graph.safe_remove_nodes(all_unused_vars)
return graph
def _compute_weight_scales(self, graph):
def _compute_var_scales(ops, out_name, w_name, axis):
for op in graph.all_op_nodes():
if op.op().type() in ops:
weight_var_name = op.input(w_name)[0]
weights = np.array(
self._load_param(self._scope, weight_var_name))
scales = 1.0 / np.amax(
np.abs(weights.reshape(weights.shape[0], -1)),
axis=axis)
lod_tensor = self._convert_scale2tensor(
scales.astype(np.float64))
use_unsigned_int = False
self._var_quant_scales[weight_var_name] = (use_unsigned_int,
lod_tensor)
_compute_var_scales(self._conv_ops, "Output", "Filter", axis=1)
_compute_var_scales(self._fc_ops, "Out", "W", axis=0)
return graph
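    # Illustrative per-channel scale computed above: for a conv2d filter of shape
    # (out_c, in_c, kh, kw), each output channel c gets
    # scale[c] = 1.0 / max(abs(W[c])), stored as a float64 LoDTensor keyed by the
    # weight variable name.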
def _find_avg_pooling_ids(self, graph):
ids = []
for op in graph.all_op_nodes():
if op.name() in self._pool_ops:
if op.op().attr("pooling_type") == "avg":
ids.append(op.id())
return set(ids) if len(ids) else set([-1])
def _transform_to_quantize_mkldnn(self, graph, op_node):
"""
Transform fake_quantize_xx op to quantize mkldnn op in the graph.
"""
input_var_node = graph._find_node_by_name(op_node.inputs,
op_node.input("X")[0])
output_var_node = graph._find_node_by_name(op_node.outputs,
op_node.output("Out")[0])
scale_in = self._s8_max / self._load_param(
self._scope, op_node.input("InScale")[0])[0]
quant_op_node = graph.create_op_node(
op_type='quantize',
attrs={
'data_format': 'MKLDNNLAYOUT',
'use_mkldnn': 1,
'Scale': scale_in,
'is_negative_input': 1
},
inputs={'Input': input_var_node},
outputs={'Output': output_var_node})
graph.link_to(input_var_node, quant_op_node)
graph.link_to(quant_op_node, output_var_node)
graph.safe_remove_nodes(op_node)
return quant_op_node
def _update_conv_relu_scales(self, graph):
for op in graph.all_op_nodes():
if op.name() in self._conv_ops:
out_name = op.output("Output")[0]
if out_name in self._var_quant_scales and \
op.op().attr("fuse_activation") == 'relu' and \
op.op().attr("fuse_residual_connection") == False:
_, tensor = self._var_quant_scales[out_name]
self._var_quant_scales[out_name] = (True, tensor)
return graph
def _quantize_fp32_graph(self, graph):
ir_pass = self._core.get_pass('cpu_quantize_placement_pass')
cpp_graph = graph.graph
ir_pass.set('quantize_enabled_op_types', {'conv2d', 'pool2d'})
ir_pass.set('quantize_excluded_op_ids',
self._find_avg_pooling_ids(graph))
ir_pass.apply(cpp_graph)
if self._debug:
graph.draw('.', 'qat_int8_{}'.format(ir_pass.type()),
graph.all_op_nodes())
graph = self._apply_pass(graph, 'cpu_quantize_pass',
['quant_var_scales'],
[self._var_quant_scales])
graph = self._apply_pass(graph, 'cpu_quantize_squash_pass')
return graph
|
|
"""
This module implements the TextResponse class which adds encoding handling and
discovering (through HTTP headers) to base Response class.
See documentation in docs/topics/request-response.rst
"""
import json
import warnings
from contextlib import suppress
from typing import Generator, Tuple
from urllib.parse import urljoin
import parsel
from w3lib.encoding import (html_body_declared_encoding, html_to_unicode,
http_content_type_encoding, resolve_encoding)
from w3lib.html import strip_html5_whitespace
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.http import Request
from scrapy.http.response import Response
from scrapy.utils.python import memoizemethod_noargs, to_unicode
from scrapy.utils.response import get_base_url
_NONE = object()
class TextResponse(Response):
_DEFAULT_ENCODING = 'ascii'
_cached_decoded_json = _NONE
attributes: Tuple[str, ...] = Response.attributes + ("encoding",)
def __init__(self, *args, **kwargs):
self._encoding = kwargs.pop('encoding', None)
self._cached_benc = None
self._cached_ubody = None
self._cached_selector = None
super().__init__(*args, **kwargs)
def _set_url(self, url):
if isinstance(url, str):
self._url = to_unicode(url, self.encoding)
else:
super()._set_url(url)
def _set_body(self, body):
self._body = b'' # used by encoding detection
if isinstance(body, str):
if self._encoding is None:
raise TypeError('Cannot convert unicode body - '
f'{type(self).__name__} has no encoding')
self._body = body.encode(self._encoding)
else:
super()._set_body(body)
@property
def encoding(self):
return self._declared_encoding() or self._body_inferred_encoding()
def _declared_encoding(self):
return (
self._encoding
or self._headers_encoding()
or self._body_declared_encoding()
)
def body_as_unicode(self):
"""Return body as unicode"""
warnings.warn('Response.body_as_unicode() is deprecated, '
'please use Response.text instead.',
ScrapyDeprecationWarning, stacklevel=2)
return self.text
def json(self):
"""
.. versionadded:: 2.2
Deserialize a JSON document to a Python object.
"""
if self._cached_decoded_json is _NONE:
self._cached_decoded_json = json.loads(self.text)
return self._cached_decoded_json
@property
def text(self):
""" Body as unicode """
# access self.encoding before _cached_ubody to make sure
# _body_inferred_encoding is called
benc = self.encoding
if self._cached_ubody is None:
charset = f'charset={benc}'
self._cached_ubody = html_to_unicode(charset, self.body)[1]
return self._cached_ubody
def urljoin(self, url):
"""Join this Response's url with a possible relative url to form an
absolute interpretation of the latter."""
return urljoin(get_base_url(self), url)
@memoizemethod_noargs
def _headers_encoding(self):
content_type = self.headers.get(b'Content-Type', b'')
return http_content_type_encoding(to_unicode(content_type))
def _body_inferred_encoding(self):
if self._cached_benc is None:
content_type = to_unicode(self.headers.get(b'Content-Type', b''))
benc, ubody = html_to_unicode(content_type, self.body,
auto_detect_fun=self._auto_detect_fun,
default_encoding=self._DEFAULT_ENCODING)
self._cached_benc = benc
self._cached_ubody = ubody
return self._cached_benc
def _auto_detect_fun(self, text):
for enc in (self._DEFAULT_ENCODING, 'utf-8', 'cp1252'):
try:
text.decode(enc)
except UnicodeError:
continue
return resolve_encoding(enc)
@memoizemethod_noargs
def _body_declared_encoding(self):
return html_body_declared_encoding(self.body)
@property
def selector(self):
from scrapy.selector import Selector
if self._cached_selector is None:
self._cached_selector = Selector(self)
return self._cached_selector
def xpath(self, query, **kwargs):
return self.selector.xpath(query, **kwargs)
def css(self, query):
return self.selector.css(query)
def follow(self, url, callback=None, method='GET', headers=None, body=None,
cookies=None, meta=None, encoding=None, priority=0,
dont_filter=False, errback=None, cb_kwargs=None, flags=None):
# type: (...) -> Request
"""
Return a :class:`~.Request` instance to follow a link ``url``.
It accepts the same arguments as ``Request.__init__`` method,
but ``url`` can be not only an absolute URL, but also
* a relative URL
* a :class:`~scrapy.link.Link` object, e.g. the result of
:ref:`topics-link-extractors`
* a :class:`~scrapy.selector.Selector` object for a ``<link>`` or ``<a>`` element, e.g.
``response.css('a.my_link')[0]``
* an attribute :class:`~scrapy.selector.Selector` (not SelectorList), e.g.
``response.css('a::attr(href)')[0]`` or
``response.xpath('//img/@src')[0]``
See :ref:`response-follow-example` for usage examples.
"""
if isinstance(url, parsel.Selector):
url = _url_from_selector(url)
elif isinstance(url, parsel.SelectorList):
raise ValueError("SelectorList is not supported")
encoding = self.encoding if encoding is None else encoding
return super().follow(
url=url,
callback=callback,
method=method,
headers=headers,
body=body,
cookies=cookies,
meta=meta,
encoding=encoding,
priority=priority,
dont_filter=dont_filter,
errback=errback,
cb_kwargs=cb_kwargs,
flags=flags,
)
def follow_all(self, urls=None, callback=None, method='GET', headers=None, body=None,
cookies=None, meta=None, encoding=None, priority=0,
dont_filter=False, errback=None, cb_kwargs=None, flags=None,
css=None, xpath=None):
# type: (...) -> Generator[Request, None, None]
"""
A generator that produces :class:`~.Request` instances to follow all
links in ``urls``. It accepts the same arguments as the :class:`~.Request`'s
``__init__`` method, except that each ``urls`` element does not need to be
an absolute URL; it can be any of the following:
* a relative URL
* a :class:`~scrapy.link.Link` object, e.g. the result of
:ref:`topics-link-extractors`
* a :class:`~scrapy.selector.Selector` object for a ``<link>`` or ``<a>`` element, e.g.
``response.css('a.my_link')[0]``
* an attribute :class:`~scrapy.selector.Selector` (not SelectorList), e.g.
``response.css('a::attr(href)')[0]`` or
``response.xpath('//img/@src')[0]``
In addition, ``css`` and ``xpath`` arguments are accepted to perform the link extraction
within the ``follow_all`` method (only one of ``urls``, ``css`` and ``xpath`` is accepted).
Note that when passing a ``SelectorList`` as argument for the ``urls`` parameter or
using the ``css`` or ``xpath`` parameters, this method will not produce requests for
selectors from which links cannot be obtained (for instance, anchor tags without an
``href`` attribute).
"""
arguments = [x for x in (urls, css, xpath) if x is not None]
if len(arguments) != 1:
raise ValueError(
"Please supply exactly one of the following arguments: urls, css, xpath"
)
if not urls:
if css:
urls = self.css(css)
if xpath:
urls = self.xpath(xpath)
if isinstance(urls, parsel.SelectorList):
selectors = urls
urls = []
for sel in selectors:
with suppress(_InvalidSelector):
urls.append(_url_from_selector(sel))
return super().follow_all(
urls=urls,
callback=callback,
method=method,
headers=headers,
body=body,
cookies=cookies,
meta=meta,
encoding=encoding,
priority=priority,
dont_filter=dont_filter,
errback=errback,
cb_kwargs=cb_kwargs,
flags=flags,
)
class _InvalidSelector(ValueError):
"""
Raised when a URL cannot be obtained from a Selector
"""
def _url_from_selector(sel):
# type: (parsel.Selector) -> str
if isinstance(sel.root, str):
# e.g. ::attr(href) result
return strip_html5_whitespace(sel.root)
if not hasattr(sel.root, 'tag'):
raise _InvalidSelector(f"Unsupported selector: {sel}")
if sel.root.tag not in ('a', 'link'):
raise _InvalidSelector("Only <a> and <link> elements are supported; "
f"got <{sel.root.tag}>")
href = sel.root.get('href')
if href is None:
raise _InvalidSelector(f"<{sel.root.tag}> element has no href attribute: {sel}")
return strip_html5_whitespace(href)
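# Minimal usage sketch (hypothetical, assuming ``parsel`` is installed):
# ``_url_from_selector`` accepts <a>/<link> element selectors as well as
# ``::attr()`` results, and raises ``_InvalidSelector`` for anything else,
# which is what lets ``follow_all`` silently skip anchors without an href.
if __name__ == '__main__':
    demo = parsel.Selector(text='<a href=" /page?id=1 ">next</a><a>no href</a>')
    print(_url_from_selector(demo.css('a')[0]))              # '/page?id=1' (whitespace stripped)
    print(_url_from_selector(demo.css('a::attr(href)')[0]))  # same URL, from an attribute selector
    try:
        _url_from_selector(demo.css('a')[1])                 # second <a> has no href
    except _InvalidSelector as exc:
        print(exc)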
|
|
""" Basic functions for manipulating 2d arrays
"""
from __future__ import division, absolute_import, print_function
from numpy.core.numeric import (
asanyarray, arange, zeros, greater_equal, multiply, ones, asarray,
where, int8, int16, int32, int64, empty, promote_types, diagonal,
)
from numpy.core import iinfo
__all__ = [
'diag', 'diagflat', 'eye', 'fliplr', 'flipud', 'rot90', 'tri', 'triu',
'tril', 'vander', 'histogram2d', 'mask_indices', 'tril_indices',
'tril_indices_from', 'triu_indices', 'triu_indices_from', ]
i1 = iinfo(int8)
i2 = iinfo(int16)
i4 = iinfo(int32)
def _min_int(low, high):
""" get small int that fits the range """
if high <= i1.max and low >= i1.min:
return int8
if high <= i2.max and low >= i2.min:
return int16
if high <= i4.max and low >= i4.min:
return int32
return int64
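# For example, _min_int(0, 200) returns int16 (200 exceeds int8's max of 127),
# while _min_int(-1, 100) fits entirely within int8.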
def fliplr(m):
"""
Flip array in the left/right direction.
Flip the entries in each row in the left/right direction.
Columns are preserved, but appear in a different order than before.
Parameters
----------
m : array_like
Input array, must be at least 2-D.
Returns
-------
f : ndarray
A view of `m` with the columns reversed. Since a view
is returned, this operation is :math:`\\mathcal O(1)`.
See Also
--------
flipud : Flip array in the up/down direction.
rot90 : Rotate array counterclockwise.
Notes
-----
Equivalent to A[:,::-1]. Requires the array to be at least 2-D.
Examples
--------
>>> A = np.diag([1.,2.,3.])
>>> A
array([[ 1., 0., 0.],
[ 0., 2., 0.],
[ 0., 0., 3.]])
>>> np.fliplr(A)
array([[ 0., 0., 1.],
[ 0., 2., 0.],
[ 3., 0., 0.]])
>>> A = np.random.randn(2,3,5)
>>> np.all(np.fliplr(A)==A[:,::-1,...])
True
"""
m = asanyarray(m)
if m.ndim < 2:
raise ValueError("Input must be >= 2-d.")
return m[:, ::-1]
def flipud(m):
"""
Flip array in the up/down direction.
Flip the entries in each column in the up/down direction.
Rows are preserved, but appear in a different order than before.
Parameters
----------
m : array_like
Input array.
Returns
-------
out : array_like
A view of `m` with the rows reversed. Since a view is
returned, this operation is :math:`\\mathcal O(1)`.
See Also
--------
fliplr : Flip array in the left/right direction.
rot90 : Rotate array counterclockwise.
Notes
-----
Equivalent to ``A[::-1,...]``.
Does not require the array to be two-dimensional.
Examples
--------
>>> A = np.diag([1.0, 2, 3])
>>> A
array([[ 1., 0., 0.],
[ 0., 2., 0.],
[ 0., 0., 3.]])
>>> np.flipud(A)
array([[ 0., 0., 3.],
[ 0., 2., 0.],
[ 1., 0., 0.]])
>>> A = np.random.randn(2,3,5)
>>> np.all(np.flipud(A)==A[::-1,...])
True
>>> np.flipud([1,2])
array([2, 1])
"""
m = asanyarray(m)
if m.ndim < 1:
raise ValueError("Input must be >= 1-d.")
return m[::-1, ...]
def rot90(m, k=1):
"""
Rotate an array by 90 degrees in the counter-clockwise direction.
The first two dimensions are rotated; therefore, the array must be at
least 2-D.
Parameters
----------
m : array_like
Array of two or more dimensions.
k : integer
Number of times the array is rotated by 90 degrees.
Returns
-------
y : ndarray
Rotated array.
See Also
--------
fliplr : Flip an array horizontally.
flipud : Flip an array vertically.
Examples
--------
>>> m = np.array([[1,2],[3,4]], int)
>>> m
array([[1, 2],
[3, 4]])
>>> np.rot90(m)
array([[2, 4],
[1, 3]])
>>> np.rot90(m, 2)
array([[4, 3],
[2, 1]])
"""
m = asanyarray(m)
if m.ndim < 2:
raise ValueError("Input must >= 2-d.")
k = k % 4
if k == 0:
return m
elif k == 1:
return fliplr(m).swapaxes(0, 1)
elif k == 2:
return fliplr(flipud(m))
else:
# k == 3
return fliplr(m.swapaxes(0, 1))
def eye(N, M=None, k=0, dtype=float):
"""
Return a 2-D array with ones on the diagonal and zeros elsewhere.
Parameters
----------
N : int
Number of rows in the output.
M : int, optional
Number of columns in the output. If None, defaults to `N`.
k : int, optional
Index of the diagonal: 0 (the default) refers to the main diagonal,
a positive value refers to an upper diagonal, and a negative value
to a lower diagonal.
dtype : data-type, optional
Data-type of the returned array.
Returns
-------
I : ndarray of shape (N,M)
An array where all elements are equal to zero, except for the `k`-th
diagonal, whose values are equal to one.
See Also
--------
identity : (almost) equivalent function
diag : diagonal 2-D array from a 1-D array specified by the user.
Examples
--------
>>> np.eye(2, dtype=int)
array([[1, 0],
[0, 1]])
>>> np.eye(3, k=1)
array([[ 0., 1., 0.],
[ 0., 0., 1.],
[ 0., 0., 0.]])
"""
if M is None:
M = N
m = zeros((N, M), dtype=dtype)
if k >= M:
return m
if k >= 0:
i = k
else:
i = (-k) * M
m[:M-k].flat[i::M+1] = 1
return m
def diag(v, k=0):
"""
Extract a diagonal or construct a diagonal array.
See the more detailed documentation for ``numpy.diagonal`` if you use this
function to extract a diagonal and wish to write to the resulting array;
whether it returns a copy or a view depends on what version of numpy you
are using.
Parameters
----------
v : array_like
If `v` is a 2-D array, return a copy of its `k`-th diagonal.
If `v` is a 1-D array, return a 2-D array with `v` on the `k`-th
diagonal.
k : int, optional
Diagonal in question. The default is 0. Use `k>0` for diagonals
above the main diagonal, and `k<0` for diagonals below the main
diagonal.
Returns
-------
out : ndarray
The extracted diagonal or constructed diagonal array.
See Also
--------
diagonal : Return specified diagonals.
diagflat : Create a 2-D array with the flattened input as a diagonal.
trace : Sum along diagonals.
triu : Upper triangle of an array.
tril : Lower triangle of an array.
Examples
--------
>>> x = np.arange(9).reshape((3,3))
>>> x
array([[0, 1, 2],
[3, 4, 5],
[6, 7, 8]])
>>> np.diag(x)
array([0, 4, 8])
>>> np.diag(x, k=1)
array([1, 5])
>>> np.diag(x, k=-1)
array([3, 7])
>>> np.diag(np.diag(x))
array([[0, 0, 0],
[0, 4, 0],
[0, 0, 8]])
"""
v = asanyarray(v)
s = v.shape
if len(s) == 1:
n = s[0]+abs(k)
res = zeros((n, n), v.dtype)
if k >= 0:
i = k
else:
i = (-k) * n
res[:n-k].flat[i::n+1] = v
return res
elif len(s) == 2:
return diagonal(v, k)
else:
raise ValueError("Input must be 1- or 2-d.")
def diagflat(v, k=0):
"""
Create a two-dimensional array with the flattened input as a diagonal.
Parameters
----------
v : array_like
Input data, which is flattened and set as the `k`-th
diagonal of the output.
k : int, optional
Diagonal to set; 0, the default, corresponds to the "main" diagonal,
a positive (negative) `k` giving the number of the diagonal above
(below) the main.
Returns
-------
out : ndarray
The 2-D output array.
See Also
--------
diag : MATLAB work-alike for 1-D and 2-D arrays.
diagonal : Return specified diagonals.
trace : Sum along diagonals.
Examples
--------
>>> np.diagflat([[1,2], [3,4]])
array([[1, 0, 0, 0],
[0, 2, 0, 0],
[0, 0, 3, 0],
[0, 0, 0, 4]])
>>> np.diagflat([1,2], 1)
array([[0, 1, 0],
[0, 0, 2],
[0, 0, 0]])
"""
try:
wrap = v.__array_wrap__
except AttributeError:
wrap = None
v = asarray(v).ravel()
s = len(v)
n = s + abs(k)
res = zeros((n, n), v.dtype)
if (k >= 0):
i = arange(0, n-k)
fi = i+k+i*n
else:
i = arange(0, n+k)
fi = i+(i-k)*n
res.flat[fi] = v
if not wrap:
return res
return wrap(res)
def tri(N, M=None, k=0, dtype=float):
"""
An array with ones at and below the given diagonal and zeros elsewhere.
Parameters
----------
N : int
Number of rows in the array.
M : int, optional
Number of columns in the array.
By default, `M` is taken equal to `N`.
k : int, optional
The sub-diagonal at and below which the array is filled.
`k` = 0 is the main diagonal, while `k` < 0 is below it,
and `k` > 0 is above. The default is 0.
dtype : dtype, optional
Data type of the returned array. The default is float.
Returns
-------
tri : ndarray of shape (N, M)
Array with its lower triangle filled with ones and zero elsewhere;
in other words ``T[i,j] == 1`` for ``j <= i + k``, 0 otherwise.
Examples
--------
>>> np.tri(3, 5, 2, dtype=int)
array([[1, 1, 1, 0, 0],
[1, 1, 1, 1, 0],
[1, 1, 1, 1, 1]])
>>> np.tri(3, 5, -1)
array([[ 0., 0., 0., 0., 0.],
[ 1., 0., 0., 0., 0.],
[ 1., 1., 0., 0., 0.]])
"""
if M is None:
M = N
m = greater_equal.outer(arange(N, dtype=_min_int(0, N)),
arange(-k, M-k, dtype=_min_int(-k, M - k)))
# Avoid making a copy if the requested type is already bool
m = m.astype(dtype, copy=False)
return m
def tril(m, k=0):
"""
Lower triangle of an array.
Return a copy of an array with elements above the `k`-th diagonal zeroed.
Parameters
----------
m : array_like, shape (M, N)
Input array.
k : int, optional
Diagonal above which to zero elements. `k = 0` (the default) is the
main diagonal, `k < 0` is below it and `k > 0` is above.
Returns
-------
tril : ndarray, shape (M, N)
Lower triangle of `m`, of same shape and data-type as `m`.
See Also
--------
triu : same thing, only for the upper triangle
Examples
--------
>>> np.tril([[1,2,3],[4,5,6],[7,8,9],[10,11,12]], -1)
array([[ 0, 0, 0],
[ 4, 0, 0],
[ 7, 8, 0],
[10, 11, 12]])
"""
m = asanyarray(m)
mask = tri(*m.shape[-2:], k=k, dtype=bool)
return where(mask, m, zeros(1, m.dtype))
def triu(m, k=0):
"""
Upper triangle of an array.
Return a copy of a matrix with the elements below the `k`-th diagonal
zeroed.
Please refer to the documentation for `tril` for further details.
See Also
--------
tril : lower triangle of an array
Examples
--------
>>> np.triu([[1,2,3],[4,5,6],[7,8,9],[10,11,12]], -1)
array([[ 1, 2, 3],
[ 4, 5, 6],
[ 0, 8, 9],
[ 0, 0, 12]])
"""
m = asanyarray(m)
mask = tri(*m.shape[-2:], k=k-1, dtype=bool)
return where(mask, zeros(1, m.dtype), m)
# Originally borrowed from John Hunter and matplotlib
def vander(x, N=None, increasing=False):
"""
Generate a Vandermonde matrix.
The columns of the output matrix are powers of the input vector. The
order of the powers is determined by the `increasing` boolean argument.
Specifically, when `increasing` is False, the `i`-th output column is
the input vector raised element-wise to the power of ``N - i - 1``. Such
a matrix with a geometric progression in each row is named for Alexandre-
Theophile Vandermonde.
Parameters
----------
x : array_like
1-D input array.
N : int, optional
Number of columns in the output. If `N` is not specified, a square
array is returned (``N = len(x)``).
increasing : bool, optional
Order of the powers of the columns. If True, the powers increase
from left to right, if False (the default) they are reversed.
.. versionadded:: 1.9.0
Returns
-------
out : ndarray
Vandermonde matrix. If `increasing` is False, the first column is
``x^(N-1)``, the second ``x^(N-2)`` and so forth. If `increasing` is
True, the columns are ``x^0, x^1, ..., x^(N-1)``.
See Also
--------
polynomial.polynomial.polyvander
Examples
--------
>>> x = np.array([1, 2, 3, 5])
>>> N = 3
>>> np.vander(x, N)
array([[ 1, 1, 1],
[ 4, 2, 1],
[ 9, 3, 1],
[25, 5, 1]])
>>> np.column_stack([x**(N-1-i) for i in range(N)])
array([[ 1, 1, 1],
[ 4, 2, 1],
[ 9, 3, 1],
[25, 5, 1]])
>>> x = np.array([1, 2, 3, 5])
>>> np.vander(x)
array([[ 1, 1, 1, 1],
[ 8, 4, 2, 1],
[ 27, 9, 3, 1],
[125, 25, 5, 1]])
>>> np.vander(x, increasing=True)
array([[ 1, 1, 1, 1],
[ 1, 2, 4, 8],
[ 1, 3, 9, 27],
[ 1, 5, 25, 125]])
The determinant of a square Vandermonde matrix is the product
of the differences between the values of the input vector:
>>> np.linalg.det(np.vander(x))
48.000000000000043
>>> (5-3)*(5-2)*(5-1)*(3-2)*(3-1)*(2-1)
48
"""
x = asarray(x)
if x.ndim != 1:
raise ValueError("x must be a one-dimensional array or sequence.")
if N is None:
N = len(x)
v = empty((len(x), N), dtype=promote_types(x.dtype, int))
tmp = v[:, ::-1] if not increasing else v
if N > 0:
tmp[:, 0] = 1
if N > 1:
tmp[:, 1:] = x[:, None]
multiply.accumulate(tmp[:, 1:], out=tmp[:, 1:], axis=1)
return v
def histogram2d(x, y, bins=10, range=None, normed=False, weights=None):
"""
Compute the bi-dimensional histogram of two data samples.
Parameters
----------
x : array_like, shape (N,)
An array containing the x coordinates of the points to be
histogrammed.
y : array_like, shape (N,)
An array containing the y coordinates of the points to be
histogrammed.
bins : int or array_like or [int, int] or [array, array], optional
The bin specification:
* If int, the number of bins for the two dimensions (nx=ny=bins).
* If array_like, the bin edges for the two dimensions
(x_edges=y_edges=bins).
* If [int, int], the number of bins in each dimension
(nx, ny = bins).
* If [array, array], the bin edges in each dimension
(x_edges, y_edges = bins).
* A combination [int, array] or [array, int], where int
is the number of bins and array is the bin edges.
range : array_like, shape(2,2), optional
The leftmost and rightmost edges of the bins along each dimension
(if not specified explicitly in the `bins` parameters):
``[[xmin, xmax], [ymin, ymax]]``. All values outside of this range
will be considered outliers and not tallied in the histogram.
normed : bool, optional
If False, returns the number of samples in each bin. If True,
returns the bin density ``bin_count / sample_count / bin_area``.
weights : array_like, shape(N,), optional
An array of values ``w_i`` weighing each sample ``(x_i, y_i)``.
Weights are normalized to 1 if `normed` is True. If `normed` is
False, the values of the returned histogram are equal to the sum of
the weights belonging to the samples falling into each bin.
Returns
-------
H : ndarray, shape(nx, ny)
The bi-dimensional histogram of samples `x` and `y`. Values in `x`
are histogrammed along the first dimension and values in `y` are
histogrammed along the second dimension.
xedges : ndarray, shape(nx,)
The bin edges along the first dimension.
yedges : ndarray, shape(ny,)
The bin edges along the second dimension.
See Also
--------
histogram : 1D histogram
histogramdd : Multidimensional histogram
Notes
-----
When `normed` is True, then the returned histogram is the sample
density, defined such that the sum over bins of the product
``bin_value * bin_area`` is 1.
Please note that the histogram does not follow the Cartesian convention
where `x` values are on the abscissa and `y` values on the ordinate
axis. Rather, `x` is histogrammed along the first dimension of the
array (vertical), and `y` along the second dimension of the array
(horizontal). This ensures compatibility with `histogramdd`.
Examples
--------
>>> import matplotlib as mpl
>>> import matplotlib.pyplot as plt
Construct a 2D-histogram with variable bin width. First define the bin
edges:
>>> xedges = [0, 1, 1.5, 3, 5]
>>> yedges = [0, 2, 3, 4, 6]
Next we create a histogram H with random bin content:
>>> x = np.random.normal(3, 1, 100)
>>> y = np.random.normal(1, 1, 100)
>>> H, xedges, yedges = np.histogram2d(y, x, bins=(xedges, yedges))
Or we fill the histogram H with a determined bin content:
>>> H = np.ones((4, 4)).cumsum().reshape(4, 4)
>>> print(H[::-1])  # This shows the bin content in the order as plotted
[[ 13. 14. 15. 16.]
[ 9. 10. 11. 12.]
[ 5. 6. 7. 8.]
[ 1. 2. 3. 4.]]
Imshow can only do an equidistant representation of bins:
>>> fig = plt.figure(figsize=(7, 3))
>>> ax = fig.add_subplot(131)
>>> ax.set_title('imshow: equidistant')
>>> im = plt.imshow(H, interpolation='nearest', origin='lower',
extent=[xedges[0], xedges[-1], yedges[0], yedges[-1]])
pcolormesh can display exact bin edges:
>>> ax = fig.add_subplot(132)
>>> ax.set_title('pcolormesh: exact bin edges')
>>> X, Y = np.meshgrid(xedges, yedges)
>>> ax.pcolormesh(X, Y, H)
>>> ax.set_aspect('equal')
NonUniformImage displays exact bin edges with interpolation:
>>> ax = fig.add_subplot(133)
>>> ax.set_title('NonUniformImage: interpolated')
>>> im = mpl.image.NonUniformImage(ax, interpolation='bilinear')
>>> xcenters = xedges[:-1] + 0.5 * (xedges[1:] - xedges[:-1])
>>> ycenters = yedges[:-1] + 0.5 * (yedges[1:] - yedges[:-1])
>>> im.set_data(xcenters, ycenters, H)
>>> ax.images.append(im)
>>> ax.set_xlim(xedges[0], xedges[-1])
>>> ax.set_ylim(yedges[0], yedges[-1])
>>> ax.set_aspect('equal')
>>> plt.show()
"""
from numpy import histogramdd
try:
N = len(bins)
except TypeError:
N = 1
if N != 1 and N != 2:
xedges = yedges = asarray(bins, float)
bins = [xedges, yedges]
hist, edges = histogramdd([x, y], bins, range, normed, weights)
return hist, edges[0], edges[1]
def mask_indices(n, mask_func, k=0):
"""
Return the indices to access (n, n) arrays, given a masking function.
Assume `mask_func` is a function that, for a square array a of size
``(n, n)`` with a possible offset argument `k`, when called as
``mask_func(a, k)`` returns a new array with zeros in certain locations
(functions like `triu` or `tril` do precisely this). Then this function
returns the indices where the non-zero values would be located.
Parameters
----------
n : int
The returned indices will be valid to access arrays of shape (n, n).
mask_func : callable
A function whose call signature is similar to that of `triu`, `tril`.
That is, ``mask_func(x, k)`` returns a boolean array, shaped like `x`.
`k` is an optional argument to the function.
k : scalar
An optional argument which is passed through to `mask_func`. Functions
like `triu`, `tril` take a second argument that is interpreted as an
offset.
Returns
-------
indices : tuple of arrays.
The `n` arrays of indices corresponding to the locations where
``mask_func(np.ones((n, n)), k)`` is True.
See Also
--------
triu, tril, triu_indices, tril_indices
Notes
-----
.. versionadded:: 1.4.0
Examples
--------
These are the indices that would allow you to access the upper triangular
part of any 3x3 array:
>>> iu = np.mask_indices(3, np.triu)
For example, if `a` is a 3x3 array:
>>> a = np.arange(9).reshape(3, 3)
>>> a
array([[0, 1, 2],
[3, 4, 5],
[6, 7, 8]])
>>> a[iu]
array([0, 1, 2, 4, 5, 8])
An offset can be passed also to the masking function. This gets us the
indices starting on the first diagonal right of the main one:
>>> iu1 = np.mask_indices(3, np.triu, 1)
with which we now extract only three elements:
>>> a[iu1]
array([1, 2, 5])
"""
m = ones((n, n), int)
a = mask_func(m, k)
return where(a != 0)
def tril_indices(n, k=0, m=None):
"""
Return the indices for the lower-triangle of an (n, m) array.
Parameters
----------
n : int
The row dimension of the arrays for which the returned
indices will be valid.
k : int, optional
Diagonal offset (see `tril` for details).
m : int, optional
.. versionadded:: 1.9.0
The column dimension of the arrays for which the returned
arrays will be valid.
By default `m` is taken equal to `n`.
Returns
-------
inds : tuple of arrays
The indices for the triangle. The returned tuple contains two arrays,
each with the indices along one dimension of the array.
See also
--------
triu_indices : similar function, for upper-triangular.
mask_indices : generic function accepting an arbitrary mask function.
tril, triu
Notes
-----
.. versionadded:: 1.4.0
Examples
--------
Compute two different sets of indices to access 4x4 arrays, one for the
lower triangular part starting at the main diagonal, and one starting two
diagonals further right:
>>> il1 = np.tril_indices(4)
>>> il2 = np.tril_indices(4, 2)
Here is how they can be used with a sample array:
>>> a = np.arange(16).reshape(4, 4)
>>> a
array([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11],
[12, 13, 14, 15]])
Both for indexing:
>>> a[il1]
array([ 0, 4, 5, 8, 9, 10, 12, 13, 14, 15])
And for assigning values:
>>> a[il1] = -1
>>> a
array([[-1, 1, 2, 3],
[-1, -1, 6, 7],
[-1, -1, -1, 11],
[-1, -1, -1, -1]])
These cover almost the whole array (two diagonals right of the main one):
>>> a[il2] = -10
>>> a
array([[-10, -10, -10, 3],
[-10, -10, -10, -10],
[-10, -10, -10, -10],
[-10, -10, -10, -10]])
"""
return where(tri(n, m, k=k, dtype=bool))
def tril_indices_from(arr, k=0):
"""
Return the indices for the lower-triangle of arr.
See `tril_indices` for full details.
Parameters
----------
arr : array_like
The indices will be valid for square arrays whose dimensions are
the same as arr.
k : int, optional
Diagonal offset (see `tril` for details).
See Also
--------
tril_indices, tril
Notes
-----
.. versionadded:: 1.4.0
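Examples
--------
Create a set of indices for the lower triangle of a 4x4 array and use it
to index into the array:
>>> a = np.arange(16).reshape(4, 4)
>>> il = np.tril_indices_from(a)
>>> a[il]
array([ 0,  4,  5,  8,  9, 10, 12, 13, 14, 15])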
"""
if arr.ndim != 2:
raise ValueError("input array must be 2-d")
return tril_indices(arr.shape[-2], k=k, m=arr.shape[-1])
def triu_indices(n, k=0, m=None):
"""
Return the indices for the upper-triangle of an (n, m) array.
Parameters
----------
n : int
The size of the arrays for which the returned indices will
be valid.
k : int, optional
Diagonal offset (see `triu` for details).
m : int, optional
.. versionadded:: 1.9.0
The column dimension of the arrays for which the returned
arrays will be valid.
By default `m` is taken equal to `n`.
Returns
-------
inds : tuple, shape(2) of ndarrays, shape(`n`)
The indices for the triangle. The returned tuple contains two arrays,
each with the indices along one dimension of the array. Can be used
to slice a ndarray of shape(`n`, `n`).
See also
--------
tril_indices : similar function, for lower-triangular.
mask_indices : generic function accepting an arbitrary mask function.
triu, tril
Notes
-----
.. versionadded:: 1.4.0
Examples
--------
Compute two different sets of indices to access 4x4 arrays, one for the
upper triangular part starting at the main diagonal, and one starting two
diagonals further right:
>>> iu1 = np.triu_indices(4)
>>> iu2 = np.triu_indices(4, 2)
Here is how they can be used with a sample array:
>>> a = np.arange(16).reshape(4, 4)
>>> a
array([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11],
[12, 13, 14, 15]])
Both for indexing:
>>> a[iu1]
array([ 0, 1, 2, 3, 5, 6, 7, 10, 11, 15])
And for assigning values:
>>> a[iu1] = -1
>>> a
array([[-1, -1, -1, -1],
[ 4, -1, -1, -1],
[ 8, 9, -1, -1],
[12, 13, 14, -1]])
These cover only a small part of the whole array (two diagonals right
of the main one):
>>> a[iu2] = -10
>>> a
array([[ -1, -1, -10, -10],
[ 4, -1, -1, -10],
[ 8, 9, -1, -1],
[ 12, 13, 14, -1]])
"""
return where(~tri(n, m, k=k-1, dtype=bool))
def triu_indices_from(arr, k=0):
"""
Return the indices for the upper-triangle of arr.
See `triu_indices` for full details.
Parameters
----------
arr : ndarray, shape(N, N)
The indices will be valid for square arrays.
k : int, optional
Diagonal offset (see `triu` for details).
Returns
-------
triu_indices_from : tuple, shape(2) of ndarray, shape(N)
Indices for the upper-triangle of `arr`.
See Also
--------
triu_indices, triu
Notes
-----
.. versionadded:: 1.4.0
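Examples
--------
Create a set of indices for the upper triangle of a 4x4 array and use it
to index into the array:
>>> a = np.arange(16).reshape(4, 4)
>>> iu = np.triu_indices_from(a)
>>> a[iu]
array([ 0,  1,  2,  3,  5,  6,  7, 10, 11, 15])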
"""
if arr.ndim != 2:
raise ValueError("input array must be 2-d")
return triu_indices(arr.shape[-2], k=k, m=arr.shape[-1])
|
|
import queue
grids = []
################################################################################
grids.append('''\
OOOOOOOOOOOO
OOO O
OO O
OO X O
O # # O
O O
O O
O # # O
OO X OO
OOX OO
OO X OOO
OOOOOOOOOOOO''')
grids.append('''\
OOOOOOOOOOOO
OOOOOOOOOOOO
OOOOOOOOOOOO
OOO OOO
OO ## OO
OO O # OO
OO OO
OO XX OO
OOO OOO
OOOOOOOOOOOO
OOOOOOOOOOOO
OOOOOOOOOOOO''')
grids.append('''\
OOOOOOOOOOOO
O O
O O
O # # O
O O
O O
O O
O O
O X # O
O O
O O
OOOOOOOOOOOO''')
grids.append('''\
OOOOOOOOOOOO
O O OO
O # OO
O OO
O OO
OOX OOO
O OO
O #OO
O OO
O O OOO
OOOOOOOOOOOO
OOOOOOOOOOOO''')
grids.append('''\
OOOOOOOOOOOO
O O O O
O # O
O # O
O X O
O O
O O
O # O
O # O
O O
O O
OOOOOOOOOOOO''')
grids.append('''\
OOOOOOOOOOOO
O O
O O # O
O OO
O O
O O
O X O
O O
O # O
O # O
O O
OOOOOOOOOOOO''')
################################################################################
def reader(grid):
walls = set()
blocks = set()
targets = set()
for y, line in enumerate(grid.split('\n')):
for x, char in enumerate(line):
if char == 'O':
walls.add((y, x))
elif char == '#':
blocks.add((y, x))
elif char == 'X':
targets.add((y, x))
return walls, blocks, targets
def worker(walls, blocks, targets):
states = {frozenset(blocks)}
jobs = queue.Queue()
jobs.put((blocks, None))
while not jobs.empty():
job = jobs.get()
# Pick a block to move.
for block in job[0]:
# Move up.
offset = 1
temp = (block[0] - offset, block[1])
while temp not in walls and temp not in job[0]:
offset += 1
temp = (block[0] - offset, block[1])
offset -= 1
# Check for movement.
if offset:
copy = set(job[0])
copy.remove(block)
copy.add((block[0] - offset, block[1]))
if copy not in states:
if targets.issubset(copy):
return (copy, job)
states.add(frozenset(copy))
jobs.put((copy, job))
# Move down.
offset = 1
temp = (block[0] + offset, block[1])
while temp not in walls and temp not in job[0]:
offset += 1
temp = (block[0] + offset, block[1])
offset -= 1
# Check for movement.
if offset:
copy = set(job[0])
copy.remove(block)
copy.add((block[0] + offset, block[1]))
if copy not in states:
if targets.issubset(copy):
return (copy, job)
states.add(frozenset(copy))
jobs.put((copy, job))
# Move left.
offset = 1
temp = (block[0], block[1] - offset)
while temp not in walls and temp not in job[0]:
offset += 1
temp = (block[0], block[1] - offset)
offset -= 1
# Check for movement.
if offset:
copy = set(job[0])
copy.remove(block)
copy.add((block[0], block[1] - offset))
if copy not in states:
if targets.issubset(copy):
return (copy, job)
states.add(frozenset(copy))
jobs.put((copy, job))
# Move right.
offset = 1
temp = (block[0], block[1] + offset)
while temp not in walls and temp not in job[0]:
offset += 1
temp = (block[0], block[1] + offset)
offset -= 1
# Check for movement.
if offset:
copy = set(job[0])
copy.remove(block)
copy.add((block[0], block[1] + offset))
if copy not in states:
if targets.issubset(copy):
return (copy, job)
states.add(frozenset(copy))
jobs.put((copy, job))
print(len(states), 'Unique States')
print('No Solution Found!')
return (blocks, None)
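# Illustrative sketch (hypothetical helper, not used by the solver): the four
# near-identical move blocks above all implement the same "slide until blocked"
# rule. Written once, with ``delta`` being (-1, 0), (1, 0), (0, -1) or (0, 1):
def slide(block, delta, walls, blocks):
    y, x = block
    dy, dx = delta
    offset = 0
    # Keep stepping while the next cell is neither a wall nor another block.
    while ((y + (offset + 1) * dy, x + (offset + 1) * dx) not in walls and
           (y + (offset + 1) * dy, x + (offset + 1) * dx) not in blocks):
        offset += 1
    # Return the block's resting position (unchanged if it cannot move).
    return (y + offset * dy, x + offset * dx)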
def opener(walls, answer, targets):
if answer[1] is not None:
opener(walls, answer[1], targets)
print(render(walls, answer[0], targets))
def render(walls, blocks, targets):
box = {}
for y, x in walls:
if y not in box:
box[y] = {}
box[y][x] = 'O'
for y, x in targets:
box[y][x] = 'X'
for y, x in blocks:
box[y][x] = '#'
max_y = max(box)
max_x = 0
for y in box:
max_x = max(max_x, max(box[y]))
lines = []
for y in range(max_y + 1):
line = ''
for x in range(max_x + 1):
line += box[y].get(x, ' ')
lines.append(line)
return '\n'.join(lines)
################################################################################
if __name__ == '__main__':
walls, blocks, targets = reader(grids[-1])
answer = worker(walls, blocks, targets)
opener(walls, answer, targets); input()
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
#
import pytest
from snowflake.sqlalchemy import (
AWSBucket,
AzureContainer,
CopyFormatter,
CopyIntoStorage,
CSVFormatter,
ExternalStage,
JSONFormatter,
PARQUETFormatter,
)
from sqlalchemy import Column, Integer, MetaData, Sequence, String, Table
from sqlalchemy.sql import select, text
def test_external_stage(sql_compiler):
assert ExternalStage.prepare_namespace("something") == "something."
assert ExternalStage.prepare_path("prefix") == "/prefix"
# All arguments are handled
assert (
sql_compiler(ExternalStage(name="name", path="prefix/path", namespace="namespace")) == "@namespace.name/prefix/path"
)
# defaults don't ruin things
assert sql_compiler(ExternalStage(name="name", path=None, namespace=None)) == "@name"
def test_copy_into_location(engine_testaccount, sql_compiler):
meta = MetaData()
conn = engine_testaccount.connect()
food_items = Table("python_tests_foods", meta,
Column('id', Integer, Sequence('new_user_id_seq'), primary_key=True),
Column('name', String),
Column('quantity', Integer))
meta.create_all(engine_testaccount)
copy_stmt_1 = CopyIntoStorage(from_=food_items,
into=AWSBucket.from_uri('s3://backup').encryption_aws_sse_kms(
'1234abcd-12ab-34cd-56ef-1234567890ab'),
formatter=CSVFormatter().record_delimiter('|').escape(None).null_if(['null', 'Null']))
assert (sql_compiler(copy_stmt_1) == "COPY INTO 's3://backup' FROM python_tests_foods FILE_FORMAT=(TYPE=csv "
"ESCAPE=None NULL_IF=('null', 'Null') RECORD_DELIMITER='|') ENCRYPTION="
"(KMS_KEY_ID='1234abcd-12ab-34cd-56ef-1234567890ab' TYPE='AWS_SSE_KMS')")
copy_stmt_2 = CopyIntoStorage(from_=select([food_items]).where(food_items.c.id == 1), # Test sub-query
into=AWSBucket.from_uri('s3://backup').credentials(
aws_role='some_iam_role').encryption_aws_sse_s3(),
formatter=JSONFormatter().file_extension('json').compression('zstd'))
assert (sql_compiler(copy_stmt_2) == "COPY INTO 's3://backup' FROM (SELECT python_tests_foods.id, "
"python_tests_foods.name, python_tests_foods.quantity FROM python_tests_foods "
"WHERE python_tests_foods.id = 1) FILE_FORMAT=(TYPE=json COMPRESSION='zstd' "
"FILE_EXTENSION='json') CREDENTIALS=(AWS_ROLE='some_iam_role') "
"ENCRYPTION=(TYPE='AWS_SSE_S3')")
copy_stmt_3 = CopyIntoStorage(from_=food_items,
into=AzureContainer.from_uri(
'azure://snowflake.blob.core.windows.net/snowpile/backup'
).credentials('token'),
formatter=PARQUETFormatter().snappy_compression(True))
assert (sql_compiler(copy_stmt_3) == "COPY INTO 'azure://snowflake.blob.core.windows.net/snowpile/backup' "
"FROM python_tests_foods FILE_FORMAT=(TYPE=parquet SNAPPY_COMPRESSION=true) "
"CREDENTIALS=(AZURE_SAS_TOKEN='token')")
copy_stmt_3.maxfilesize(50000000)
assert (sql_compiler(copy_stmt_3) == "COPY INTO 'azure://snowflake.blob.core.windows.net/snowpile/backup' "
"FROM python_tests_foods FILE_FORMAT=(TYPE=parquet SNAPPY_COMPRESSION=true) "
"MAX_FILE_SIZE = 50000000 "
"CREDENTIALS=(AZURE_SAS_TOKEN='token')")
copy_stmt_4 = CopyIntoStorage(from_=AWSBucket.from_uri('s3://backup').encryption_aws_sse_kms(
'1234abcd-12ab-34cd-56ef-1234567890ab'),
into=food_items,
formatter=CSVFormatter().record_delimiter('|').escape(None).null_if(['null', 'Null']))
assert (sql_compiler(copy_stmt_4) == "COPY INTO python_tests_foods FROM 's3://backup' FILE_FORMAT=(TYPE=csv "
"ESCAPE=None NULL_IF=('null', 'Null') RECORD_DELIMITER='|') ENCRYPTION="
"(KMS_KEY_ID='1234abcd-12ab-34cd-56ef-1234567890ab' TYPE='AWS_SSE_KMS')")
copy_stmt_5 = CopyIntoStorage(from_=AWSBucket.from_uri('s3://backup').encryption_aws_sse_kms(
'1234abcd-12ab-34cd-56ef-1234567890ab'),
into=food_items,
formatter=CSVFormatter().field_delimiter(','))
assert (sql_compiler(copy_stmt_5) == "COPY INTO python_tests_foods FROM 's3://backup' FILE_FORMAT=(TYPE=csv "
"FIELD_DELIMITER=',') ENCRYPTION="
"(KMS_KEY_ID='1234abcd-12ab-34cd-56ef-1234567890ab' TYPE='AWS_SSE_KMS')")
copy_stmt_6 = CopyIntoStorage(from_=food_items, into=ExternalStage(name="stage_name"), formatter=CSVFormatter())
assert sql_compiler(copy_stmt_6) == "COPY INTO @stage_name FROM python_tests_foods FILE_FORMAT=(TYPE=csv)"
copy_stmt_7 = CopyIntoStorage(from_=food_items, into=ExternalStage(name="stage_name", path="prefix/file", namespace="name"), formatter=CSVFormatter())
assert sql_compiler(copy_stmt_7) == "COPY INTO @name.stage_name/prefix/file FROM python_tests_foods FILE_FORMAT=(TYPE=csv)"
# NOTE: Besides checking the expected compiled text, submit the statements to the RegressionTests environment and
# expect them to fail, but for the right reasons
try:
acceptable_exc_reasons = {'Failure using stage area',
'AWS_ROLE credentials are not allowed for this account.',
'AWS_ROLE credentials are invalid'}
for stmnt in (copy_stmt_1, copy_stmt_2, copy_stmt_3, copy_stmt_4):
with pytest.raises(Exception) as exc:
conn.execute(stmnt)
if not any(map(lambda reason: reason in str(exc) or reason in str(exc.value), acceptable_exc_reasons)):
raise Exception("Not acceptable exception: {} {}".format(str(exc), str(exc.value)))
finally:
conn.close()
food_items.drop(engine_testaccount)
def test_copy_into_storage_csv_extended(sql_compiler):
"""
This test compiles the SQL to read CSV data from a stage and insert it into a
table.
The CSV formatting statements are inserted inline, i.e. no explicit SQL definition
of that format is necessary.
The Stage is a named stage, i.e. we assume that a CREATE STAGE statement was
executed before. This way, the COPY INTO statement does not need to know any
security details (credentials or tokens)
"""
# target table definition (NB: this could be omitted for the test, since the
# SQL statement copies the whole CSV and assumes the target structure matches)
metadata = MetaData()
target_table = Table(
"TEST_IMPORT",
metadata,
Column("COL1", Integer, primary_key=True),
Column("COL2", String),
)
# define a source stage (root path)
root_stage = ExternalStage(
name="AZURE_STAGE",
namespace="ML_POC.PUBLIC",
)
# define a CSV formatter
formatter = (
CSVFormatter()
.compression("AUTO")
.field_delimiter(",")
.record_delimiter(r"\n")
.field_optionally_enclosed_by(None)
.escape(None)
.escape_unenclosed_field(r"\134")
.date_format("AUTO")
.null_if([r"\N"])
.skip_header(1)
.trim_space(False)
.error_on_column_count_mismatch(True)
)
# define CopyInto object; reads all CSV data (=> pattern) from
# the sub-path "testdata" beneath the root stage
copy_into = CopyIntoStorage(
from_=ExternalStage.from_parent_stage(root_stage, "testdata"),
into=target_table,
formatter=formatter
)
copy_into.copy_options = {"pattern": "'.*csv'", "force": "TRUE"}
# check that the result is as expected
result = sql_compiler(copy_into)
expected = (
r"COPY INTO TEST_IMPORT "
r"FROM @ML_POC.PUBLIC.AZURE_STAGE/testdata "
r"FILE_FORMAT=(TYPE=csv COMPRESSION='auto' DATE_FORMAT='AUTO' "
r"ERROR_ON_COLUMN_COUNT_MISMATCH=True ESCAPE=None "
r"ESCAPE_UNENCLOSED_FIELD='\134' FIELD_DELIMITER=',' "
r"FIELD_OPTIONALLY_ENCLOSED_BY=None NULL_IF=('\N') RECORD_DELIMITER='\n' "
r"SKIP_HEADER=1 TRIM_SPACE=False) force = TRUE pattern = '.*csv'"
)
assert result == expected
def test_copy_into_storage_parquet_named_format(sql_compiler):
"""
This test compiles the SQL to read Parquet data from a stage and insert it into a
table. The source file is accessed using a SELECT statement.
The Parquet formatting definitions are defined in a named format which was
explicitly created before.
The Stage is a named stage, i.e. we assume that a CREATE STAGE statement was
executed before. This way, the COPY INTO statement does not need to know any
security details (credentials or tokens)
"""
# target table definition (NB: this could be omitted for the test, as long as
# the statement is not executed)
metadata = MetaData()
target_table = Table(
"TEST_IMPORT",
metadata,
Column("COL1", Integer, primary_key=True),
Column("COL2", String),
)
# define a source stage (root path)
root_stage = ExternalStage(
name="AZURE_STAGE",
namespace="ML_POC.PUBLIC",
)
# define the SELECT statement to access the source file.
# we could probably define source table metadata and use SQLAlchemy Column objects
# instead of text() clauses, but this seems to be the easiest way.
sel_statement = select(
text("$1:COL1::number"),
text("$1:COL2::varchar")
).select_from(
ExternalStage.from_parent_stage(root_stage, "testdata/out.parquet")
)
# use an existing source format.
formatter = CopyFormatter(format_name="parquet_file_format")
# setup CopyInto object
copy_into = CopyIntoStorage(
from_=sel_statement,
into=target_table,
formatter=formatter
)
copy_into.copy_options = {"force": "TRUE"}
# compile and check the result
result = sql_compiler(copy_into)
expected = (
"COPY INTO TEST_IMPORT "
"FROM (SELECT $1:COL1::number, $1:COL2::varchar "
"FROM @ML_POC.PUBLIC.AZURE_STAGE/testdata/out.parquet) "
"FILE_FORMAT=(format_name = parquet_file_format) force = TRUE"
)
assert result == expected
def test_copy_into_storage_parquet_files(sql_compiler):
"""
This test compiles the SQL to read Parquet data from a stage and insert it into a
table. The source file is accessed using a SELECT statement.
The Parquet formatting definitions are defined in a named format which was
explicitly created before. The format is specified as a property of the stage,
not the CopyInto object.
The Stage is a named stage, i.e. we assume that a CREATE STAGE statement was
executed before. This way, the COPY INTO statement does not need to know any
security details (credentials or tokens).
The FORCE option is set using the corresponding function in CopyInto.
The FILES option is set to choose the files to load
"""
# target table definition (NB: this could be omitted for the test, as long as
# the statement is not executed)
metadata = MetaData()
target_table = Table(
"TEST_IMPORT",
metadata,
Column("COL1", Integer, primary_key=True),
Column("COL2", String),
)
# define a source stage (root path)
root_stage = ExternalStage(
name="AZURE_STAGE",
namespace="ML_POC.PUBLIC",
)
# define the SELECT statement to access the source file.
# we could probably define source table metadata and use SQLAlchemy Column objects
# instead of text() clauses, but this seems to be the easiest way.
sel_statement = select(
text("$1:COL1::number"),
text("$1:COL2::varchar")
).select_from(
ExternalStage.from_parent_stage(root_stage, "testdata/out.parquet", file_format="parquet_file_format")
)
# setup CopyInto object
copy_into = CopyIntoStorage(
from_=sel_statement,
into=target_table,
).force(True).files(["foo.txt", "bar.txt"])
# compile and check the result
result = sql_compiler(copy_into)
expected = (
"COPY INTO TEST_IMPORT "
"FROM (SELECT $1:COL1::number, $1:COL2::varchar "
"FROM @ML_POC.PUBLIC.AZURE_STAGE/testdata/out.parquet "
"(file_format => parquet_file_format)) FILES = ('foo.txt','bar.txt') "
"FORCE = true"
)
assert result == expected
def test_copy_into_storage_parquet_pattern(sql_compiler):
"""
This test compiles the SQL to read Parquet data from a stage and insert it into a
table. The source file is accessed using a SELECT statement.
The Parquet formatting definitions are defined in a named format which was
explicitly created before. The format is specified as a property of the stage,
not the CopyInto object.
The Stage is a named stage, i.e. we assume that a CREATE STAGE statement was
executed before. This way, the COPY INTO statement does not need to know any
security details (credentials or tokens).
The FORCE option is set using the corresponding function in CopyInto.
The PATTERN option is set to choose multiple files
"""
# target table definition (NB: this could be omitted for the test, as long as
# the statement is not executed)
metadata = MetaData()
target_table = Table(
"TEST_IMPORT",
metadata,
Column("COL1", Integer, primary_key=True),
Column("COL2", String),
)
# define a source stage (root path)
root_stage = ExternalStage(
name="AZURE_STAGE",
namespace="ML_POC.PUBLIC",
)
# define the SELECT statement to access the source file.
# we can probably defined source table metadata and use SQLAlchemy Column objects
# instead of texts, but this seems to be the easiest way.
sel_statement = select(
text("$1:COL1::number"),
text("$1:COL2::varchar")
).select_from(
ExternalStage.from_parent_stage(root_stage, "testdata/out.parquet", file_format="parquet_file_format")
)
# setup CopyInto object
copy_into = CopyIntoStorage(
from_=sel_statement,
into=target_table,
).force(True).pattern("'.*csv'")
# compile and check the result
result = sql_compiler(copy_into)
expected = (
"COPY INTO TEST_IMPORT "
"FROM (SELECT $1:COL1::number, $1:COL2::varchar "
"FROM @ML_POC.PUBLIC.AZURE_STAGE/testdata/out.parquet "
"(file_format => parquet_file_format)) FORCE = true PATTERN = '.*csv'"
)
assert result == expected
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Deep Neural Network estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tempfile
from tensorflow.contrib import layers
from tensorflow.contrib import metrics as metrics_lib
from tensorflow.contrib.framework import deprecated
from tensorflow.contrib.framework import deprecated_arg_values
from tensorflow.contrib.framework import list_variables
from tensorflow.contrib.framework import load_variable
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.layers.python.layers import optimizers
from tensorflow.contrib.learn.python.learn import evaluable
from tensorflow.contrib.learn.python.learn import metric_spec
from tensorflow.contrib.learn.python.learn import session_run_hook
from tensorflow.contrib.learn.python.learn import trainable
from tensorflow.contrib.learn.python.learn.estimators import dnn_linear_combined
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.utils import checkpoints
from tensorflow.contrib.learn.python.learn.utils import export
from tensorflow.contrib.losses.python.losses import loss_ops
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import logging_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.training import training as train
_CENTERED_BIAS = "centered_bias"
_CENTERED_BIAS_WEIGHT = "centered_bias_weight"
_CLASSES = "classes"
_LOGISTIC = "logistic"
_PROBABILITIES = "probabilities"
# The default learning rate of 0.05 is a historical artifact of the initial
# implementation, but seems a reasonable choice.
_LEARNING_RATE = 0.05
def _as_iterable(preds, output):
for pred in preds:
yield pred[output]
def _get_feature_dict(features):
if isinstance(features, dict):
return features
return {"": features}
def _get_optimizer(optimizer):
if callable(optimizer):
return optimizer()
else:
return optimizer
def _add_hidden_layer_summary(value, tag):
logging_ops.scalar_summary("%s:fraction_of_zero_values" % tag,
nn.zero_fraction(value))
logging_ops.histogram_summary("%s:activation" % tag, value)
def _centered_bias(num_label_columns):
centered_bias = variables.Variable(
array_ops.zeros([num_label_columns]),
collections=[_CENTERED_BIAS, ops.GraphKeys.VARIABLES],
name=_CENTERED_BIAS_WEIGHT)
logging_ops.scalar_summary(
["centered_bias %d" % cb for cb in range(num_label_columns)],
array_ops.reshape(centered_bias, [-1]))
return centered_bias
def _centered_bias_step(targets, loss_fn, num_label_columns):
centered_bias = ops.get_collection(_CENTERED_BIAS)
batch_size = array_ops.shape(targets)[0]
logits = array_ops.reshape(
array_ops.tile(centered_bias[0], [batch_size]),
[batch_size, num_label_columns])
loss = loss_fn(logits, targets)
return train.AdagradOptimizer(0.1).minimize(loss, var_list=centered_bias)
def _get_weight_tensor(features, weight_column_name):
"""Returns the weight tensor of shape [batch_size] or 1."""
if weight_column_name is None:
return 1.0
else:
return array_ops.reshape(
math_ops.to_float(features[weight_column_name]),
shape=(-1,))
def _rescale_eval_loss(loss, weights):
"""Rescales evaluation loss according to the given weights.
The rescaling is needed because in the training loss weights are not
considered in the denominator, whereas for the evaluation loss we should
divide by the sum of weights.
The rescaling factor is:
R = (sum_{i} 1) / (sum_{i} w_{i})
Args:
loss: the scalar weighted loss.
weights: weight coefficients. Either a scalar, or a `Tensor` of shape
[batch_size].
Returns:
The given loss multiplied by the rescaling factor.
"""
rescaling_factor = math_ops.reduce_mean(weights)
return math_ops.div(loss, rescaling_factor)
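# Worked example for the rescaling above (illustrative numbers, following the
# docstring's convention): with per-example losses [2.0, 4.0] and weights
# [1.0, 3.0], the training loss is (1*2 + 3*4) / 2 = 7.0 (weights absent from
# the denominator); dividing by mean(weights) = 2.0 yields
# 3.5 = (1*2 + 3*4) / (1 + 3), i.e. the properly weighted mean.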
def _predictions(logits, n_classes):
"""Returns predictions for the given logits and n_classes."""
predictions = {}
if n_classes == 2:
predictions[_LOGISTIC] = math_ops.sigmoid(logits)
logits = array_ops.concat(1, [array_ops.zeros_like(logits), logits])
predictions[_PROBABILITIES] = nn.softmax(logits)
predictions[_CLASSES] = array_ops.reshape(
math_ops.argmax(logits, 1), shape=(-1, 1))
return predictions
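# Note on the binary branch above: prepending a zero column and applying
# softmax reproduces the sigmoid, since softmax([0, x])[1] = e^x / (1 + e^x)
# = sigmoid(x), so _LOGISTIC and column 1 of _PROBABILITIES agree when
# n_classes == 2.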
def _dnn_classifier_model_fn(features, targets, mode, params):
"""Deep Neural Net model_fn.
Args:
features: `Tensor` or dict of `Tensor` (depends on data passed to `fit`).
targets: `Tensor` of shape [batch_size, 1] or [batch_size] target labels of
dtype `int32` or `int64` in the range `[0, n_classes)`.
mode: Defines whether this is training, evaluation or prediction.
See `ModeKeys`.
params: A dict of hyperparameters.
The following hyperparameters are expected:
* hidden_units: List of hidden units per layer.
* feature_columns: An iterable containing all the feature columns used by
the model.
* n_classes: number of target classes.
* weight_column_name: A string defining the weight feature column, or
None if there are no weights.
* optimizer: string, `Optimizer` object, or callable that defines the
optimizer to use for training.
* activation_fn: Activation function applied to each layer. If `None`,
will use `tf.nn.relu`.
* dropout: When not `None`, the probability we will drop out a given
coordinate.
* gradient_clip_norm: A float > 0. If provided, gradients are
clipped to their global norm with this clipping ratio.
* enable_centered_bias: A bool. If True, estimator will learn a centered
bias variable for each class. Rest of the model structure learns the
residual after centered bias.
* num_ps_replicas: The number of parameter server replicas.
Returns:
predictions: A dict of `Tensor` objects.
loss: A scalar containing the loss of the step.
train_op: The op for training.
"""
hidden_units = params["hidden_units"]
feature_columns = params["feature_columns"]
n_classes = params["n_classes"]
weight_column_name = params["weight_column_name"]
optimizer = params["optimizer"]
activation_fn = params["activation_fn"]
dropout = params["dropout"]
gradient_clip_norm = params["gradient_clip_norm"]
enable_centered_bias = params["enable_centered_bias"]
num_ps_replicas = params["num_ps_replicas"]
features = _get_feature_dict(features)
parent_scope = "dnn"
num_label_columns = 1 if n_classes == 2 else n_classes
if n_classes == 2:
loss_fn = loss_ops.sigmoid_cross_entropy
else:
loss_fn = loss_ops.sparse_softmax_cross_entropy
input_layer_partitioner = (
partitioned_variables.min_max_variable_partitioner(
max_partitions=num_ps_replicas,
min_slice_size=64 << 20))
with variable_scope.variable_scope(
parent_scope + "/input_from_feature_columns",
values=features.values(),
partitioner=input_layer_partitioner) as scope:
net = layers.input_from_feature_columns(
columns_to_tensors=features,
feature_columns=feature_columns,
weight_collections=[parent_scope],
scope=scope)
hidden_layer_partitioner = (
partitioned_variables.min_max_variable_partitioner(
max_partitions=num_ps_replicas))
for layer_id, num_hidden_units in enumerate(hidden_units):
with variable_scope.variable_scope(
parent_scope + "/hiddenlayer_%d" % layer_id,
values=[net],
partitioner=hidden_layer_partitioner) as scope:
net = layers.fully_connected(
net,
num_hidden_units,
activation_fn=activation_fn,
variables_collections=[parent_scope],
scope=scope)
if dropout is not None and mode == estimator.ModeKeys.TRAIN:
net = layers.dropout(
net,
keep_prob=(1.0 - dropout))
_add_hidden_layer_summary(net, scope.name)
with variable_scope.variable_scope(
parent_scope + "/logits",
values=[net],
partitioner=hidden_layer_partitioner) as scope:
logits = layers.fully_connected(
net,
num_label_columns,
activation_fn=None,
variables_collections=[parent_scope],
scope=scope)
_add_hidden_layer_summary(logits, scope.name)
if enable_centered_bias:
logits = nn.bias_add(logits, _centered_bias(num_label_columns))
if mode == estimator.ModeKeys.TRAIN:
loss = loss_fn(logits, targets,
weight=_get_weight_tensor(features, weight_column_name))
train_ops = [optimizers.optimize_loss(
loss=loss, global_step=contrib_variables.get_global_step(),
learning_rate=_LEARNING_RATE, optimizer=_get_optimizer(optimizer),
clip_gradients=gradient_clip_norm, name=parent_scope)]
if enable_centered_bias:
train_ops.append(_centered_bias_step(targets, loss_fn, num_label_columns))
return None, loss, control_flow_ops.group(*train_ops)
elif mode == estimator.ModeKeys.EVAL:
predictions = _predictions(logits=logits, n_classes=n_classes)
weight = _get_weight_tensor(features, weight_column_name)
training_loss = loss_fn(logits, targets, weight=weight)
loss = _rescale_eval_loss(training_loss, weight)
return predictions, loss, []
else: # mode == estimator.ModeKeys.INFER:
predictions = _predictions(logits=logits, n_classes=n_classes)
return predictions, None, []
class DNNClassifier(evaluable.Evaluable, trainable.Trainable):
"""A classifier for TensorFlow DNN models.
Example:
```python
education = sparse_column_with_hash_bucket(column_name="education",
hash_bucket_size=1000)
occupation = sparse_column_with_hash_bucket(column_name="occupation",
hash_bucket_size=1000)
education_emb = embedding_column(sparse_id_column=education, dimension=16,
combiner="sum")
occupation_emb = embedding_column(sparse_id_column=occupation, dimension=16,
combiner="sum")
estimator = DNNClassifier(
feature_columns=[education_emb, occupation_emb],
hidden_units=[1024, 512, 256])
# Or estimator using the ProximalAdagradOptimizer optimizer with
# regularization.
estimator = DNNClassifier(
feature_columns=[education_emb, occupation_emb],
hidden_units=[1024, 512, 256],
optimizer=tf.train.ProximalAdagradOptimizer(
learning_rate=0.1,
l1_regularization_strength=0.001
))
# Input builders
def input_fn_train(): # returns x, Y
pass
estimator.fit(input_fn=input_fn_train)
def input_fn_eval(): # returns x, Y
pass
estimator.evaluate(input_fn=input_fn_eval)
estimator.predict(x=x)
```
Input of `fit` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
* for each `column` in `feature_columns`:
- if `column` is a `SparseColumn`, a feature with `key=column.name`
whose `value` is a `SparseTensor`.
- if `column` is a `WeightedSparseColumn`, two features: the first with
`key` the id column name, the second with `key` the weight column name.
Both features' `value` must be a `SparseTensor`.
- if `column` is a `RealValuedColumn`, a feature with `key=column.name`
whose `value` is a `Tensor`.
"""
def __init__(self,
hidden_units,
feature_columns,
model_dir=None,
n_classes=2,
weight_column_name=None,
optimizer=None,
activation_fn=nn.relu,
dropout=None,
gradient_clip_norm=None,
enable_centered_bias=None,
config=None):
"""Initializes a DNNClassifier instance.
Args:
hidden_units: List of hidden units per layer. All layers are fully
connected. Ex. `[64, 32]` means first layer has 64 nodes and second one
has 32.
feature_columns: An iterable containing all the feature columns used by
the model. All items in the set should be instances of classes derived
from `FeatureColumn`.
    model_dir: Directory to save model parameters, graph, etc. This can also
      be used to load checkpoints from the directory into an estimator to
      continue training a previously saved model.
n_classes: number of target classes. Default is binary classification.
It must be greater than 1.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
optimizer: An instance of `tf.Optimizer` used to train the model. If
`None`, will use an Adagrad optimizer.
activation_fn: Activation function applied to each layer. If `None`, will
use `tf.nn.relu`.
dropout: When not `None`, the probability we will drop out a given
coordinate.
gradient_clip_norm: A float > 0. If provided, gradients are
clipped to their global norm with this clipping ratio. See
tf.clip_by_global_norm for more details.
enable_centered_bias: A bool. If True, estimator will learn a centered
bias variable for each class. Rest of the model structure learns the
residual after centered bias.
config: `RunConfig` object to configure the runtime settings.
Returns:
A `DNNClassifier` estimator.
Raises:
ValueError: If `n_classes` < 2.
"""
if enable_centered_bias is None:
enable_centered_bias = True
dnn_linear_combined._changing_default_center_bias() # pylint: disable=protected-access
self._hidden_units = hidden_units
self._feature_columns = feature_columns
self._model_dir = model_dir or tempfile.mkdtemp()
if n_classes <= 1:
raise ValueError(
"Classification requires n_classes >= 2. Given: {}".format(n_classes))
self._n_classes = n_classes
self._weight_column_name = weight_column_name
optimizer = optimizer or "Adagrad"
num_ps_replicas = config.num_ps_replicas if config else 0
self._estimator = estimator.Estimator(
model_fn=_dnn_classifier_model_fn,
model_dir=self._model_dir,
config=config,
params={
"hidden_units": hidden_units,
"feature_columns": feature_columns,
"n_classes": n_classes,
"weight_column_name": weight_column_name,
"optimizer": optimizer,
"activation_fn": activation_fn,
"dropout": dropout,
"gradient_clip_norm": gradient_clip_norm,
"enable_centered_bias": enable_centered_bias,
"num_ps_replicas": num_ps_replicas,
})
def fit(self, x=None, y=None, input_fn=None, steps=None, batch_size=None,
monitors=None, max_steps=None):
"""See trainable.Trainable."""
# TODO(roumposg): Remove when deprecated monitors are removed.
if monitors is not None:
deprecated_monitors = [
m for m in monitors
if not isinstance(m, session_run_hook.SessionRunHook)
]
for monitor in deprecated_monitors:
monitor.set_estimator(self)
monitor._lock_estimator() # pylint: disable=protected-access
result = self._estimator.fit(x=x, y=y, input_fn=input_fn, steps=steps,
batch_size=batch_size, monitors=monitors,
max_steps=max_steps)
if monitors is not None:
for monitor in deprecated_monitors:
monitor._unlock_estimator() # pylint: disable=protected-access
return result
def evaluate(self, x=None, y=None, input_fn=None, feed_fn=None,
batch_size=None, steps=None, metrics=None, name=None):
"""See evaluable.Evaluable."""
if metrics is None:
metrics = {}
metrics.update({
"accuracy": metric_spec.MetricSpec(
metric_fn=metrics_lib.streaming_accuracy,
prediction_key=_CLASSES,
weight_key=self._weight_column_name)})
if self._n_classes == 2:
metrics.update({
"auc": metric_spec.MetricSpec(
metric_fn=metrics_lib.streaming_auc,
prediction_key=_LOGISTIC,
weight_key=self._weight_column_name)})
return self._estimator.evaluate(
x=x, y=y, input_fn=input_fn, feed_fn=feed_fn, batch_size=batch_size,
steps=steps, metrics=metrics, name=name)
@deprecated_arg_values(
estimator.AS_ITERABLE_DATE, estimator.AS_ITERABLE_INSTRUCTIONS,
as_iterable=False)
def predict(self, x=None, input_fn=None, batch_size=None, as_iterable=False):
"""Returns predicted classes for given features.
Args:
x: features.
input_fn: Input function. If set, x must be None.
batch_size: Override default batch size.
as_iterable: If True, return an iterable which keeps yielding predictions
for each example until inputs are exhausted. Note: The inputs must
terminate if you want the iterable to terminate (e.g. be sure to pass
num_epochs=1 if you are using something like read_batch_features).
Returns:
Numpy array of predicted classes (or an iterable of predicted classes if
as_iterable is True).
"""
preds = self._estimator.predict(x=x, input_fn=input_fn,
batch_size=batch_size, outputs=[_CLASSES],
as_iterable=as_iterable)
if as_iterable:
return _as_iterable(preds, output=_CLASSES)
return preds[_CLASSES].reshape(-1)
@deprecated_arg_values(
estimator.AS_ITERABLE_DATE, estimator.AS_ITERABLE_INSTRUCTIONS,
as_iterable=False)
def predict_proba(
self, x=None, input_fn=None, batch_size=None, as_iterable=False):
"""Returns prediction probabilities for given features.
Args:
x: features.
input_fn: Input function. If set, x and y must be None.
batch_size: Override default batch size.
as_iterable: If True, return an iterable which keeps yielding predictions
for each example until inputs are exhausted. Note: The inputs must
terminate if you want the iterable to terminate (e.g. be sure to pass
num_epochs=1 if you are using something like read_batch_features).
Returns:
Numpy array of predicted probabilities (or an iterable of predicted
probabilities if as_iterable is True).
"""
preds = self._estimator.predict(x=x, input_fn=input_fn,
batch_size=batch_size,
outputs=[_PROBABILITIES],
as_iterable=as_iterable)
if as_iterable:
return _as_iterable(preds, output=_PROBABILITIES)
return preds[_PROBABILITIES]
def get_variable_names(self):
"""Returns list of all variable names in this model.
Returns:
List of names.
"""
return [name for name, _ in list_variables(self._model_dir)]
def get_variable_value(self, name):
"""Returns value of the variable given by name.
Args:
name: string, name of the tensor.
Returns:
`Tensor` object.
"""
return load_variable(self._model_dir, name)
def export(self,
export_dir,
input_fn=None,
input_feature_key=None,
use_deprecated_input_fn=True,
signature_fn=None,
default_batch_size=1,
exports_to_keep=None):
"""See BaseEstimator.export."""
def default_input_fn(unused_estimator, examples):
return layers.parse_feature_columns_from_examples(
examples, self._feature_columns)
return self._estimator.export(
export_dir=export_dir,
input_fn=input_fn or default_input_fn,
input_feature_key=input_feature_key,
use_deprecated_input_fn=use_deprecated_input_fn,
signature_fn=(
signature_fn or export.classification_signature_fn_with_prob),
prediction_key=_PROBABILITIES,
default_batch_size=default_batch_size,
exports_to_keep=exports_to_keep)
@property
def model_dir(self):
return self._model_dir
@property
@deprecated("2016-10-13", "This method inspects the private state of the "
"object, and should not be used")
def weights_(self):
hiddenlayer_weights = [checkpoints.load_variable(
self._model_dir, name=("dnn/hiddenlayer_%d/weights" % i))
for i, _ in enumerate(self._hidden_units)]
logits_weights = [checkpoints.load_variable(
self._model_dir, name="dnn/logits/weights")]
return hiddenlayer_weights + logits_weights
@property
@deprecated("2016-10-13", "This method inspects the private state of the "
"object, and should not be used")
def bias_(self):
hiddenlayer_bias = [checkpoints.load_variable(
self._model_dir, name=("dnn/hiddenlayer_%d/biases" % i))
for i, _ in enumerate(self._hidden_units)]
logits_bias = [checkpoints.load_variable(
self._model_dir, name="dnn/logits/biases")]
centered_bias = [checkpoints.load_variable(
self._model_dir, name=_CENTERED_BIAS_WEIGHT)]
return hiddenlayer_bias + logits_bias + centered_bias
@property
def config(self):
return self._estimator.config
class DNNRegressor(dnn_linear_combined.DNNLinearCombinedRegressor):
"""A regressor for TensorFlow DNN models.
Example:
```python
education = sparse_column_with_hash_bucket(column_name="education",
hash_bucket_size=1000)
occupation = sparse_column_with_hash_bucket(column_name="occupation",
hash_bucket_size=1000)
education_emb = embedding_column(sparse_id_column=education, dimension=16,
combiner="sum")
occupation_emb = embedding_column(sparse_id_column=occupation, dimension=16,
combiner="sum")
estimator = DNNRegressor(
feature_columns=[education_emb, occupation_emb],
hidden_units=[1024, 512, 256])
# Or estimator using the ProximalAdagradOptimizer optimizer with
# regularization.
estimator = DNNRegressor(
feature_columns=[education_emb, occupation_emb],
hidden_units=[1024, 512, 256],
optimizer=tf.train.ProximalAdagradOptimizer(
learning_rate=0.1,
l1_regularization_strength=0.001
))
# Input builders
  def input_fn_train(): # returns x, Y
pass
estimator.fit(input_fn=input_fn_train)
  def input_fn_eval(): # returns x, Y
pass
estimator.evaluate(input_fn=input_fn_eval)
estimator.predict(x=x)
```
  Input of `fit` and `evaluate` should have the following features,
otherwise there will be a `KeyError`:
* if `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
* for each `column` in `feature_columns`:
- if `column` is a `SparseColumn`, a feature with `key=column.name`
whose `value` is a `SparseTensor`.
- if `column` is a `WeightedSparseColumn`, two features: the first with
`key` the id column name, the second with `key` the weight column name.
Both features' `value` must be a `SparseTensor`.
- if `column` is a `RealValuedColumn`, a feature with `key=column.name`
whose `value` is a `Tensor`.
"""
def __init__(self,
hidden_units,
feature_columns,
model_dir=None,
weight_column_name=None,
optimizer=None,
activation_fn=nn.relu,
dropout=None,
gradient_clip_norm=None,
enable_centered_bias=None,
config=None):
"""Initializes a `DNNRegressor` instance.
Args:
hidden_units: List of hidden units per layer. All layers are fully
connected. Ex. `[64, 32]` means first layer has 64 nodes and second one
has 32.
feature_columns: An iterable containing all the feature columns used by
the model. All items in the set should be instances of classes derived
from `FeatureColumn`.
    model_dir: Directory to save model parameters, graph, etc. This can also
      be used to load checkpoints from the directory into an estimator to
      continue training a previously saved model.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
optimizer: An instance of `tf.Optimizer` used to train the model. If
`None`, will use an Adagrad optimizer.
activation_fn: Activation function applied to each layer. If `None`, will
use `tf.nn.relu`.
dropout: When not `None`, the probability we will drop out a given
coordinate.
gradient_clip_norm: A `float` > 0. If provided, gradients are clipped
to their global norm with this clipping ratio. See
`tf.clip_by_global_norm` for more details.
enable_centered_bias: A bool. If True, estimator will learn a centered
bias variable for each class. Rest of the model structure learns the
residual after centered bias.
config: `RunConfig` object to configure the runtime settings.
Returns:
A `DNNRegressor` estimator.
"""
if enable_centered_bias is None:
enable_centered_bias = True
dnn_linear_combined._changing_default_center_bias() # pylint: disable=protected-access
super(DNNRegressor, self).__init__(
model_dir=model_dir,
weight_column_name=weight_column_name,
dnn_feature_columns=feature_columns,
dnn_optimizer=optimizer,
dnn_hidden_units=hidden_units,
dnn_activation_fn=activation_fn,
dnn_dropout=dropout,
gradient_clip_norm=gradient_clip_norm,
enable_centered_bias=enable_centered_bias,
config=config)
self.feature_columns = feature_columns
self.optimizer = optimizer
self.activation_fn = activation_fn
self.dropout = dropout
self.hidden_units = hidden_units
self._feature_columns_inferred = False
@property
def weights_(self):
return self.dnn_weights_
@property
def bias_(self):
return self.dnn_bias_
|
|
# Software License Agreement (BSD License)
#
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Revision $Id$
import os
import itertools
import socket
import stat
import sys
try:
from xmlrpc.client import ServerProxy
except ImportError:
from xmlrpclib import ServerProxy
from os.path import isfile, isdir
import roslib.packages
import roslaunch
import roslaunch.netapi
from roswtf.environment import paths, is_executable
from roswtf.rules import warning_rule, error_rule
## check if a node cannot be located in its package
def roslaunch_missing_node_check(ctx):
nodes = []
for filename, rldeps in ctx.launch_file_deps.items():
nodes.extend(rldeps.nodes)
errors = []
for pkg, node_type in nodes:
paths = roslib.packages.find_node(pkg, node_type)
if not paths:
errors.append("node [%s] in package [%s]"%(node_type, pkg))
return errors
## check if two nodes with the same name exist in a package
def roslaunch_duplicate_node_check(ctx):
nodes = []
for filename, rldeps in ctx.launch_file_deps.items():
nodes.extend(rldeps.nodes)
warnings = []
for pkg, node_type in nodes:
paths = roslib.packages.find_node(pkg, node_type)
if len(paths) > 1:
warnings.append("node [%s] in package [%s]\n"%(node_type, pkg))
return warnings
def pycrypto_check(ctx):
try:
import Crypto
except ImportError as e:
return True
def paramiko_check(ctx):
try:
import paramiko
except ImportError as e:
return True
def paramiko_system_keys(ctx):
try:
import paramiko
ssh = paramiko.SSHClient()
try:
ssh.load_system_host_keys() #default location
except:
return True
except: pass
def paramiko_ssh(ctx, address, port, username, password):
try:
import paramiko
ssh = paramiko.SSHClient()
import roslaunch.remoteprocess
err_msg = roslaunch.remoteprocess.ssh_check_known_hosts(ssh, address, port, username=username)
if err_msg:
return err_msg
if not password: #use SSH agent
ssh.connect(address, port, username)
else: #use SSH with login/pass
ssh.connect(address, port, username, password)
except paramiko.BadHostKeyException:
return "Unable to verify host key for [%s:%s]"%(address, port)
except paramiko.AuthenticationException:
return "Authentication to [%s:%s] failed"%(address, port)
except paramiko.SSHException as e:
return "[%s:%s]: %s"%(address, port, e)
except ImportError:
pass
def _load_roslaunch_config(ctx):
config = roslaunch.ROSLaunchConfig()
loader = roslaunch.XmlLoader()
# TODO load roscore
for launch_file in ctx.launch_files:
loader.load(launch_file, config, verbose=False)
try:
config.assign_machines()
except roslaunch.RLException as e:
return config, []
machines = []
for n in itertools.chain(config.nodes, config.tests):
if n.machine not in machines:
machines.append(n.machine)
return config, machines
def roslaunch_load_check(ctx):
config = roslaunch.ROSLaunchConfig()
loader = roslaunch.XmlLoader()
# TODO load roscore
for launch_file in ctx.launch_files:
loader.load(launch_file, config, verbose=False)
try:
config.assign_machines()
except roslaunch.RLException as e:
return str(e)
def roslaunch_machine_name_check(ctx):
config, machines = _load_roslaunch_config(ctx)
bad = []
for m in machines:
try:
#TODO IPV6: only check for IPv6 when IPv6 is enabled
socket.getaddrinfo(m.address, 0, 0, 0, socket.SOL_TCP)
except socket.gaierror:
bad.append(m.address)
return ''.join([' * %s\n'%b for b in bad])
def roslaunch_ssh_check(ctx):
import roslaunch.core
if not ctx.launch_files:
return # not relevant
config, machines = _load_roslaunch_config(ctx)
err_msgs = []
for m in machines:
socket.setdefaulttimeout(3.)
# only check if the machine requires ssh to connect
if not roslaunch.core.is_machine_local(m):
err_msg = paramiko_ssh(ctx, m.address, m.ssh_port, m.user, m.password)
if err_msg:
err_msgs.append(err_msg)
return err_msgs
def roslaunch_missing_pkgs_check(ctx):
# rospack depends does not return depends that it cannot find, so
# we have to manually determine this
config, machines = _load_roslaunch_config(ctx)
missing = []
for n in config.nodes:
pkg = n.package
try:
roslib.packages.get_pkg_dir(pkg, required=True)
except:
missing.append(pkg)
return missing
def roslaunch_config_errors(ctx):
config, machines = _load_roslaunch_config(ctx)
return config.config_errors
def roslaunch_missing_deps_check(ctx):
missing = []
for pkg, miss in ctx.launch_file_missing_deps.items():
if miss:
missing.append("%s/manifest.xml: %s"%(pkg, ', '.join(miss)))
return missing
def roslaunch_respawn_check(ctx):
respawn = []
for uri in ctx.roslaunch_uris:
try:
r = ServerProxy(uri)
code, msg, val = r.list_processes()
active, _ = val
respawn.extend([a for a in active if a[1] > 1])
#TODO: children processes
#code, msg, val = r.list_children()
except:
pass # error for another rule
return ["%s (%s)"%(a[0], a[1]) for a in respawn]
def roslaunch_uris_check(ctx):
# check for any roslaunch processes that cannot be contacted
bad = []
# uris only contains the parent launches
for uri in ctx.roslaunch_uris:
try:
r = ServerProxy(uri)
code, msg, val = r.list_children()
# check the children launches
if code == 1:
for child_uri in val:
try:
                        r = ServerProxy(child_uri)
code, msg, val = r.get_pid()
except:
bad.append(child_uri)
except:
bad.append(uri)
return bad
def roslaunch_dead_check(ctx):
dead = []
for uri in ctx.roslaunch_uris:
try:
r = ServerProxy(uri)
code, msg, val = r.list_processes()
_, dead_list = val
dead.extend([d[0] for d in dead_list])
#TODO: children processes
#code, msg, val = r.list_children()
except:
pass # error for another rule
return dead
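# Each entry below pairs a check function with the message used when reporting its
# findings; wtf_check_static/wtf_check_online hand the (rule, check output, ctx)
# triple to warning_rule/error_rule for formatting.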
online_roslaunch_warnings = [
(roslaunch_respawn_check,"These nodes have respawned at least once:"),
(roslaunch_dead_check,"These nodes have died:"),
# disabling for now as roslaunches don't do cleanup
#(roslaunch_uris_check,"These roslaunch processes can no longer be contacted and may have exited:"),
]
online_roslaunch_errors = [
(roslaunch_ssh_check,"SSH failures:"),
]
static_roslaunch_warnings = [
(roslaunch_duplicate_node_check, "Multiple nodes of same name in packages:"),
(pycrypto_check, "pycrypto is not installed"),
(paramiko_check, "paramiko is not installed"),
(paramiko_system_keys, "cannot load SSH host keys -- your known_hosts file may be corrupt") ,
(roslaunch_config_errors, "Loading your launch files reported the following configuration errors:"),
]
static_roslaunch_errors = [
# Disabling, because we've removed package dependencies from manifests.
#(roslaunch_missing_deps_check,
# "Package %(pkg)s is missing roslaunch dependencies.\nPlease add the following tags to %(pkg)s/manifest.xml:"),
(roslaunch_missing_pkgs_check,
"Cannot find the following required packages:"),
(roslaunch_missing_node_check, "Several nodes in your launch file could not be located. These are either typed incorrectly or need to be built:"),
(roslaunch_machine_name_check,"Cannot resolve the following hostnames:"),
(roslaunch_load_check, "roslaunch load failed"),
]
def wtf_check_static(ctx):
if not ctx.launch_files:
return
#NOTE: roslaunch files are already loaded separately into context
#TODO: check each machine name
#TODO: bidirectional ping for each machine
for r in static_roslaunch_warnings:
warning_rule(r, r[0](ctx), ctx)
for r in static_roslaunch_errors:
error_rule(r, r[0](ctx), ctx)
def _load_online_ctx(ctx):
ctx.roslaunch_uris = roslaunch.netapi.get_roslaunch_uris()
def wtf_check_online(ctx):
_load_online_ctx(ctx)
for r in online_roslaunch_warnings:
warning_rule(r, r[0](ctx), ctx)
for r in online_roslaunch_errors:
error_rule(r, r[0](ctx), ctx)
|
|
# -*- coding:utf-8 -*-
"""
pygcm.request
~~~~~~~~~~~~~
Request related modules.
"""
import json
try:
    from collections.abc import Iterable  # Python 3
except ImportError:
    from collections import Iterable  # Python 2
from pygcm.exceptions import GCMException
from pygcm.compat import urllib2, urlencode, basestring
from pygcm.base_config import SENDER_URL, DEFAULT_ENCODING
# HTTP request constants declaration
def enum(**enums):
return type('Enum', (), enums)
method = enum(
get = 'GET',
post = 'POST',
put = 'PUT',
delete = 'DELETE',
patch = 'PATCH'
)
status_code = enum(
success = 200,
invalid_field = 400,
auth_failed = 401,
internal_error = 500,
service_unavailable = 503
)
status_group = enum(
fail = [status_code.auth_failed,
status_code.invalid_field],
success = [status_code.success],
retryable = [status_code.internal_error,
status_code.service_unavailable]
)
class RequestHandler(object):
"""Requests wrapper
Handles requests holding specific configuation"""
def __init__(self, **kwargs):
self._url = kwargs.get('url')
self._headers = kwargs.get('headers', {})
self._params = kwargs.get('params', {})
self.proxies = kwargs.get('proxies', {})
if self.proxies:
if isinstance(self.proxies, dict):
urllib2.install_opener(
urllib2.build_opener(
urllib2.ProxyHandler(self.proxies)))
@property
def url(self):
return self._url
@property
def headers(self):
return self._headers
@property
def params(self):
return self._params
@property
def ready(self):
"""Can add another 'ready' status """
return self._url is not None
def _send(self, request_type, headers=None, params=None):
"""Each send funtion sends a request.
If success, returns `dict` containing response information.
Returns `None` if retryable failure occured.
:param headers: should contains authorization header including api-key.
:param params: should contains device key. (Others are options)
"""
if request_type != method.post:
raise GCMException("Google does not support other methods yet")
if not self.ready:
raise GCMException("RequestHandler is not ready to send")
p = params or self._params
request = urllib2.Request(
self._url,
data=p.encode(DEFAULT_ENCODING),
headers=headers or self._headers
)
try:
resp = urllib2.urlopen(request)
except urllib2.HTTPError as e:
if e.code in status_group.fail:
raise GCMException(
"Request failed with unexpected error: code " + str(e.code)
)
if e.code in status_group.retryable:
return None
raise GCMException(e)
return json.loads(resp.read())
def post(self, headers=None, params=None):
return self._send(method.post, headers=headers, params=params)
class RequestBuilder(object):
"""RequestBuilder for GCM.
Can add various data into request params."""
_HEADERS = ['Content-Type', 'Authorization']
_PARAMS = [
'registration_ids', 'collapse_key',
'data', 'delay_while_idle', 'time_to_live'
]
_CONTENT_TYPE_JSON = 'application/json'
def __init__(self, api_key, content_type=None):
"""Initialize request builder.
Auth key should be prefixed by 'key='.
Default content type is 'json'
"""
content_type = content_type or self._CONTENT_TYPE_JSON
if not isinstance(api_key, basestring):
raise GCMException("Invalid api key")
auth_key = 'key=' + api_key
self._url = SENDER_URL
self._headers = dict.fromkeys(self._HEADERS, None)
self._params = dict.fromkeys(self._PARAMS, None)
self._data = dict()
self._construct_headers(auth_key, content_type)
def _construct_headers(self, authorization, content_type):
self._headers.update({
'Content-Type' : content_type,
'Authorization' : authorization
})
def add_devices(self, ids):
if not isinstance(ids, Iterable):
raise GCMException("Should add list object in id params.")
self._params.update({'registration_ids' : ids})
def add_whole_data(self, data):
self._params.update({'data' : data})
def add_devices_and_rebuild(self, ids):
self.add_devices(ids)
return self.build()
def add_options(self, collapse_key=None,
delay_while_idle=None, time_to_live=None):
self._params.update({
'collapse_key' : collapse_key,
'delay_while_idle' : delay_while_idle,
'time_to_live' : time_to_live
})
def add_data(self, k, v):
self._data.update({k : v})
def add_message(self, msg):
self.add_data('message', msg)
def add_headers(self, k, v):
self._headers.update({k : v})
def _remove_option(self, k):
if self._params.get(k) is None:
self._params.pop(k, None)
def _clean_params(self):
        for k in self._PARAMS:
            self._remove_option(k)
def _get_content_type(self):
return self._headers.get('Content-Type', '')
def build(self):
self._clean_params()
self._params.update({'data' : self._data})
params = json.dumps(self._params) \
if self._json_request() else urlencode(self._params)
return RequestHandler(url=self._url,
headers=self._headers,
params=params)
def _json_request(self):
"""Returns True if request content type of request is json"""
return 'json' in self._get_content_type()
def flush(self):
self._params = dict.fromkeys(self._PARAMS, None)
self._data = dict()
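# Example usage (a minimal sketch; the api key, registration id and message below
# are placeholders, not real values):
#
#   builder = RequestBuilder(api_key="<api-key>")
#   builder.add_message("hello")
#   builder.add_options(time_to_live=60)
#   handler = builder.add_devices_and_rebuild(["<registration-id>"])
#   response = handler.post()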
|
|
#!/usr/bin/python
#
# Copyright (c) 2011 The Bitcoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
pp = pprint.PrettyPrinter(indent=4)
class BitcoinRPC:
OBJID = 1
def __init__(self, host, port, username, password):
authpair = "%s:%s" % (username, password)
self.authhdr = "Basic %s" % (base64.b64encode(authpair))
self.conn = httplib.HTTPConnection(host, port, False, 30)
def rpc(self, method, params=None):
self.OBJID += 1
obj = { 'version' : '1.1',
'method' : method,
'id' : self.OBJID }
if params is None:
obj['params'] = []
else:
obj['params'] = params
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
resp = self.conn.getresponse()
if resp is None:
print "JSON-RPC: no response"
return None
body = resp.read()
resp_obj = json.loads(body)
if resp_obj is None:
print "JSON-RPC: cannot JSON-decode body"
return None
if 'error' in resp_obj and resp_obj['error'] != None:
return resp_obj['error']
if 'result' not in resp_obj:
print "JSON-RPC: no result in object"
return None
return resp_obj['result']
def getblockcount(self):
return self.rpc('getblockcount')
def getwork(self, data=None):
return self.rpc('getwork', data)
def uint32(x):
return x & 0xffffffffL
def bytereverse(x):
return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |
(((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))
def bufreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
word = struct.unpack('@I', in_buf[i:i+4])[0]
out_words.append(struct.pack('@I', bytereverse(word)))
return ''.join(out_words)
def wordreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
out_words.append(in_buf[i:i+4])
out_words.reverse()
return ''.join(out_words)
class Miner:
def __init__(self, id):
self.id = id
self.max_nonce = MAX_NONCE
def work(self, datastr, targetstr):
# decode work data hex string to binary
static_data = datastr.decode('hex')
static_data = bufreverse(static_data)
# the first 76b of 80b do not change
blk_hdr = static_data[:76]
# decode 256-bit target value
targetbin = targetstr.decode('hex')
targetbin = targetbin[::-1] # byte-swap and dword-swap
targetbin_str = targetbin.encode('hex')
target = long(targetbin_str, 16)
# pre-hash first 76b of block header
static_hash = hashlib.sha256()
static_hash.update(blk_hdr)
for nonce in xrange(self.max_nonce):
# encode 32-bit nonce value
nonce_bin = struct.pack("<I", nonce)
# hash final 4b, the nonce value
hash1_o = static_hash.copy()
hash1_o.update(nonce_bin)
hash1 = hash1_o.digest()
# sha256 hash of sha256 hash
hash_o = hashlib.sha256()
hash_o.update(hash1)
hash = hash_o.digest()
# quick test for winning solution: high 32 bits zero?
if hash[-4:] != '\0\0\0\0':
continue
# convert binary hash to 256-bit Python long
hash = bufreverse(hash)
hash = wordreverse(hash)
hash_str = hash.encode('hex')
l = long(hash_str, 16)
# proof-of-work test: hash < target
if l < target:
print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,)
return (nonce + 1, nonce_bin)
else:
print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,)
# return (nonce + 1, nonce_bin)
return (nonce + 1, None)
def submit_work(self, rpc, original_data, nonce_bin):
nonce_bin = bufreverse(nonce_bin)
nonce = nonce_bin.encode('hex')
solution = original_data[:152] + nonce + original_data[160:256]
param_arr = [ solution ]
result = rpc.getwork(param_arr)
print time.asctime(), "--> Upstream RPC result:", result
def iterate(self, rpc):
work = rpc.getwork()
if work is None:
time.sleep(ERR_SLEEP)
return
if 'data' not in work or 'target' not in work:
time.sleep(ERR_SLEEP)
return
time_start = time.time()
(hashes_done, nonce_bin) = self.work(work['data'],
work['target'])
time_end = time.time()
time_diff = time_end - time_start
self.max_nonce = long(
(hashes_done * settings['scantime']) / time_diff)
if self.max_nonce > 0xfffffffaL:
self.max_nonce = 0xfffffffaL
if settings['hashmeter']:
print "HashMeter(%d): %d hashes, %.2f Khash/sec" % (
self.id, hashes_done,
(hashes_done / 1000.0) / time_diff)
if nonce_bin is not None:
self.submit_work(rpc, work['data'], nonce_bin)
def loop(self):
rpc = BitcoinRPC(settings['host'], settings['port'],
settings['rpcuser'], settings['rpcpass'])
if rpc is None:
return
while True:
self.iterate(rpc)
def miner_thread(id):
miner = Miner(id)
miner.loop()
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: pyminer.py CONFIG-FILE"
sys.exit(1)
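	# Example CONFIG-FILE contents (a sketch; keys match those read below):
	#   host=127.0.0.1
	#   port=9335
	#   rpcuser=someuser
	#   rpcpass=somepass
	#   threads=2
	#   hashmeter=1
	#   scantime=30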
f = open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 9335
if 'threads' not in settings:
settings['threads'] = 1
if 'hashmeter' not in settings:
settings['hashmeter'] = 0
if 'scantime' not in settings:
settings['scantime'] = 30L
if 'rpcuser' not in settings or 'rpcpass' not in settings:
print "Missing username and/or password in cfg file"
sys.exit(1)
settings['port'] = int(settings['port'])
settings['threads'] = int(settings['threads'])
settings['hashmeter'] = int(settings['hashmeter'])
settings['scantime'] = long(settings['scantime'])
thr_list = []
for thr_id in range(settings['threads']):
p = Process(target=miner_thread, args=(thr_id,))
p.start()
thr_list.append(p)
time.sleep(1) # stagger threads
print settings['threads'], "mining threads started"
print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
try:
for thr_proc in thr_list:
thr_proc.join()
except KeyboardInterrupt:
pass
print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
author_match_filter.py -- find the authors in VIVO, and match them to authors
in the source. This is used to match the pubs to an author.
There are two inputs:
- authors in VIVO keyed by name parts.
- authors in the source keyed by name parts.
There are two cases:
1. The source indicates the author is not a UF author. In this case, the
author will automatically be added as a "stub."
No attempt is made to match stubs. This leads to proliferation of stubs
but is consistent with "Assert what you know" and avoids assuming two stubs
are the same.
2. The source indicates the author is at UF. In this case, extensive
disambiguation matching occurs, based on name and name parts. If no match
occurs, the author will be added as a UFEntity. If multiple matches occur,
one is selected at random and a disambiguation report entry is produced
showing all the possible matches and the one that was selected. Many
disambiguation cases involve two URIs. Randomly selecting one potentially cuts the
effort to assign these by half. If exactly one match occurs, the match is made
and the URI is provided in the update data.
NOTE: Add checking so that if a pub is missing an ISSN, the DOI of the publication is output for manual inspection.
"""
__author__ = "Alex Loiacono and Nicholas Rejack"
__copyright__ = "Copyright 2015 (c) Alex Loiacono and Nicholas Rejack"
__license__ = "New BSD License"
__version__ = "0.01"
from vivopump import read_csv_fp, write_csv_fp, get_parms, get_vivo_journals
import utils
import sys
def get_author_name_parts(author_data, max_list_length=50):
author_list = []
author_names = author_data.split(' and ')
list_length = 0
for display_name in author_names:
list_length += 1
if list_length > max_list_length:
break
# occasional leading '-' before some initials
display_name = display_name.replace(' -', ' ')
author_dict = {'display_name': display_name,
'suffix': '',
'corresponding': 'false',
'uf': 'false'}
if ' Jr.,' in display_name:
author_dict['suffix'] = 'Jr.'
display_name = display_name.replace(' Jr.,', '')
if ' III,' in display_name:
author_dict['suffix'] = 'III'
display_name = display_name.replace(' III,', '')
if ',' in display_name:
k = display_name.find(',')
author_dict['last'] = display_name[0:k]
remainder = display_name[k + 2:]
if ' ' in remainder:
k = remainder.find(' ')
author_dict['first'] = remainder[0:k].replace('.', '')
author_dict['middle'] = remainder[k + 1:].replace('.', '')
else:
author_dict['first'] = remainder.replace('.', '')
author_dict['middle'] = ''
else:
author_dict['last'] = display_name
author_dict['first'] = ''
author_dict['middle'] = ''
author_list.append(author_dict)
utils.print_err("{} Authors in list: {}".format(len(author_list), author_list))
return author_list
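# e.g. get_author_name_parts("Smith, John A. and Doe, Jane") returns two author
# dicts with the names split into last/first/middle parts:
# ("Smith", "John", "A") and ("Doe", "Jane", "").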
def get_author_uris(author_row_data,title,disamb_dict,paper_uri):
author_list_out = []
author_data = get_author_name_parts(author_row_data)
#utils.print_err("Author data is:\n {}".format(author_data))
for author in author_data:
utils.print_err("author is: \n{}".format(author))
author_uris = utils.get_author_disambiguation_data(
vivo_auth_disambig_data,
author['last'],
author['first'],
author['middle'])
#utils.print_err("author_uris: \n{}".format(author_uris))
count = len(author_uris)
if count == 1:
author_list_builder = author_uris[0]
else:
author_list_builder = author_uris[0]
utils.print_err("Disamb: {}".format(author_uris))
disamb_dict.append("Paper: {} -- at {}\n{} : \n{} \n\n".format(title, paper_uri, author['display_name'], author_uris))
if len(author_list_out) == 0:
author_list_out = author_list_builder
utils.print_err("author_list_out: \n{}".format(author_list_out))
elif len(author_list_out) >=1 and len(author_list_builder) >0:
author_list_out += ";"
author_list_out += author_list_builder
return author_list_out
def get_author_affiliation(affiliation_row_data):
from vivopump import replace_initials
affiliation_list_out = []
affiliation_parts = affiliation_row_data.split('. ')
utils.print_err("affiliation_parts = \n{}".format(affiliation_parts))
for affiliation in affiliation_parts:
utils.print_err("affiliation = \n{}".format(affiliation))
if '(Reprint Author)' in affiliation:
utils.print_err("\nReprint Author found \n")
if len(affiliation_list_out) > 0:
affiliation_list_out += ';true'
else:
utils.print_err("\naffiliation_list_out < 0\n")
affiliation_list_out = 'true'
else:
if len(affiliation_list_out) > 0:
affiliation_list_out += ';false'
else:
affiliation_list_out = 'false'
return affiliation_list_out
parms = get_parms()
# Create file to hold disambiguation data
disamb_file = open('disambiguation.txt', 'w+')
disamb_dict = []
# Piped in file
data_in = read_csv_fp(sys.stdin)
print >>sys.stderr, len(data_in)
# file_name = '/Users/asura/git/vivo-pump/author_list.csv'
# @TODO: pass file name path as a command line parameter
file_name = 'vivo_author_list.csv'
#utils.print_err("Using static disambiguation file: {}".format(file_name))
vivo_journals = get_vivo_journals(parms)
# get dictionaries of authors keyed by name parts
vivo_auth_disambig_data = utils.get_vivo_disambiguation_data_from_csv(
file_name)
utils.print_err("Finished loading {} entries from: {}"
.format(len(vivo_auth_disambig_data), file_name))
data_out = {}
row_out = 0
for row_index, row_data in data_in.items():
utils.print_err("\nrow_index is: \n{}".format(row_index))
utils.print_err("\nrow_data is: \n{}".format(row_data))
data_out['author'] = get_author_uris(row_data['author'],row_data['title'],disamb_dict,row_data['uri'])
data_out['affiliation'] = get_author_affiliation(row_data['affiliation'])
try:
if len(vivo_journals.get(row_data['issn'])) > 0:
issn_uri = vivo_journals.get(row_data['issn'])
else:
utils.print_err("\nISSN not found: {}\n".format(row_data['issn']))
issn_uri = ''
except TypeError:
continue
# try:
# issn_uri = vivo_journals.get(row_data['issn'])
# except KeyError:
# utils.print_err("\nISSN not found: {}\n".format(row_data['issn']))
# issn_uri = ''
utils.print_err("data_out is: \n{}".format(data_out))
data_in[row_index]['author'] = data_out['author']
data_in[row_index]['affiliation'] = data_out['affiliation']
data_in[row_index]['journal'] = issn_uri
data_in[row_index].pop('issn')
for line in disamb_dict:
disamb_file.write(line)
disamb_file.close()
write_csv_fp(sys.stdout, data_in)
|
|
from pyscriptic import settings, submit
class ContainerType(object):
"""
Lists information about a particular container type, such as a 96-well
plate.
Attributes
----------
title : str
wells : int
Number of wells in the container. Wells can be referenced using either
a 1-indexed number (i.e. 1-96) or by row and column (i.e. A1-H12).
max_well_capacity : float
Max volume capacity for each well, in microliters.
well_dead_volume : float
Well dead volume, in microliters.
capabilities : list of str
List of supported instructions that can be performed on this container
type.
"""
def __init__(self, title, wells, max_well_capacity, well_dead_volume,
capabilities):
self.title = title
self.wells = wells
self.max_well_capacity = max_well_capacity
self.well_dead_volume = well_dead_volume
self.capabilities = capabilities
# XXX: Which attribute is the storage name? (i.e. "warm_37") .location?
class ContainerProperties(object):
"""
Lists the properties about a particular instance of a container, such as
where that container is stored and what liquids it contains.
Attributes
----------
container_id : str
location : str
container_type : :class:`pyscriptic.containers.ContainerType`
well_count : int
well_type : str
well_depth_mm : int
well_volume_ul : int
well_coating : str
sterile : bool
device : :class:`pyscriptic.containers.ContainerDevice`
aliquots : list of :class:`pyscriptic.containers.ContainerAliquot`
"""
def __init__(self, container_id, location, container_type, well_count,
well_type, well_depth_mm, well_volume_ul, well_coating,
sterile, device, aliquots):
self.container_id = container_id
self.location = location
self.container_type = container_type
self.well_count = well_count
self.well_type = well_type
self.well_depth_mm = well_depth_mm
self.well_volume_ul = well_volume_ul
self.well_coating = well_coating
self.sterile = sterile
self.device = device
self.aliquots = aliquots
class ContainerDevice(object):
"""
Attributes
----------
device_id : str
name : str
make : str
model : str
device_class : str
"""
def __init__(self, device_id, name, make, model, device_class):
self.device_id = device_id
self.name = name
self.make = make
self.model = model
self.device_class = device_class
class ContainerAliquot(object):
"""
Attributes
----------
aliquot_id : str
volume_ul : float
concentration_um : float
mass_mg : float
created_by_run_id : str
well_idx : str
"""
def __init__(self, aliquot_id, volume_ul, concentration_um, mass_mg,
created_by_run_id, well_idx):
self.aliquot_id = aliquot_id
self.volume_ul = volume_ul
self.concentration_um = concentration_um
self.mass_mg = mass_mg
self.created_by_run_id = created_by_run_id
self.well_idx = well_idx
CONTAINERS = {
"96-pcr": ContainerType(
"96 well V-bottom (PCR) plate",
96, 160, 15,
["pipette", "sangerseq", "spin", "thermocycle", "incubate",
"gel_separate"],
),
"96-flat": ContainerType(
"96 well flat-bottom optically clear plate",
96, 360, 20,
["pipette", "sangerseq", "spin", "absorbance", "fluorescence",
"luminescence", "incubate", "gel_separate"],
),
"96-flat-uv": ContainerType(
"96 well flat-bottom UV transparent plate",
96, 360, 20,
["pipette", "sangerseq", "spin", "absorbance", "fluorescence",
"luminescence", "incubate", "gel_separate"],
),
"96-deep": ContainerType(
"96 well flat-bottom extended capacity optically opaque plate",
96, 2000, 15,
["pipette", "sangerseq", "spin", "incubate", "gel_separate"],
),
"384-pcr": ContainerType(
"384 well V-bottom (PCR) plate",
384, 50, 8,
["pipette", "sangerseq", "spin", "thermocycle", "incubate",
"gel_separate"],
),
"384-flat": ContainerType(
"384 well flat-bottom optically clear plate",
384, 112, 12,
["pipette", "sangerseq", "spin", "absorbance", "fluorescence",
"luminescence", "incubate", "gel_separate"],
),
"pcr-0.5": ContainerType(
"0.5 mL PCR tube",
1, 500, 15,
["pipette", "sangerseq", "spin", "incubate", "gel_separate"],
),
"micro-1.5": ContainerType(
"1.5 mL microtube",
1, 1500, 15,
["pipette", "sangerseq", "spin", "incubate", "gel_separate"],
),
"micro-2.0": ContainerType(
"2.0 mL microtube",
1, 2000, 15,
["pipette", "sangerseq", "spin", "incubate", "gel_separate"],
),
}
def _device_from_response(response):
"""
Parameters
----------
response : dict of str, str
Returns
-------
ContainerDevice
"""
return ContainerDevice(
device_id=response["id"],
name=response["name"],
make=response["make"],
model=response["model"],
device_class=response["device_class"],
)
def _aliquot_from_response(response):
"""
Parameters
----------
response : dict
Returns
-------
ContainerAliquot
"""
return ContainerAliquot(
aliquot_id=response["id"],
volume_ul=response["volume_ul"],
concentration_um=response["concentration_um"],
mass_mg=response["mass_mg"],
created_by_run_id=response["created_by_run_id"],
well_idx=response["well_idx"],
)
def _container_properties_from_response(response):
"""
Parameters
----------
    response : dict
Returns
-------
ContainerProperties
"""
assert response["container_type"] in CONTAINERS.keys()
return ContainerProperties(
container_id=response["id"],
location=response["location"],
container_type=CONTAINERS[response["container_type"]],
well_count=response["well_count"],
well_type=response["well_type"],
well_depth_mm=response["well_depth_mm"],
well_volume_ul=response["well_volume_ul"],
well_coating=response["well_coating"],
sterile=response["sterile"],
device=_device_from_response(response["device"]),
aliquots=[_aliquot_from_response(i) for i in response["aliquots"]],
)
def get_container(container_id):
"""
Retrieves information about a given container available within the
currently active organization.
Parameters
----------
container_id : str
Returns
-------
:class:`pyscriptic.containers.ContainerProperties`
Notes
-----
.. [1] https://www.transcriptic.com/platform/#containers_show
"""
url = "{}/containers/{}".format(
settings.get_organization(),
container_id,
)
response = submit.get_request(
url,
)
return _container_properties_from_response(response)
def list_containers():
"""
Lists all containers available within the currently active organization.
Returns
-------
list of :class:`pyscriptic.containers.ContainerProperties`
Notes
-----
.. [1] https://www.transcriptic.com/platform/#containers_index
"""
url = "containers"
response = submit.get_request(
url,
)
return [_container_properties_from_response(i) for i in response]
def mail_container(container_id, address_id, condition):
"""
Sends a request to mail a container to a given address.
Parameters
----------
container_id : str
address_id : str
condition : str
Returns
-------
id : str
Notes
-----
.. [1] https://www.transcriptic.com/platform/#instr_storage
"""
assert condition in ["ambient", "dry_ice"]
url = "{}/containers/{}/mail".format(
settings.get_organization(),
container_id,
)
content = {
"address": address_id,
"condition": condition,
}
response = submit.post_request(
url,
content,
)
return response["id"]
|
|
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <[email protected]>
# License: http://pyasn1.sf.net/license.html
#
import sys
from pyasn1.type import tag, tagmap
from pyasn1 import error
__all__ = ['NamedType', 'OptionalNamedType', 'DefaultedNamedType', 'NamedTypes']
class NamedType(object):
"""Create named field object for a constructed ASN.1 type.
The |NamedType| object represents a single name and ASN.1 type of a constructed ASN.1 type.
|NamedType| objects are immutable and duck-type Python :class:`tuple` objects
holding *name* and *asn1Object* components.
Parameters
----------
name: :py:class:`str`
Field name
asn1Object:
ASN.1 type object
"""
isOptional = False
isDefaulted = False
def __init__(self, name, asn1Object):
self.__name = name
self.__type = asn1Object
self.__nameAndType = name, asn1Object
def __repr__(self):
return '%s(%r, %r)' % (self.__class__.__name__, self.__name, self.__type)
def __eq__(self, other):
return self.__nameAndType == other
def __ne__(self, other):
return self.__nameAndType != other
def __lt__(self, other):
return self.__nameAndType < other
def __le__(self, other):
return self.__nameAndType <= other
def __gt__(self, other):
return self.__nameAndType > other
def __ge__(self, other):
return self.__nameAndType >= other
def __hash__(self):
return hash(self.__nameAndType)
def __getitem__(self, idx):
return self.__nameAndType[idx]
def __iter__(self):
return iter(self.__nameAndType)
@property
def name(self):
return self.__name
@property
def asn1Object(self):
return self.__type
# Backward compatibility
def getName(self):
return self.name
def getType(self):
return self.asn1Object
class OptionalNamedType(NamedType):
__doc__ = NamedType.__doc__
isOptional = True
class DefaultedNamedType(NamedType):
__doc__ = NamedType.__doc__
isDefaulted = True
class NamedTypes(object):
"""Create a collection of named fields for a constructed ASN.1 type.
The NamedTypes object represents a collection of named fields of a constructed ASN.1 type.
*NamedTypes* objects are immutable and duck-type Python :class:`dict` objects
holding *name* as keys and ASN.1 type object as values.
Parameters
----------
*namedTypes: :class:`~pyasn1.type.namedtype.NamedType`
"""
def __init__(self, *namedTypes, **kwargs):
self.__namedTypes = namedTypes
self.__namedTypesLen = len(self.__namedTypes)
self.__minTagSet = self.__computeMinTagSet()
self.__nameToPosMap = self.__computeNameToPosMap()
self.__tagToPosMap = self.__computeTagToPosMap()
self.__ambiguousTypes = 'terminal' not in kwargs and self.__computeAmbiguousTypes() or {}
self.__uniqueTagMap = self.__computeTagMaps(unique=True)
self.__nonUniqueTagMap = self.__computeTagMaps(unique=False)
self.__hasOptionalOrDefault = bool([True for namedType in self.__namedTypes
if namedType.isDefaulted or namedType.isOptional])
self.__requiredComponents = frozenset(
[idx for idx, nt in enumerate(self.__namedTypes) if not nt.isOptional and not nt.isDefaulted]
)
self.__keys = frozenset([namedType.name for namedType in self.__namedTypes])
self.__values = tuple([namedType.asn1Object for namedType in self.__namedTypes])
self.__items = tuple([(namedType.name, namedType.asn1Object) for namedType in self.__namedTypes])
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__, ', '.join([repr(x) for x in self.__namedTypes])
)
def __eq__(self, other):
return self.__namedTypes == other
def __ne__(self, other):
return self.__namedTypes != other
def __lt__(self, other):
return self.__namedTypes < other
def __le__(self, other):
return self.__namedTypes <= other
def __gt__(self, other):
return self.__namedTypes > other
def __ge__(self, other):
return self.__namedTypes >= other
def __hash__(self):
return hash(self.__namedTypes)
def __getitem__(self, idx):
try:
return self.__namedTypes[idx]
except TypeError:
return self.__namedTypes[self.__nameToPosMap[idx]]
def __contains__(self, key):
return key in self.__nameToPosMap
def __iter__(self):
return (x[0] for x in self.__namedTypes)
if sys.version_info[0] <= 2:
def __nonzero__(self):
return self.__namedTypesLen > 0
else:
def __bool__(self):
return self.__namedTypesLen > 0
def __len__(self):
return self.__namedTypesLen
# Python dict protocol
def values(self):
return self.__values
def keys(self):
return self.__keys
def items(self):
return self.__items
def clone(self):
return self.__class__(*self.__namedTypes)
class PostponedError(object):
def __init__(self, errorMsg):
self.__errorMsg = errorMsg
def __getitem__(self, item):
raise error.PyAsn1Error(self.__errorMsg)
def __computeTagToPosMap(self):
tagToPosMap = {}
for idx, namedType in enumerate(self.__namedTypes):
tagMap = namedType.asn1Object.tagMap
if isinstance(tagMap, NamedTypes.PostponedError):
return tagMap
if not tagMap:
continue
for _tagSet in tagMap.presentTypes:
if _tagSet in tagToPosMap:
return NamedTypes.PostponedError('Duplicate component tag %s at %s' % (_tagSet, namedType))
tagToPosMap[_tagSet] = idx
return tagToPosMap
def __computeNameToPosMap(self):
nameToPosMap = {}
for idx, namedType in enumerate(self.__namedTypes):
if namedType.name in nameToPosMap:
return NamedTypes.PostponedError('Duplicate component name %s at %s' % (namedType.name, namedType))
nameToPosMap[namedType.name] = idx
return nameToPosMap
def __computeAmbiguousTypes(self):
ambigiousTypes = {}
partialAmbigiousTypes = ()
for idx, namedType in reversed(tuple(enumerate(self.__namedTypes))):
if namedType.isOptional or namedType.isDefaulted:
partialAmbigiousTypes = (namedType,) + partialAmbigiousTypes
else:
partialAmbigiousTypes = (namedType,)
if len(partialAmbigiousTypes) == len(self.__namedTypes):
ambigiousTypes[idx] = self
else:
ambigiousTypes[idx] = NamedTypes(*partialAmbigiousTypes, **dict(terminal=True))
return ambigiousTypes
def getTypeByPosition(self, idx):
"""Return ASN.1 type object by its position in fields set.
Parameters
----------
idx: :py:class:`int`
Field index
Returns
-------
:
ASN.1 type
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If given position is out of fields range
"""
try:
return self.__namedTypes[idx].asn1Object
except IndexError:
raise error.PyAsn1Error('Type position out of range')
def getPositionByType(self, tagSet):
"""Return field position by its ASN.1 type.
Parameters
----------
        tagSet: :class:`~pyasn1.type.tag.TagSet`
ASN.1 tag set distinguishing one ASN.1 type from others.
Returns
-------
: :py:class:`int`
ASN.1 type position in fields set
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If *tagSet* is not present or ASN.1 types are not unique within callee *NamedTypes*
"""
try:
return self.__tagToPosMap[tagSet]
except KeyError:
raise error.PyAsn1Error('Type %s not found' % (tagSet,))
def getNameByPosition(self, idx):
"""Return field name by its position in fields set.
Parameters
----------
        idx: :py:class:`int`
Field index
Returns
-------
: :py:class:`str`
Field name
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
            If given position is out of fields range
"""
try:
return self.__namedTypes[idx].name
except IndexError:
raise error.PyAsn1Error('Type position out of range')
def getPositionByName(self, name):
"""Return field position by filed name.
Parameters
----------
name: :py:class:`str`
Field name
Returns
-------
: :py:class:`int`
Field position in fields set
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If *name* is not present or not unique within callee *NamedTypes*
"""
try:
return self.__nameToPosMap[name]
except KeyError:
raise error.PyAsn1Error('Name %s not found' % (name,))
def getTagMapNearPosition(self, idx):
"""Return ASN.1 types that are allowed at or past given field position.
        Some ASN.1 serializations allow skipping optional and defaulted fields.
Some constructed ASN.1 types allow reordering of the fields. When recovering
such objects it may be important to know which types can possibly be
present at any given position in the field sets.
Parameters
----------
idx: :py:class:`int`
Field index
Returns
-------
: :class:`~pyasn1.type.tagmap.TagMap`
            Map of ASN.1 types allowed at the given field position
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If given position is out of fields range
"""
try:
return self.__ambiguousTypes[idx].tagMap
except KeyError:
raise error.PyAsn1Error('Type position out of range')
def getPositionNearType(self, tagSet, idx):
"""Return the closest field position where given ASN.1 type is allowed.
        Some ASN.1 serializations allow skipping optional and defaulted fields.
Some constructed ASN.1 types allow reordering of the fields. When recovering
such objects it may be important to know at which field position, in field set,
given *tagSet* is allowed at or past *idx* position.
Parameters
----------
tagSet: :class:`~pyasn1.type.tag.TagSet`
ASN.1 type which field position to look up
idx: :py:class:`int`
Field position at or past which to perform ASN.1 type look up
Returns
-------
: :py:class:`int`
Field position in fields set
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If *tagSet* is not present or not unique within callee *NamedTypes*
or *idx* is out of fields range
"""
try:
return idx + self.__ambiguousTypes[idx].getPositionByType(tagSet)
except KeyError:
raise error.PyAsn1Error('Type position out of range')
def __computeMinTagSet(self):
minTagSet = None
for namedType in self.__namedTypes:
asn1Object = namedType.asn1Object
try:
tagSet = asn1Object.minTagSet
except AttributeError:
tagSet = asn1Object.tagSet
if minTagSet is None or tagSet < minTagSet:
minTagSet = tagSet
return minTagSet or tag.TagSet()
@property
def minTagSet(self):
"""Return the minimal TagSet among ASN.1 type in callee *NamedTypes*.
Some ASN.1 types/serialization protocols require ASN.1 types to be
arranged based on their numerical tag value. The *minTagSet* property
returns that.
Returns
-------
        : :class:`~pyasn1.type.tag.TagSet`
Minimal TagSet among ASN.1 types in callee *NamedTypes*
"""
return self.__minTagSet
def __computeTagMaps(self, unique):
presentTypes = {}
skipTypes = {}
defaultType = None
for namedType in self.__namedTypes:
tagMap = namedType.asn1Object.tagMap
if isinstance(tagMap, NamedTypes.PostponedError):
return tagMap
for tagSet in tagMap:
if unique and tagSet in presentTypes:
return NamedTypes.PostponedError('Non-unique tagSet %s of %s at %s' % (tagSet, namedType, self))
presentTypes[tagSet] = namedType.asn1Object
skipTypes.update(tagMap.skipTypes)
if defaultType is None:
defaultType = tagMap.defaultType
elif tagMap.defaultType is not None:
return NamedTypes.PostponedError('Duplicate default ASN.1 type at %s' % (self,))
return tagmap.TagMap(presentTypes, skipTypes, defaultType)
@property
def tagMap(self):
"""Return a *TagMap* object from tags and types recursively.
Return a :class:`~pyasn1.type.tagmap.TagMap` object by
combining tags from *TagMap* objects of children types and
associating them with their immediate child type.
Example
-------
.. code-block:: python
OuterType ::= CHOICE {
innerType INTEGER
}
Calling *.tagMap* on *OuterType* will yield a map like this:
.. code-block:: python
Integer.tagSet -> Choice
"""
return self.__nonUniqueTagMap
@property
def tagMapUnique(self):
"""Return a *TagMap* object from unique tags and types recursively.
Return a :class:`~pyasn1.type.tagmap.TagMap` object by
combining tags from *TagMap* objects of children types and
associating them with their immediate child type.
Example
-------
.. code-block:: python
OuterType ::= CHOICE {
innerType INTEGER
}
Calling *.tagMapUnique* on *OuterType* will yield a map like this:
.. code-block:: python
Integer.tagSet -> Choice
Note
----
Duplicate *TagSet* objects found in the tree of children
        types would cause an error.
"""
return self.__uniqueTagMap
@property
def hasOptionalOrDefault(self):
return self.__hasOptionalOrDefault
@property
def namedTypes(self):
return iter(self.__namedTypes)
@property
def requiredComponents(self):
return self.__requiredComponents
|
|
#
# Copyright 2017 Ipsen Pharma. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
from googleapiclient.errors import HttpError
from yaml import load
from ga_autosetup.BatchAPICalls import BatchAPICalls
from ga_autosetup.GAServiceTools import *
from ga_autosetup.output import log, log_action
def format_with_analytics(text, account=None, webproperty=None):
account_name = account.name if account is not None else "?"
account_num = account.id if account is not None else "?"
property_name = webproperty.name if webproperty is not None else "?"
if webproperty is None:
website = "?"
domain = "?"
else:
website = webproperty.website()
split = website.split(".")
if len(split) >= 2:
domain = split[-2] + "." + split[-1]
else:
domain = website
return text.format(
account=account_name, account_num=account_num, property=property_name,
website=website, domain=domain)
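# Illustrative use of the placeholders above (the Account/WebProperty stand-ins
# are hypothetical; real objects come from GAServiceTools):
#   class FakeAccount(object):
#       name, id = "Demo", "12345"
#   class FakeProperty(object):
#       name = "Main site"
#       def website(self):
#           return "www.example.com"
#   format_with_analytics("{property} [{domain}]", FakeAccount(), FakeProperty())
#   # -> 'Main site [example.com]'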
def is_configured(entry, in_list):
return entry in in_list and in_list[entry] is not None
def json_for_view(yaml, account, webproperty):
json = dict(
name=format_with_analytics(yaml['name'], account, webproperty)
)
if is_configured('botFilteringEnabled', yaml):
json['botFilteringEnabled'] = yaml['botFilteringEnabled']
if is_configured('currency', yaml):
json['currency'] = yaml['currency']
return json
def json_for_hostname_filter(yaml, account, webproperty):
details_json = dict(
field=yaml['field'],
matchType=yaml['matchType'],
expressionValue=format_with_analytics(yaml['expressionValue'], account, webproperty)
)
if is_configured('caseSensitive', yaml):
details_json['caseSensitive'] = yaml['caseSensitive']
json = dict(name=format_with_analytics(yaml['name'], account, webproperty))
if yaml['include']:
json['type'] = "INCLUDE"
json['includeDetails'] = details_json
else:
json['type'] = "EXCLUDE"
json['excludeDetails'] = details_json
return json
def json_for_filter(yaml):
details_json = dict(
field=yaml['field'],
matchType=yaml['matchType'],
expressionValue=yaml['expressionValue']
)
if is_configured('caseSensitive', yaml):
details_json['caseSensitive'] = yaml['caseSensitive']
json = dict(name=yaml['name'])
if yaml['include']:
json['type'] = "INCLUDE"
json['includeDetails'] = details_json
else:
json['type'] = "EXCLUDE"
json['excludeDetails'] = details_json
return json
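# Illustrative filter entry and the resulting Management API body (keys taken
# from the accesses above; field/matchType values are hypothetical):
#   yaml = {'name': 'Exclude internal traffic', 'field': 'GEO_IP_ADDRESS',
#           'matchType': 'BEGINS_WITH', 'expressionValue': '10.0.',
#           'include': False}
#   json_for_filter(yaml)
#   # -> {'name': 'Exclude internal traffic', 'type': 'EXCLUDE',
#   #     'excludeDetails': {'field': 'GEO_IP_ADDRESS',
#   #                        'matchType': 'BEGINS_WITH',
#   #                        'expressionValue': '10.0.'}}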
def find_view_config(profile, account, webproperty, yaml):
for viewname in yaml:
if profile.name == format_with_analytics(yaml[viewname]['name'], account, webproperty):
if is_configured('filters', yaml[viewname]):
return yaml[viewname]['filters']
return None
# for filtername in yaml[viewname]['filters']:
return None
def define_views_and_filters(config_yaml, accounts):
"""
    Configure every artifact that will be linked to one another in a later pass
    (an illustrative configuration sketch follows this function)
:param config_yaml:
:param accounts:
"""
for account in accounts:
# Commands are not run just yet: they are all accumulated in a batch statement in
# order to optimize API calls
batch = BatchAPICalls()
# Users
if is_configured('users', config_yaml):
log("Manage users of {!s}", account)
try:
for email, rights in config_yaml['users'].items():
if str(rights).lower() == "remove":
batch.add(account.remove_user(email))
else:
batch.add(account.add_user(email, rights))
except HttpError as error:
log("Cannot manage users of {!s} caused by {!s}", account, error, color='yellow')
# Create default views
if is_configured('web_views', config_yaml):
log("Define default views for {!s}", account)
for webproperty in account.list_webproperties():
log("\tDefine default views for {!s}", webproperty)
# Views for WebProperties
for view_name in config_yaml['web_views']:
view_config_yaml = config_yaml['web_views'][view_name]
json = json_for_view(view_config_yaml, account, webproperty)
batch.add(webproperty.define_profile(view_config_yaml.get('update'), json))
# Create default filters
if is_configured('filters', config_yaml) or is_configured('hostname_filters',
config_yaml):
log("Define default filters for {!s}", account)
# View filters
if is_configured('filters', config_yaml):
for filtername in config_yaml['filters']:
json = json_for_filter(config_yaml['filters'][filtername])
batch.add(account.define_filter(json))
for webproperty in account.list_webproperties():
log("\tDefine default filters for {!s}", webproperty)
# Hostname filters
if is_configured('hostname_filters', config_yaml):
for filtername in config_yaml['hostname_filters']:
json = json_for_hostname_filter(
config_yaml['hostname_filters'][filtername], account,
webproperty)
batch.add(account.define_filter(json))
# Let's go! Run all commands for this pass and this account in batch
batch.execute()
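# Illustrative YAML layout consumed by define_views_and_filters (keys inferred
# from the accesses above; all names and values are hypothetical):
#   users:
#     "[email protected]": "READ_AND_ANALYZE"
#     "[email protected]": "remove"
#   web_views:
#     main:
#       name: "{property} - main view"
#       update: false
#       botFilteringEnabled: true
#       currency: "EUR"
#       filters:
#         - "Hostname {domain}"
#   filters:
#     internal:
#       name: "Exclude internal traffic"
#       field: "GEO_IP_ADDRESS"
#       matchType: "BEGINS_WITH"
#       expressionValue: "10.0."
#       include: false
#   hostname_filters:
#     hostname:
#       name: "Hostname {domain}"
#       field: "PAGE_HOSTNAME"
#       matchType: "CONTAINS"
#       expressionValue: "{domain}"
#       include: true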
def link_views_to_filters(config_yaml, accounts):
"""
Link artifacts created previously to one another
    :param config_yaml:
:param accounts:
"""
for account in accounts:
# Commands are not run just yet: they are all accumulated in a batch statement in
# order to optimize API calls
batch = BatchAPICalls()
if is_configured('web_views', config_yaml):
log("Apply default filters to default views for " + str(account))
for webproperty in account.list_webproperties():
log("\tApply default filters to default views for " + str(webproperty))
for profile in webproperty.list_profiles():
view_config = find_view_config(profile, account, webproperty,
config_yaml['web_views'])
if view_config:
for filtername in view_config:
account_filter = account.find_filter(
format_with_analytics(filtername, account, webproperty))
if account_filter:
batch.add(profile.add_filter(account_filter))
# Let's go! Run all commands for this pass and this account in batch
batch.execute()
def do_actions(config, actions_in_config, accounts):
    """
    Execute the remediation actions listed under *actions_in_config*
    :param config: parsed YAML configuration
    :param actions_in_config: configuration key holding the list of actions to run
    :param accounts:
    """
    if is_configured(actions_in_config, config):
        for account in accounts:
            # Commands are not run just yet: they are all accumulated in a batch
            # statement in order to optimize API calls
            batch = BatchAPICalls()
            log("Executing {} for {}", actions_in_config, account)
            for action_config in config[actions_in_config]:
                log("\tExecuting action: {}", action_config['label'])
                if action_config['action'].lower() == 'rename-views':
                    do_rename_views(batch, account, action_config)
                if action_config['action'].lower() == 'rename-filters':
                    do_rename_filters(batch, account, action_config)
                if action_config['action'].lower() == 'set-data-retention-ttl':
                    set_data_retention_ttl(batch, account, action_config)
            # Let's go! Run all commands for this pass and this account in batch
            batch.execute()
def do_rename_views(batch, account, action_config):
for webproperty in account.list_webproperties():
log("\t\tExamining views for {!s}", webproperty)
for profile in webproperty.list_profiles():
old_name_re = re.compile(format_with_analytics(action_config['from'], account, webproperty))
            if old_name_re.match(profile.name):
new_name = format_with_analytics(old_name_re.sub(action_config['to'], profile.name),
account, webproperty)
log_action("\t\t\tRenaming {!s} to '{}'", profile, new_name)
batch.add(profile.rename_to(new_name))
def do_rename_filters(batch, account, action_config):
old_name_re = re.compile(format_with_analytics(action_config['from'], account))
log("\t\tExamining filters for {!s}", account)
for filter in account.list_filters():
if old_name_re.match(filter.name):
new_name = format_with_analytics(old_name_re.sub(action_config['to'], filter.name), account)
log_action("\t\t\tRenaming {!s} to '{}'", filter, new_name)
batch.add(filter.rename_to(new_name))
def set_data_retention_ttl(batch, account, action_config):
log("\t\tExamining properties for {!s}", account)
for webproperty in account.list_webproperties():
if webproperty.dataRetentionTtl != action_config['to']:
log_action("\t\t\tChanging retention on {!s} to '{!s}'", webproperty, action_config['to'])
batch.add(webproperty.change_retention(action_config['to']))
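# Illustrative 'actions_before' / 'actions_after' entries (keys inferred from
# the handlers above; labels, patterns and the TTL value are hypothetical):
#   actions_before:
#     - label: "Normalize view names"
#       action: "rename-views"
#       from: "^Old {property}.*"
#       to: "{property} - main view"
#     - label: "Extend data retention"
#       action: "set-data-retention-ttl"
#       to: "MONTHS_50"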
if __name__ == '__main__':
# Read configuration from YAML file
if len(sys.argv) == 1:
log("Usage: {} <config yaml file> <account id>, <account id>...", sys.argv[0], color='red')
exit(1)
try:
with open(sys.argv[1], 'r') as stream:
config_yaml = load(stream)
except FileNotFoundError as exc:
log("Error reading yaml configuration file: {}", sys.argv[1], color='red')
exit(1)
selected_accounts = sys.argv[2:]
log_action("Running {} for accounts {!s}", sys.argv[1], selected_accounts)
# Authenticate and construct service boilerplate
service = GAServiceTools()
# First pass on all accounts.
# Execute all remediation actions
do_actions(config_yaml, 'actions_before', service.list_accounts(selected_accounts))
# Second pass on all accounts.
# Configure every artifact that will be linked to one another in next pass
# Ask for accounts list again in order to clear caches for this pass
define_views_and_filters(config_yaml, service.list_accounts(selected_accounts))
# Third pass on all accounts.
# link artifacts created previously to one another
# Ask for accounts list again in order to clear caches for this pass
link_views_to_filters(config_yaml, service.list_accounts(selected_accounts))
# Last pass on all accounts.
# Execute all remediation actions
do_actions(config_yaml, 'actions_after', service.list_accounts(selected_accounts))
|
|
# Copyright (C) 2009 Google Inc. All rights reserved.
# Copyright (C) 2009 Apple Inc. All rights reserved.
# Copyright (C) 2011 Daniel Bates ([email protected]). All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import atexit
import os
import shutil
import unittest
from webkitpy.common.system.executive import Executive, ScriptError
from webkitpy.common.system.executive_mock import MockExecutive
from webkitpy.common.system.filesystem import FileSystem
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.checkout.scm.detection import detect_scm_system
from webkitpy.common.checkout.scm.git import Git, AmbiguousCommitError
from webkitpy.common.checkout.scm.scm import SCM
class SCMTestBase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(SCMTestBase, self).__init__(*args, **kwargs)
self.scm = None
self.executive = None
self.fs = None
self.original_cwd = None
def setUp(self):
self.executive = Executive()
self.fs = FileSystem()
self.original_cwd = self.fs.getcwd()
def tearDown(self):
self._chdir(self.original_cwd)
def _join(self, *comps):
return self.fs.join(*comps)
def _chdir(self, path):
self.fs.chdir(path)
def _mkdir(self, path):
assert not self.fs.exists(path)
self.fs.maybe_make_directory(path)
def _mkdtemp(self, **kwargs):
return str(self.fs.mkdtemp(**kwargs))
def _remove(self, path):
self.fs.remove(path)
def _rmtree(self, path):
self.fs.rmtree(path)
def _run(self, *args, **kwargs):
return self.executive.run_command(*args, **kwargs)
def _run_silent(self, args, **kwargs):
self.executive.run_command(args, **kwargs)
def _write_text_file(self, path, contents):
self.fs.write_text_file(path, contents)
def _write_binary_file(self, path, contents):
self.fs.write_binary_file(path, contents)
def _make_diff(self, command, *args):
# We use this wrapper to disable output decoding. diffs should be treated as
        # binary files since they may include text files of multiple different encodings.
return self._run([command, "diff"] + list(args), decode_output=False)
def _git_diff(self, *args):
return self._make_diff("git", *args)
def _shared_test_add_recursively(self):
self._mkdir("added_dir")
self._write_text_file("added_dir/added_file", "new stuff")
self.scm.add("added_dir/added_file")
self.assertIn("added_dir/added_file", self.scm._added_files())
def _shared_test_delete_recursively(self):
self._mkdir("added_dir")
self._write_text_file("added_dir/added_file", "new stuff")
self.scm.add("added_dir/added_file")
self.assertIn("added_dir/added_file", self.scm._added_files())
self.scm.delete("added_dir/added_file")
self.assertNotIn("added_dir", self.scm._added_files())
def _shared_test_delete_recursively_or_not(self):
self._mkdir("added_dir")
self._write_text_file("added_dir/added_file", "new stuff")
self._write_text_file("added_dir/another_added_file", "more new stuff")
self.scm.add("added_dir/added_file")
self.scm.add("added_dir/another_added_file")
self.assertIn("added_dir/added_file", self.scm._added_files())
self.assertIn("added_dir/another_added_file", self.scm._added_files())
self.scm.delete("added_dir/added_file")
self.assertIn("added_dir/another_added_file", self.scm._added_files())
def _shared_test_exists(self, scm, commit_function):
self._chdir(scm.checkout_root)
self.assertFalse(scm.exists('foo.txt'))
self._write_text_file('foo.txt', 'some stuff')
self.assertFalse(scm.exists('foo.txt'))
scm.add('foo.txt')
commit_function('adding foo')
self.assertTrue(scm.exists('foo.txt'))
scm.delete('foo.txt')
commit_function('deleting foo')
self.assertFalse(scm.exists('foo.txt'))
def _shared_test_move(self):
self._write_text_file('added_file', 'new stuff')
self.scm.add('added_file')
self.scm.move('added_file', 'moved_file')
self.assertIn('moved_file', self.scm._added_files())
def _shared_test_move_recursive(self):
self._mkdir("added_dir")
self._write_text_file('added_dir/added_file', 'new stuff')
self._write_text_file('added_dir/another_added_file', 'more new stuff')
self.scm.add('added_dir')
self.scm.move('added_dir', 'moved_dir')
self.assertIn('moved_dir/added_file', self.scm._added_files())
self.assertIn('moved_dir/another_added_file', self.scm._added_files())
class GitTest(SCMTestBase):
def setUp(self):
super(GitTest, self).setUp()
self._set_up_git_checkouts()
def tearDown(self):
super(GitTest, self).tearDown()
self._tear_down_git_checkouts()
def _set_up_git_checkouts(self):
"""Sets up fresh git repository with one commit. Then sets up a second git repo that tracks the first one."""
self.untracking_checkout_path = self._mkdtemp(suffix="git_test_checkout2")
self._run(['git', 'init', self.untracking_checkout_path])
self._chdir(self.untracking_checkout_path)
self._write_text_file('foo_file', 'foo')
self._run(['git', 'add', 'foo_file'])
self._run(['git', 'commit', '-am', 'dummy commit'])
self.untracking_scm = detect_scm_system(self.untracking_checkout_path)
self.tracking_git_checkout_path = self._mkdtemp(suffix="git_test_checkout")
self._run(['git', 'clone', '--quiet', self.untracking_checkout_path, self.tracking_git_checkout_path])
self._chdir(self.tracking_git_checkout_path)
self.tracking_scm = detect_scm_system(self.tracking_git_checkout_path)
def _tear_down_git_checkouts(self):
self._run(['rm', '-rf', self.tracking_git_checkout_path])
self._run(['rm', '-rf', self.untracking_checkout_path])
def test_remote_branch_ref(self):
self.assertEqual(self.tracking_scm._remote_branch_ref(), 'refs/remotes/origin/master')
self._chdir(self.untracking_checkout_path)
self.assertRaises(ScriptError, self.untracking_scm._remote_branch_ref)
def test_create_patch(self):
self._write_text_file('test_file_commit1', 'contents')
self._run(['git', 'add', 'test_file_commit1'])
scm = self.tracking_scm
scm.commit_locally_with_message('message')
patch = scm.create_patch()
self.assertNotRegexpMatches(patch, r'Subversion Revision:')
def test_exists(self):
scm = self.untracking_scm
self._shared_test_exists(scm, scm.commit_locally_with_message)
def test_rename_files(self):
scm = self.tracking_scm
scm.move('foo_file', 'bar_file')
scm.commit_locally_with_message('message')
def test_commit_position_from_git_log(self):
git_log = """
commit 624c3081c0
Author: foobarbaz1 <[email protected]>
Date: Mon Sep 28 19:10:30 2015 -0700
Test foo bar baz qux 123.
BUG=000000
Review URL: https://codereview.chromium.org/999999999
Cr-Commit-Position: refs/heads/master@{#1234567}
"""
scm = self.tracking_scm
self.assertEqual(scm._commit_position_from_git_log(git_log), 1234567)
def test_timestamp_of_revision(self):
scm = self.tracking_scm
scm.most_recent_log_matching(scm._commit_position_regex_for_timestamp(), scm.checkout_root)
class GitTestWithMock(SCMTestBase):
def make_scm(self):
scm = Git(cwd=".", executive=MockExecutive(), filesystem=MockFileSystem())
scm.read_git_config = lambda *args, **kw: "MOCKKEY:MOCKVALUE"
return scm
def test_timestamp_of_revision(self):
scm = self.make_scm()
scm.find_checkout_root = lambda path: ''
scm._run_git = lambda args: 'Date: 2013-02-08 08:05:49 +0000'
self.assertEqual(scm.timestamp_of_revision('some-path', '12345'), '2013-02-08T08:05:49Z')
scm._run_git = lambda args: 'Date: 2013-02-08 01:02:03 +0130'
self.assertEqual(scm.timestamp_of_revision('some-path', '12345'), '2013-02-07T23:32:03Z')
scm._run_git = lambda args: 'Date: 2013-02-08 01:55:21 -0800'
self.assertEqual(scm.timestamp_of_revision('some-path', '12345'), '2013-02-08T09:55:21Z')
|
|
import pytest
import os
import ipaddress
from scapy.all import rdpcap, IP, IPv6, TCP, UDP
from .test_utils import ExampleTest
class TestPcapSplitter(ExampleTest):
pytestmark = [pytest.mark.pcapsplitter, pytest.mark.no_network]
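    # Every test builds an ``args`` mapping of PcapSplitter command-line flags
    # (-f input pcap, -o output directory, -m split method, -p method parameter)
    # to values; ExampleTest.run_example is assumed to turn that mapping into
    # the actual command line, run it, and return the completed process (see
    # the expected_return_code checks near the end of this class).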
def test_split_by_file_size(self, tmpdir):
args = {
"-f": os.path.join("pcap_examples", "many-protocols.pcap"),
"-o": tmpdir,
"-m": "file-size",
"-p": "100000",
}
self.run_example(args=args)
assert len(os.listdir(tmpdir)) == 30
for filename in os.listdir(tmpdir):
if not os.path.splitext(filename)[0].endswith("29"):
assert (
98500 <= os.path.getsize(os.path.join(tmpdir, filename)) <= 101500
)
def test_split_by_packet_count(self, tmpdir):
args = {
"-f": os.path.join("pcap_examples", "many-protocols.pcap"),
"-o": tmpdir,
"-m": "packet-count",
"-p": "300",
}
self.run_example(args=args)
assert len(os.listdir(tmpdir)) == 16
for filename in os.listdir(tmpdir):
if not os.path.splitext(filename)[0].endswith("15"):
packets = rdpcap(os.path.join(tmpdir, filename))
assert len(packets) == 300
def test_split_by_client_ip(self, tmpdir):
args = {
"-f": os.path.join("pcap_examples", "many-protocols.pcap"),
"-o": tmpdir,
"-m": "client-ip",
}
self.run_example(args=args)
assert len(os.listdir(tmpdir)) == 5
for filename in os.listdir(tmpdir):
packets = rdpcap(os.path.join(tmpdir, filename))
if os.path.splitext(filename)[0].endswith("miscellaneous"):
for packet in packets:
assert not packet.haslayer(TCP) and not packet.haslayer(UDP)
else:
ip_addr = os.path.splitext(filename)[0][25:]
try:
ip_addr = ipaddress.ip_address(ip_addr.replace("-", "."))
except ValueError:
ip_addr = ipaddress.ip_address(ip_addr.replace("-", ":"))
for packet in packets:
assert packet.haslayer(TCP) or packet.haslayer(UDP)
if isinstance(ip_addr, ipaddress.IPv4Address):
assert packet.haslayer(IP)
assert (
ipaddress.ip_address(packet[IP].src) == ip_addr
or ipaddress.ip_address(packet[IP].dst) == ip_addr
)
else:
assert packet.haslayer(IPv6)
assert (
ipaddress.ip_address(packet[IPv6].src) == ip_addr
or ipaddress.ip_address(packet[IPv6].dst) == ip_addr
)
def test_split_by_server_ip(self, tmpdir):
args = {
"-f": os.path.join("pcap_examples", "many-protocols.pcap"),
"-o": tmpdir,
"-m": "server-ip",
}
self.run_example(args=args)
assert len(os.listdir(tmpdir)) == 60
for filename in os.listdir(tmpdir):
packets = rdpcap(os.path.join(tmpdir, filename))
if os.path.splitext(filename)[0].endswith("miscellaneous"):
for packet in packets:
assert not packet.haslayer(TCP) and not packet.haslayer(UDP)
else:
ip_addr = os.path.splitext(filename)[0][25:]
try:
ip_addr = ipaddress.ip_address(ip_addr.replace("-", "."))
except ValueError:
ip_addr = ipaddress.ip_address(ip_addr.replace("-", ":"))
for packet in packets:
assert packet.haslayer(TCP) or packet.haslayer(UDP)
if isinstance(ip_addr, ipaddress.IPv4Address):
assert packet.haslayer(IP)
assert (
ipaddress.ip_address(packet[IP].src) == ip_addr
or ipaddress.ip_address(packet[IP].dst) == ip_addr
)
else:
assert packet.haslayer(IPv6)
assert (
ipaddress.ip_address(packet[IPv6].src) == ip_addr
or ipaddress.ip_address(packet[IPv6].dst) == ip_addr
)
def test_split_by_server_port(self, tmpdir):
args = {
"-f": os.path.join("pcap_examples", "many-protocols.pcap"),
"-o": tmpdir,
"-m": "server-port",
}
self.run_example(args=args)
assert len(os.listdir(tmpdir)) == 7
for filename in os.listdir(tmpdir):
packets = rdpcap(os.path.join(tmpdir, filename))
if os.path.splitext(filename)[0].endswith("miscellaneous"):
for packet in packets:
assert not packet.haslayer(TCP) and not packet.haslayer(UDP)
else:
server_port = int(os.path.splitext(filename)[0][27:])
for packet in packets:
assert (
packet.haslayer(TCP)
and (
packet[TCP].sport == server_port
or packet[TCP].dport == server_port
)
) or (
packet.haslayer(UDP)
and (
packet[UDP].sport == server_port
or packet[UDP].dport == server_port
)
)
def test_split_by_client_port(self, tmpdir):
args = {
"-f": os.path.join("pcap_examples", "many-protocols.pcap"),
"-o": tmpdir,
"-m": "client-port",
}
self.run_example(args=args)
assert len(os.listdir(tmpdir)) == 254
for filename in os.listdir(tmpdir):
packets = rdpcap(os.path.join(tmpdir, filename))
if os.path.splitext(filename)[0].endswith("miscellaneous"):
for packet in packets:
assert not packet.haslayer(TCP) and not packet.haslayer(UDP)
else:
client_port = int(os.path.splitext(filename)[0][27:])
for packet in packets:
assert (
packet.haslayer(TCP)
and (
packet[TCP].sport == client_port
or packet[TCP].dport == client_port
)
) or (
packet.haslayer(UDP)
and (
packet[UDP].sport == client_port
or packet[UDP].dport == client_port
)
)
def test_split_by_ip_src_dst(self, tmpdir):
args = {
"-f": os.path.join("pcap_examples", "many-protocols.pcap"),
"-o": tmpdir,
"-m": "ip-src-dst",
}
self.run_example(args=args)
assert len(os.listdir(tmpdir)) == 65
ip_src_dst_map = {}
for filename in os.listdir(tmpdir):
packets = rdpcap(os.path.join(tmpdir, filename))
if packets[0].haslayer(IP):
ip_type = IP
ip_src_dst = frozenset([packets[0][IP].src, packets[0][IP].dst])
elif packets[0].haslayer(IPv6):
ip_type = IPv6
ip_src_dst = frozenset([packets[0][IPv6].src, packets[0][IPv6].dst])
else:
non_ip = frozenset([])
assert non_ip not in ip_src_dst_map
ip_src_dst_map[non_ip] = True
continue
assert ip_src_dst not in ip_src_dst_map
ip_src_dst_map[ip_src_dst] = True
for packet in packets:
assert packet.haslayer(ip_type)
assert ip_src_dst == frozenset(
[packet[ip_type].src, packet[ip_type].dst]
)
def test_split_by_connection(self, tmpdir):
args = {
"-f": os.path.join("pcap_examples", "many-protocols.pcap"),
"-o": tmpdir,
"-m": "connection",
}
self.run_example(args=args)
assert len(os.listdir(tmpdir)) == 254
connection_map = {}
for filename in os.listdir(tmpdir):
packets = rdpcap(os.path.join(tmpdir, filename))
if packets[0].haslayer(TCP):
trans_layer = TCP
elif packets[0].haslayer(UDP):
trans_layer = UDP
else:
trans_layer = None
if trans_layer is not None:
net_layer = IP if packets[0].haslayer(IP) else IPv6
else:
net_layer = None
if net_layer is not None and trans_layer is not None:
conn = frozenset(
[
trans_layer,
packets[0][net_layer].src,
packets[0][net_layer].dst,
packets[0][trans_layer].sport,
packets[0][trans_layer].dport,
]
)
else:
conn = frozenset([])
            assert conn not in connection_map
connection_map[conn] = True
if len(conn) == 0:
continue
for packet in packets:
assert packet.haslayer(net_layer) and packet.haslayer(trans_layer)
packet_conn = frozenset(
[
trans_layer,
packet[net_layer].src,
packet[net_layer].dst,
packet[trans_layer].sport,
packet[trans_layer].dport,
]
)
assert packet_conn == conn
def test_split_by_bpf_filter(self, tmpdir):
args = {
"-f": os.path.join("pcap_examples", "many-protocols.pcap"),
"-o": tmpdir,
"-m": "bpf-filter",
"-p": "udp",
}
self.run_example(args=args)
assert len(os.listdir(tmpdir)) == 2
for filename in os.listdir(tmpdir):
packets = rdpcap(os.path.join(tmpdir, filename))
match_bpf = not os.path.splitext(filename)[0].endswith("not-match-bpf")
for packet in packets:
assert packet.haslayer(UDP) == match_bpf
def test_split_by_round_robin(self, tmpdir):
divide_by = 10
args = {
"-f": os.path.join("pcap_examples", "many-protocols.pcap"),
"-o": tmpdir,
"-m": "round-robin",
"-p": str(divide_by),
}
self.run_example(args=args)
num_of_packets_per_file = int(
len(rdpcap(os.path.join("pcap_examples", "many-protocols.pcap")))
/ divide_by
)
assert len(os.listdir(tmpdir)) == divide_by
for filename in os.listdir(tmpdir):
assert (
num_of_packets_per_file
<= len(rdpcap(os.path.join(tmpdir, filename)))
<= num_of_packets_per_file + 1
)
def test_input_file_not_given(self):
args = {}
completed_process = self.run_example(args=args, expected_return_code=1)
assert "ERROR: Input file name was not given" in completed_process.stdout
def test_output_dir_not_given(self):
args = {"-f": os.path.join("pcap_examples", "many-protocols.pcap")}
completed_process = self.run_example(args=args, expected_return_code=1)
assert "ERROR: Output directory name was not given" in completed_process.stdout
def test_split_method_not_given(self, tmpdir):
args = {
"-f": os.path.join("pcap_examples", "many-protocols.pcap"),
"-o": tmpdir,
}
completed_process = self.run_example(args=args, expected_return_code=1)
assert "ERROR: Split method was not given" in completed_process.stdout
def test_output_dir_not_exist(self):
args = {
"-f": os.path.join("pcap_examples", "many-protocols.pcap"),
"-o": "blablablalba12345",
}
completed_process = self.run_example(args=args, expected_return_code=1)
assert "ERROR: Output directory doesn't exist" in completed_process.stdout
def test_input_file_not_exist(self, tmpdir):
args = {
"-f": os.path.join("pcap_examples", "many-protocols123.pcap"),
"-o": tmpdir,
"-m": "ip-src-dst",
}
completed_process = self.run_example(args=args, expected_return_code=1)
assert "ERROR: Error opening input pcap file" in completed_process.stdout
def test_split_method_not_exist(self, tmpdir):
args = {
"-f": os.path.join("pcap_examples", "many-protocols123.pcap"),
"-o": tmpdir,
"-m": "blabla",
}
completed_process = self.run_example(args=args, expected_return_code=1)
assert "ERROR: Unknown method 'blabla'" in completed_process.stdout
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import template
from django.template import defaultfilters as filters
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import tables
from horizon.utils import filters as utils_filters
class ServiceFilterAction(tables.FilterAction):
def filter(self, table, services, filter_string):
q = filter_string.lower()
def comp(service):
if q in service.type.lower():
return True
return False
return filter(comp, services)
def get_stats(service):
return template.loader.render_to_string('admin/services/_stats.html',
{'service': service})
def get_enabled(service, reverse=False):
options = ["Enabled", "Disabled"]
if reverse:
options.reverse()
# if not configured in this region, neither option makes sense
if service.host:
return options[0] if not service.disabled else options[1]
return None
class ServicesTable(tables.DataTable):
id = tables.Column('id', verbose_name=_('Id'), hidden=True)
name = tables.Column("name", verbose_name=_('Name'))
service_type = tables.Column('__unicode__', verbose_name=_('Service'))
host = tables.Column('host', verbose_name=_('Host'))
enabled = tables.Column(get_enabled,
verbose_name=_('Enabled'),
status=True)
class Meta:
name = "services"
verbose_name = _("Services")
table_actions = (ServiceFilterAction,)
multi_select = False
status_columns = ["enabled"]
def get_available(zone):
return zone.zoneState['available']
def get_zone_hosts(zone):
hosts = zone.hosts
host_details = []
for name, services in hosts.items():
up = all([s['active'] and s['available'] for k, s in services.items()])
up = _("Services Up") if up else _("Services Down")
host_details.append("%(host)s (%(up)s)" % {'host': name, 'up': up})
return host_details
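# Example of the rendered host details (host names hypothetical):
#   ['compute1 (Services Up)', 'compute2 (Services Down)']
# ZonesTable below renders this list with the ``unordered_list`` filter.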
class ZonesTable(tables.DataTable):
name = tables.Column('zoneName', verbose_name=_('Name'))
hosts = tables.Column(get_zone_hosts,
verbose_name=_('Hosts'),
wrap_list=True,
filters=(filters.unordered_list,))
available = tables.Column(get_available,
verbose_name=_('Available'),
status=True,
filters=(filters.yesno, filters.capfirst))
def get_object_id(self, zone):
return zone.zoneName
class Meta:
name = "zones"
verbose_name = _("Availability Zones")
multi_select = False
status_columns = ["available"]
class NovaServiceFilterAction(tables.FilterAction):
def filter(self, table, services, filter_string):
q = filter_string.lower()
def comp(service):
if q in service.type.lower():
return True
return False
return filter(comp, services)
class NovaServicesTable(tables.DataTable):
binary = tables.Column("binary", verbose_name=_('Name'))
host = tables.Column('host', verbose_name=_('Host'))
zone = tables.Column('zone', verbose_name=_('Zone'))
status = tables.Column('status', verbose_name=_('Status'))
state = tables.Column('state', verbose_name=_('State'))
updated_at = tables.Column('updated_at',
verbose_name=_('Updated At'),
filters=(utils_filters.parse_isotime,
filters.timesince))
def get_object_id(self, obj):
return "%s-%s-%s" % (obj.binary, obj.host, obj.zone)
class Meta:
name = "nova_services"
verbose_name = _("Compute Services")
table_actions = (NovaServiceFilterAction,)
multi_select = False
def get_aggregate_hosts(aggregate):
return [host for host in aggregate.hosts]
def get_metadata(aggregate):
return [' = '.join([key, val]) for key, val
in aggregate.metadata.iteritems()]
class AggregatesTable(tables.DataTable):
name = tables.Column("name",
verbose_name=_("Name"))
availability_zone = tables.Column("availability_zone",
verbose_name=_("Availability Zone"))
hosts = tables.Column(get_aggregate_hosts,
verbose_name=_("Hosts"),
wrap_list=True,
filters=(filters.unordered_list,))
metadata = tables.Column(get_metadata,
verbose_name=_("Metadata"),
wrap_list=True,
filters=(filters.unordered_list,))
class Meta:
name = "aggregates"
verbose_name = _("Host Aggregates")
class NetworkAgentsFilterAction(tables.FilterAction):
def filter(self, table, agents, filter_string):
q = filter_string.lower()
def comp(agent):
if q in agent.agent_type.lower():
return True
return False
return filter(comp, agents)
def get_network_agent_status(agent):
if agent.admin_state_up:
return _('Enabled')
return _('Disabled')
def get_network_agent_state(agent):
if agent.alive:
return _('Up')
return _('Down')
class NetworkAgentsTable(tables.DataTable):
agent_type = tables.Column('agent_type', verbose_name=_('Type'))
binary = tables.Column("binary", verbose_name=_('Name'))
host = tables.Column('host', verbose_name=_('Host'))
status = tables.Column(get_network_agent_status, verbose_name=_('Status'))
state = tables.Column(get_network_agent_state, verbose_name=_('State'))
heartbeat_timestamp = tables.Column('heartbeat_timestamp',
verbose_name=_('Updated At'),
filters=(utils_filters.parse_isotime,
filters.timesince))
def get_object_id(self, obj):
return "%s-%s" % (obj.binary, obj.host)
class Meta:
name = "network_agents"
verbose_name = _("Network Agents")
table_actions = (NetworkAgentsFilterAction,)
multi_select = False
|
|
from rest_framework import status
from rest_framework.reverse import reverse
from .base import BaseTest
class PaymentMethodsViewTest(BaseTest):
maxDiff = None
def test_introspection_authenticated(self):
self.login(is_staff=True)
url = reverse("api-checkout-payment-methods")
resp = self.client.get(url)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertDictEqual(
resp.data,
{
"credit-card": {
"type": "nested object",
"required": False,
"read_only": False,
"label": "Credit Card",
"children": {
"method_type": {
"type": "choice",
"required": True,
"read_only": False,
"label": "Method type",
"choices": [
{"value": "credit-card", "display_name": "Credit Card"}
],
},
"enabled": {
"type": "boolean",
"required": False,
"read_only": False,
"label": "Enabled",
},
"pay_balance": {
"type": "boolean",
"required": False,
"read_only": False,
"label": "Pay balance",
},
"amount": {
"type": "decimal",
"required": False,
"read_only": False,
"label": "Amount",
},
"reference": {
"type": "string",
"required": False,
"read_only": False,
"label": "Reference",
"max_length": 128,
},
},
},
"cash": {
"type": "nested object",
"required": False,
"read_only": False,
"label": "Cash",
"children": {
"method_type": {
"type": "choice",
"required": True,
"read_only": False,
"label": "Method type",
"choices": [{"value": "cash", "display_name": "Cash"}],
},
"enabled": {
"type": "boolean",
"required": False,
"read_only": False,
"label": "Enabled",
},
"pay_balance": {
"type": "boolean",
"required": False,
"read_only": False,
"label": "Pay balance",
},
"amount": {
"type": "decimal",
"required": False,
"read_only": False,
"label": "Amount",
},
"reference": {
"type": "string",
"required": False,
"read_only": False,
"label": "Reference",
"max_length": 128,
},
},
},
"pay-later": {
"type": "nested object",
"required": False,
"read_only": False,
"label": "Pay Later",
"children": {
"method_type": {
"type": "choice",
"required": True,
"read_only": False,
"label": "Method type",
"choices": [
{"value": "pay-later", "display_name": "Pay Later"}
],
},
"enabled": {
"type": "boolean",
"required": False,
"read_only": False,
"label": "Enabled",
},
"pay_balance": {
"type": "boolean",
"required": False,
"read_only": False,
"label": "Pay balance",
},
"amount": {
"type": "decimal",
"required": False,
"read_only": False,
"label": "Amount",
},
"reference": {
"type": "string",
"required": False,
"read_only": False,
"label": "Reference",
"max_length": 128,
},
},
},
},
)
def test_introspection_anonymous(self):
url = reverse("api-checkout-payment-methods")
resp = self.client.get(url)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertDictEqual(
resp.data,
{
"credit-card": {
"type": "nested object",
"required": False,
"read_only": False,
"label": "Credit Card",
"children": {
"method_type": {
"type": "choice",
"required": True,
"read_only": False,
"label": "Method type",
"choices": [
{"value": "credit-card", "display_name": "Credit Card"}
],
},
"enabled": {
"type": "boolean",
"required": False,
"read_only": False,
"label": "Enabled",
},
"pay_balance": {
"type": "boolean",
"required": False,
"read_only": False,
"label": "Pay balance",
},
"amount": {
"type": "decimal",
"required": False,
"read_only": False,
"label": "Amount",
},
"reference": {
"type": "string",
"required": False,
"read_only": False,
"label": "Reference",
"max_length": 128,
},
},
},
"pay-later": {
"type": "nested object",
"required": False,
"read_only": False,
"label": "Pay Later",
"children": {
"method_type": {
"type": "choice",
"required": True,
"read_only": False,
"label": "Method type",
"choices": [
{"value": "pay-later", "display_name": "Pay Later"}
],
},
"enabled": {
"type": "boolean",
"required": False,
"read_only": False,
"label": "Enabled",
},
"pay_balance": {
"type": "boolean",
"required": False,
"read_only": False,
"label": "Pay balance",
},
"amount": {
"type": "decimal",
"required": False,
"read_only": False,
"label": "Amount",
},
"reference": {
"type": "string",
"required": False,
"read_only": False,
"label": "Reference",
"max_length": 128,
},
},
},
},
)
|
|
# Copyright 2010 Google Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import errno
import httplib
import os
import random
import re
import socket
import time
import urlparse
from vulpo import config, UserAgent
from vulpo.connection import AWSAuthConnection
from vulpo.exception import InvalidUriError
from vulpo.exception import ResumableTransferDisposition
from vulpo.exception import ResumableUploadException
from vulpo.scs.keyfile import KeyFile
try:
from hashlib import md5
except ImportError:
from md5 import md5
"""
Handler for Google Cloud Storage resumable uploads. See
http://code.google.com/apis/storage/docs/developer-guide.html#resumable
for details.
Resumable uploads will retry failed uploads, resuming at the byte
count completed by the last upload attempt. If too many retries happen with
no progress (per configurable num_retries param), the upload will be
aborted in the current process.
The caller can optionally specify a tracker_file_name param in the
ResumableUploadHandler constructor. If you do this, that file will
save the state needed to allow retrying later, in a separate process
(e.g., in a later run of gsutil).
"""
class ResumableUploadHandler(object):
BUFFER_SIZE = 8192
RETRYABLE_EXCEPTIONS = (httplib.HTTPException, IOError, socket.error,
socket.gaierror)
# (start, end) response indicating server has nothing (upload protocol uses
# inclusive numbering).
SERVER_HAS_NOTHING = (0, -1)
def __init__(self, tracker_file_name=None, num_retries=None):
"""
Constructor. Instantiate once for each uploaded file.
:type tracker_file_name: string
:param tracker_file_name: optional file name to save tracker URI.
If supplied and the current process fails the upload, it can be
retried in a new process. If called with an existing file containing
a valid tracker URI, we'll resume the upload from this URI; else
we'll start a new resumable upload (and write the URI to this
tracker file).
:type num_retries: int
:param num_retries: the number of times we'll re-try a resumable upload
making no progress. (Count resets every time we get progress, so
upload can span many more than this number of retries.)
"""
self.tracker_file_name = tracker_file_name
self.num_retries = num_retries
self.server_has_bytes = 0 # Byte count at last server check.
self.tracker_uri = None
if tracker_file_name:
self._load_tracker_uri_from_file()
# Save upload_start_point in instance state so caller can find how
# much was transferred by this ResumableUploadHandler (across retries).
self.upload_start_point = None
def _load_tracker_uri_from_file(self):
f = None
try:
f = open(self.tracker_file_name, 'r')
uri = f.readline().strip()
self._set_tracker_uri(uri)
except IOError, e:
# Ignore non-existent file (happens first time an upload
# is attempted on a file), but warn user for other errors.
if e.errno != errno.ENOENT:
# Will restart because self.tracker_uri is None.
print('Couldn\'t read URI tracker file (%s): %s. Restarting '
'upload from scratch.' %
(self.tracker_file_name, e.strerror))
except InvalidUriError, e:
# Warn user, but proceed (will restart because
# self.tracker_uri is None).
print('Invalid tracker URI (%s) found in URI tracker file '
'(%s). Restarting upload from scratch.' %
(uri, self.tracker_file_name))
finally:
if f:
f.close()
def _save_tracker_uri_to_file(self):
"""
Saves URI to tracker file if one was passed to constructor.
"""
if not self.tracker_file_name:
return
f = None
try:
with os.fdopen(os.open(self.tracker_file_name,
os.O_WRONLY | os.O_CREAT, 0600), 'w') as f:
f.write(self.tracker_uri)
except IOError, e:
raise ResumableUploadException(
                'Couldn\'t write URI tracker file (%s): %s.\nThis can happen '
'if you\'re using an incorrectly configured upload tool\n'
'(e.g., gsutil configured to save tracker files to an '
'unwritable directory)' %
(self.tracker_file_name, e.strerror),
ResumableTransferDisposition.ABORT)
def _set_tracker_uri(self, uri):
"""
Called when we start a new resumable upload or get a new tracker
URI for the upload. Saves URI and resets upload state.
Raises InvalidUriError if URI is syntactically invalid.
"""
parse_result = urlparse.urlparse(uri)
if (parse_result.scheme.lower() not in ['http', 'https'] or
not parse_result.netloc):
raise InvalidUriError('Invalid tracker URI (%s)' % uri)
self.tracker_uri = uri
self.tracker_uri_host = parse_result.netloc
self.tracker_uri_path = '%s?%s' % (
parse_result.path, parse_result.query)
self.server_has_bytes = 0
def get_tracker_uri(self):
"""
Returns upload tracker URI, or None if the upload has not yet started.
"""
return self.tracker_uri
def get_upload_id(self):
"""
Returns the upload ID for the resumable upload, or None if the upload
has not yet started.
"""
# We extract the upload_id from the tracker uri. We could retrieve the
# upload_id from the headers in the response but this only works for
# the case where we get the tracker uri from the service. In the case
# where we get the tracker from the tracking file we need to do this
# logic anyway.
delim = '?upload_id='
if self.tracker_uri and delim in self.tracker_uri:
return self.tracker_uri[self.tracker_uri.index(delim) + len(delim):]
else:
return None
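    # For example (URI illustrative), a tracker URI of
    # 'https://storage.googleapis.com/bucket/obj?upload_id=ABC123' makes
    # get_upload_id() return 'ABC123'.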
def _remove_tracker_file(self):
if (self.tracker_file_name and
os.path.exists(self.tracker_file_name)):
os.unlink(self.tracker_file_name)
def _build_content_range_header(self, range_spec='*', length_spec='*'):
return 'bytes %s/%s' % (range_spec, length_spec)
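    # For example (values illustrative), _build_content_range_header('0-99', 200)
    # returns 'bytes 0-99/200'; _query_server_state below sends
    # 'bytes */<file_length>' to ask the server how much it already holds.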
def _query_server_state(self, conn, file_length):
"""
Queries server to find out state of given upload.
Note that this method really just makes special case use of the
fact that the upload server always returns the current start/end
state whenever a PUT doesn't complete.
Returns HTTP response from sending request.
Raises ResumableUploadException if problem querying server.
"""
# Send an empty PUT so that server replies with this resumable
# transfer's state.
put_headers = {}
put_headers['Content-Range'] = (
self._build_content_range_header('*', file_length))
put_headers['Content-Length'] = '0'
return AWSAuthConnection.make_request(conn, 'PUT',
path=self.tracker_uri_path,
auth_path=self.tracker_uri_path,
headers=put_headers,
host=self.tracker_uri_host)
def _query_server_pos(self, conn, file_length):
"""
Queries server to find out what bytes it currently has.
Returns (server_start, server_end), where the values are inclusive.
For example, (0, 2) would mean that the server has bytes 0, 1, *and* 2.
Raises ResumableUploadException if problem querying server.
"""
resp = self._query_server_state(conn, file_length)
if resp.status == 200:
# To handle the boundary condition where the server has the complete
# file, we return (server_start, file_length-1). That way the
# calling code can always simply read up through server_end. (If we
# didn't handle this boundary condition here, the caller would have
# to check whether server_end == file_length and read one fewer byte
# in that case.)
return (0, file_length - 1) # Completed upload.
if resp.status != 308:
# This means the server didn't have any state for the given
# upload ID, which can happen (for example) if the caller saved
# the tracker URI to a file and then tried to restart the transfer
# after that upload ID has gone stale. In that case we need to
# start a new transfer (and the caller will then save the new
# tracker URI to the tracker file).
raise ResumableUploadException(
'Got non-308 response (%s) from server state query' %
resp.status, ResumableTransferDisposition.START_OVER)
got_valid_response = False
range_spec = resp.getheader('range')
if range_spec:
# Parse 'bytes=<from>-<to>' range_spec.
            m = re.search(r'bytes=(\d+)-(\d+)', range_spec)
if m:
server_start = long(m.group(1))
server_end = long(m.group(2))
got_valid_response = True
else:
# No Range header, which means the server does not yet have
# any bytes. Note that the Range header uses inclusive 'from'
# and 'to' values. Since Range 0-0 would mean that the server
# has byte 0, omitting the Range header is used to indicate that
# the server doesn't have any bytes.
return self.SERVER_HAS_NOTHING
if not got_valid_response:
raise ResumableUploadException(
'Couldn\'t parse upload server state query response (%s)' %
str(resp.getheaders()), ResumableTransferDisposition.START_OVER)
if conn.debug >= 1:
print 'Server has: Range: %d - %d.' % (server_start, server_end)
return (server_start, server_end)
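    # Illustrative 308 handling: a 'Range: bytes=0-524287' header parses to
    # (server_start, server_end) == (0, 524287), i.e. the server already holds
    # the first 524288 bytes and the upload resumes at byte 524288.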
def _start_new_resumable_upload(self, key, headers=None):
"""
Starts a new resumable upload.
Raises ResumableUploadException if any errors occur.
"""
conn = key.bucket.connection
if conn.debug >= 1:
print 'Starting new resumable upload.'
self.server_has_bytes = 0
# Start a new resumable upload by sending a POST request with an
# empty body and the "X-Goog-Resumable: start" header. Include any
# caller-provided headers (e.g., Content-Type) EXCEPT Content-Length
# (and raise an exception if they tried to pass one, since it's
# a semantic error to specify it at this point, and if we were to
# include one now it would cause the server to expect that many
        # bytes; the POST doesn't include the actual file bytes). We set
        # the Content-Length in the subsequent PUT, based on the uploaded
        # file size.
post_headers = {}
for k in headers:
if k.lower() == 'content-length':
raise ResumableUploadException(
'Attempt to specify Content-Length header (disallowed)',
ResumableTransferDisposition.ABORT)
post_headers[k] = headers[k]
post_headers[conn.provider.resumable_upload_header] = 'start'
resp = conn.make_request(
'POST', key.bucket.name, key.name, post_headers)
# Get tracker URI from response 'Location' header.
body = resp.read()
# Check for various status conditions.
if resp.status in [500, 503]:
# Retry status 500 and 503 errors after a delay.
raise ResumableUploadException(
'Got status %d from attempt to start resumable upload. '
'Will wait/retry' % resp.status,
ResumableTransferDisposition.WAIT_BEFORE_RETRY)
elif resp.status != 200 and resp.status != 201:
raise ResumableUploadException(
'Got status %d from attempt to start resumable upload. '
'Aborting' % resp.status,
ResumableTransferDisposition.ABORT)
# Else we got 200 or 201 response code, indicating the resumable
# upload was created.
tracker_uri = resp.getheader('Location')
if not tracker_uri:
raise ResumableUploadException(
'No resumable tracker URI found in resumable initiation '
'POST response (%s)' % body,
ResumableTransferDisposition.WAIT_BEFORE_RETRY)
self._set_tracker_uri(tracker_uri)
self._save_tracker_uri_to_file()
def _upload_file_bytes(self, conn, http_conn, fp, file_length,
total_bytes_uploaded, cb, num_cb, headers):
"""
Makes one attempt to upload file bytes, using an existing resumable
upload connection.
Returns (etag, generation, metageneration) from server upon success.
Raises ResumableUploadException if any problems occur.
"""
buf = fp.read(self.BUFFER_SIZE)
if cb:
# The cb_count represents the number of full buffers to send between
# cb executions.
if num_cb > 2:
cb_count = file_length / self.BUFFER_SIZE / (num_cb-2)
elif num_cb < 0:
cb_count = -1
else:
cb_count = 0
i = 0
cb(total_bytes_uploaded, file_length)
# Build resumable upload headers for the transfer. Don't send a
# Content-Range header if the file is 0 bytes long, because the
# resumable upload protocol uses an *inclusive* end-range (so, sending
# 'bytes 0-0/1' would actually mean you're sending a 1-byte file).
if not headers:
put_headers = {}
else:
put_headers = headers.copy()
if file_length:
if total_bytes_uploaded == file_length:
range_header = self._build_content_range_header(
'*', file_length)
else:
range_header = self._build_content_range_header(
'%d-%d' % (total_bytes_uploaded, file_length - 1),
file_length)
put_headers['Content-Range'] = range_header
# Set Content-Length to the total bytes we'll send with this PUT.
put_headers['Content-Length'] = str(file_length - total_bytes_uploaded)
http_request = AWSAuthConnection.build_base_http_request(
conn, 'PUT', path=self.tracker_uri_path, auth_path=None,
headers=put_headers, host=self.tracker_uri_host)
http_conn.putrequest('PUT', http_request.path)
for k in put_headers:
http_conn.putheader(k, put_headers[k])
http_conn.endheaders()
# Turn off debug on http connection so upload content isn't included
# in debug stream.
http_conn.set_debuglevel(0)
while buf:
http_conn.send(buf)
for alg in self.digesters:
self.digesters[alg].update(buf)
total_bytes_uploaded += len(buf)
if cb:
i += 1
if i == cb_count or cb_count == -1:
cb(total_bytes_uploaded, file_length)
i = 0
buf = fp.read(self.BUFFER_SIZE)
http_conn.set_debuglevel(conn.debug)
if cb:
cb(total_bytes_uploaded, file_length)
if total_bytes_uploaded != file_length:
# Abort (and delete the tracker file) so if the user retries
# they'll start a new resumable upload rather than potentially
# attempting to pick back up later where we left off.
raise ResumableUploadException(
'File changed during upload: EOF at %d bytes of %d byte file.' %
(total_bytes_uploaded, file_length),
ResumableTransferDisposition.ABORT)
resp = http_conn.getresponse()
# Restore http connection debug level.
http_conn.set_debuglevel(conn.debug)
if resp.status == 200:
# Success.
return (resp.getheader('etag'),
resp.getheader('x-goog-generation'),
resp.getheader('x-goog-metageneration'))
# Retry timeout (408) and status 500 and 503 errors after a delay.
elif resp.status in [408, 500, 503]:
disposition = ResumableTransferDisposition.WAIT_BEFORE_RETRY
else:
# Catch all for any other error codes.
disposition = ResumableTransferDisposition.ABORT
raise ResumableUploadException('Got response code %d while attempting '
'upload (%s)' %
(resp.status, resp.reason), disposition)
def _attempt_resumable_upload(self, key, fp, file_length, headers, cb,
num_cb):
"""
Attempts a resumable upload.
Returns (etag, generation, metageneration) from server upon success.
Raises ResumableUploadException if any problems occur.
"""
(server_start, server_end) = self.SERVER_HAS_NOTHING
conn = key.bucket.connection
if self.tracker_uri:
# Try to resume existing resumable upload.
try:
(server_start, server_end) = (
self._query_server_pos(conn, file_length))
self.server_has_bytes = server_start
if server_end:
# If the server already has some of the content, we need to
# update the digesters with the bytes that have already been
# uploaded to ensure we get a complete hash in the end.
print 'Catching up hash digest(s) for resumed upload'
fp.seek(0)
# Read local file's bytes through position server has. For
# example, if server has (0, 3) we want to read 3-0+1=4 bytes.
bytes_to_go = server_end + 1
while bytes_to_go:
chunk = fp.read(min(key.BufferSize, bytes_to_go))
if not chunk:
raise ResumableUploadException(
'Hit end of file during resumable upload hash '
'catchup. This should not happen under\n'
'normal circumstances, as it indicates the '
'server has more bytes of this transfer\nthan'
' the current file size. Restarting upload.',
ResumableTransferDisposition.START_OVER)
for alg in self.digesters:
self.digesters[alg].update(chunk)
bytes_to_go -= len(chunk)
if conn.debug >= 1:
print 'Resuming transfer.'
except ResumableUploadException, e:
if conn.debug >= 1:
print 'Unable to resume transfer (%s).' % e.message
self._start_new_resumable_upload(key, headers)
else:
self._start_new_resumable_upload(key, headers)
# upload_start_point allows the code that instantiated the
# ResumableUploadHandler to find out the point from which it started
# uploading (e.g., so it can correctly compute throughput).
if self.upload_start_point is None:
self.upload_start_point = server_end
total_bytes_uploaded = server_end + 1
# Corner case: Don't attempt to seek if we've already uploaded the
# entire file, because if the file is a stream (e.g., the KeyFile
# wrapper around input key when copying between providers), attempting
# to seek to the end of file would result in an InvalidRange error.
if file_length < total_bytes_uploaded:
fp.seek(total_bytes_uploaded)
conn = key.bucket.connection
# Get a new HTTP connection (vs conn.get_http_connection(), which reuses
# pool connections) because httplib requires a new HTTP connection per
# transaction. (Without this, calling http_conn.getresponse() would get
# "ResponseNotReady".)
http_conn = conn.new_http_connection(self.tracker_uri_host, conn.port,
conn.is_secure)
http_conn.set_debuglevel(conn.debug)
# Make sure to close http_conn at end so if a local file read
# failure occurs partway through server will terminate current upload
# and can report that progress on next attempt.
try:
return self._upload_file_bytes(conn, http_conn, fp, file_length,
total_bytes_uploaded, cb, num_cb,
headers)
except (ResumableUploadException, socket.error):
resp = self._query_server_state(conn, file_length)
if resp.status == 400:
raise ResumableUploadException('Got 400 response from server '
'state query after failed resumable upload attempt. This '
'can happen for various reasons, including specifying an '
'invalid request (e.g., an invalid canned ACL) or if the '
'file size changed between upload attempts',
ResumableTransferDisposition.ABORT)
else:
raise
finally:
http_conn.close()
def _check_final_md5(self, key, etag):
"""
Checks that etag from server agrees with md5 computed before upload.
This is important, since the upload could have spanned a number of
hours and multiple processes (e.g., gsutil runs), and the user could
change some of the file and not realize they have inconsistent data.
"""
if key.bucket.connection.debug >= 1:
print 'Checking md5 against etag.'
if key.md5 != etag.strip('"\''):
# Call key.open_read() before attempting to delete the
# (incorrect-content) key, so we perform that request on a
            # different HTTP connection. This is needed because httplib
# will return a "Response not ready" error if you try to perform
# a second transaction on the connection.
key.open_read()
key.close()
key.delete()
raise ResumableUploadException(
'File changed during upload: md5 signature doesn\'t match etag '
'(incorrect uploaded object deleted)',
ResumableTransferDisposition.ABORT)
def handle_resumable_upload_exception(self, e, debug):
if (e.disposition == ResumableTransferDisposition.ABORT_CUR_PROCESS):
if debug >= 1:
print('Caught non-retryable ResumableUploadException (%s); '
'aborting but retaining tracker file' % e.message)
raise
elif (e.disposition == ResumableTransferDisposition.ABORT):
if debug >= 1:
print('Caught non-retryable ResumableUploadException (%s); '
'aborting and removing tracker file' % e.message)
self._remove_tracker_file()
raise
else:
if debug >= 1:
print('Caught ResumableUploadException (%s) - will retry' %
e.message)
def track_progress_less_iterations(self, server_had_bytes_before_attempt,
roll_back_md5=True, debug=0):
        # At this point we had a retryable failure; see if we made progress.
if self.server_has_bytes > server_had_bytes_before_attempt:
self.progress_less_iterations = 0 # If progress, reset counter.
else:
self.progress_less_iterations += 1
if roll_back_md5:
                # Roll back any potential hash updates, as we did not
                # make any progress in this iteration.
self.digesters = self.digesters_before_attempt
if self.progress_less_iterations > self.num_retries:
# Don't retry any longer in the current process.
raise ResumableUploadException(
'Too many resumable upload attempts failed without '
'progress. You might try this upload again later',
ResumableTransferDisposition.ABORT_CUR_PROCESS)
# Use binary exponential backoff to desynchronize client requests.
sleep_time_secs = random.random() * (2**self.progress_less_iterations)
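        # For illustration (just the formula above with concrete numbers): the
        # sleep after the Nth progress-less iteration is drawn uniformly from
        # [0, 2**N) seconds - roughly [0, 2), [0, 4), [0, 8), ... for N = 1, 2,
        # 3 - so clients that failed at the same time quickly spread out.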
if debug >= 1:
print ('Got retryable failure (%d progress-less in a row).\n'
'Sleeping %3.1f seconds before re-trying' %
(self.progress_less_iterations, sleep_time_secs))
time.sleep(sleep_time_secs)
def send_file(self, key, fp, headers, cb=None, num_cb=10, hash_algs=None):
"""
Upload a file to a key into a bucket on GS, using GS resumable upload
protocol.
:type key: :class:`vulpo.scs.key.Key` or subclass
:param key: The Key object to which data is to be uploaded
:type fp: file-like object
:param fp: The file pointer to upload
:type headers: dict
:param headers: The headers to pass along with the PUT request
:type cb: function
:param cb: a callback function that will be called to report progress on
the upload. The callback should accept two integer parameters, the
first representing the number of bytes that have been successfully
transmitted to GS, and the second representing the total number of
bytes that need to be transmitted.
:type num_cb: int
:param num_cb: (optional) If a callback is specified with the cb
parameter, this parameter determines the granularity of the callback
by defining the maximum number of times the callback will be called
during the file transfer. Providing a negative integer will cause
your callback to be called with each buffer read.
:type hash_algs: dictionary
:param hash_algs: (optional) Dictionary mapping hash algorithm
descriptions to corresponding state-ful hashing objects that
implement update(), digest(), and copy() (e.g. hashlib.md5()).
Defaults to {'md5': md5()}.
Raises ResumableUploadException if a problem occurs during the transfer.
"""
if not headers:
headers = {}
# If Content-Type header is present and set to None, remove it.
# This is gsutil's way of asking vulpo to refrain from auto-generating
# that header.
CT = 'Content-Type'
if CT in headers and headers[CT] is None:
del headers[CT]
headers['User-Agent'] = UserAgent
        # Determine the file size in different ways, depending on whether fp is
        # actually a wrapper around a Key or an actual file.
if isinstance(fp, KeyFile):
file_length = fp.getkey().size
else:
fp.seek(0, os.SEEK_END)
file_length = fp.tell()
fp.seek(0)
debug = key.bucket.connection.debug
# Compute the MD5 checksum on the fly.
if hash_algs is None:
hash_algs = {'md5': md5}
self.digesters = dict(
(alg, hash_algs[alg]()) for alg in hash_algs or {})
# Use num-retries from constructor if one was provided; else check
        # for a value specified in the vulpo config file; else default to 6.
if self.num_retries is None:
self.num_retries = config.getint('Vulpo', 'num_retries', 6)
self.progress_less_iterations = 0
while True: # Retry as long as we're making progress.
server_had_bytes_before_attempt = self.server_has_bytes
self.digesters_before_attempt = dict(
(alg, self.digesters[alg].copy())
for alg in self.digesters)
try:
# Save generation and metageneration in class state so caller
# can find these values, for use in preconditions of future
# operations on the uploaded object.
(etag, self.generation, self.metageneration) = (
self._attempt_resumable_upload(key, fp, file_length,
headers, cb, num_cb))
# Get the final digests for the uploaded content.
for alg in self.digesters:
key.local_hashes[alg] = self.digesters[alg].digest()
                # Upload succeeded, so remove the tracker file (if we have one).
self._remove_tracker_file()
self._check_final_md5(key, etag)
key.generation = self.generation
if debug >= 1:
print 'Resumable upload complete.'
return
except self.RETRYABLE_EXCEPTIONS, e:
if debug >= 1:
print('Caught exception (%s)' % e.__repr__())
if isinstance(e, IOError) and e.errno == errno.EPIPE:
# Broken pipe error causes httplib to immediately
# close the socket (http://bugs.python.org/issue5542),
# so we need to close the connection before we resume
# the upload (which will cause a new connection to be
# opened the next time an HTTP request is sent).
key.bucket.connection.connection.close()
except ResumableUploadException, e:
self.handle_resumable_upload_exception(e, debug)
self.track_progress_less_iterations(server_had_bytes_before_attempt,
True, debug)
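# A minimal usage sketch (not part of the original module): how a caller might
# drive send_file() with a progress callback. The bucket object, file path and
# key name are hypothetical; the constructor arguments are assumptions based on
# the tracker-file and num_retries handling documented above.
def _example_resumable_upload(bucket, local_path, key_name='backup.tar.gz'):
    def show_progress(bytes_sent, bytes_total):
        print '%d / %d bytes uploaded' % (bytes_sent, bytes_total)
    handler = ResumableUploadHandler(tracker_file_name='/tmp/upload.tracker',
                                     num_retries=6)
    key = bucket.new_key(key_name)  # assumes a vulpo Bucket with new_key()
    with open(local_path, 'rb') as fp:
        # Raises ResumableUploadException if the transfer cannot complete.
        handler.send_file(key, fp, headers={}, cb=show_progress, num_cb=20)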
|
|
import itertools
import os
import textwrap
import pytest
from tests.lib import pyversion # noqa: F401
from tests.lib import PipTestEnvironment, ResolverVariant, TestData, assert_all_changes
from tests.lib.local_repos import local_checkout
from tests.lib.path import Path
from tests.lib.wheel import make_wheel
@pytest.mark.network
def test_no_upgrade_unless_requested(script: PipTestEnvironment) -> None:
"""
No upgrade if not specifically requested.
"""
script.pip("install", "INITools==0.1")
result = script.pip("install", "INITools")
assert (
not result.files_created
), "pip install INITools upgraded when it should not have"
def test_invalid_upgrade_strategy_causes_error(script: PipTestEnvironment) -> None:
"""
It errors out when the upgrade-strategy is an invalid/unrecognised one
"""
result = script.pip_install_local(
"--upgrade", "--upgrade-strategy=bazinga", "simple", expect_error=True
)
assert result.returncode
assert "invalid choice" in result.stderr
@pytest.mark.usefixtures("with_wheel")
def test_only_if_needed_does_not_upgrade_deps_when_satisfied(
script: PipTestEnvironment, resolver_variant: ResolverVariant
) -> None:
"""
It doesn't upgrade a dependency if it already satisfies the requirements.
"""
script.pip_install_local("simple==2.0")
result = script.pip_install_local(
"--upgrade", "--upgrade-strategy=only-if-needed", "require_simple"
)
assert (
script.site_packages / "require_simple-1.0.dist-info"
) not in result.files_deleted, "should have installed require_simple==1.0"
assert (
script.site_packages / "simple-2.0.dist-info"
) not in result.files_deleted, "should not have uninstalled simple==2.0"
msg = "Requirement already satisfied"
if resolver_variant == "legacy":
msg = msg + ", skipping upgrade: simple"
assert (
msg in result.stdout
), "did not print correct message for not-upgraded requirement"
@pytest.mark.usefixtures("with_wheel")
def test_only_if_needed_does_upgrade_deps_when_no_longer_satisfied(
script: PipTestEnvironment,
) -> None:
"""
It does upgrade a dependency if it no longer satisfies the requirements.
"""
script.pip_install_local("simple==1.0")
result = script.pip_install_local(
"--upgrade", "--upgrade-strategy=only-if-needed", "require_simple"
)
assert (
script.site_packages / "require_simple-1.0.dist-info"
) not in result.files_deleted, "should have installed require_simple==1.0"
expected = script.site_packages / "simple-3.0.dist-info"
result.did_create(expected, message="should have installed simple==3.0")
expected = script.site_packages / "simple-1.0.dist-info"
assert expected in result.files_deleted, "should have uninstalled simple==1.0"
@pytest.mark.usefixtures("with_wheel")
def test_eager_does_upgrade_dependencies_when_currently_satisfied(
script: PipTestEnvironment,
) -> None:
"""
It does upgrade a dependency even if it already satisfies the requirements.
"""
script.pip_install_local("simple==2.0")
result = script.pip_install_local(
"--upgrade", "--upgrade-strategy=eager", "require_simple"
)
assert (
script.site_packages / "require_simple-1.0.dist-info"
) not in result.files_deleted, "should have installed require_simple==1.0"
assert (
script.site_packages / "simple-2.0.dist-info"
) in result.files_deleted, "should have uninstalled simple==2.0"
@pytest.mark.usefixtures("with_wheel")
def test_eager_does_upgrade_dependencies_when_no_longer_satisfied(
script: PipTestEnvironment,
) -> None:
"""
It does upgrade a dependency if it no longer satisfies the requirements.
"""
script.pip_install_local("simple==1.0")
result = script.pip_install_local(
"--upgrade", "--upgrade-strategy=eager", "require_simple"
)
assert (
script.site_packages / "require_simple-1.0.dist-info"
) not in result.files_deleted, "should have installed require_simple==1.0"
result.did_create(
script.site_packages / "simple-3.0.dist-info",
message="should have installed simple==3.0",
)
assert (
script.site_packages / "simple-1.0.dist-info" in result.files_deleted
), "should have uninstalled simple==1.0"
@pytest.mark.network
@pytest.mark.usefixtures("with_wheel")
def test_upgrade_to_specific_version(script: PipTestEnvironment) -> None:
"""
It does upgrade to specific version requested.
"""
script.pip("install", "INITools==0.1")
result = script.pip("install", "INITools==0.2")
assert result.files_created, "pip install with specific version did not upgrade"
assert script.site_packages / "INITools-0.1.dist-info" in result.files_deleted
result.did_create(script.site_packages / "INITools-0.2.dist-info")
@pytest.mark.network
@pytest.mark.usefixtures("with_wheel")
def test_upgrade_if_requested(script: PipTestEnvironment) -> None:
"""
And it does upgrade if requested.
"""
script.pip("install", "INITools==0.1")
result = script.pip("install", "--upgrade", "INITools")
assert result.files_created, "pip install --upgrade did not upgrade"
result.did_not_create(script.site_packages / "INITools-0.1.dist-info")
def test_upgrade_with_newest_already_installed(
script: PipTestEnvironment, data: TestData, resolver_variant: ResolverVariant
) -> None:
"""
If the newest version of a package is already installed, the package should
not be reinstalled and the user should be informed.
"""
script.pip("install", "-f", data.find_links, "--no-index", "simple")
result = script.pip(
"install", "--upgrade", "-f", data.find_links, "--no-index", "simple"
)
assert not result.files_created, "simple upgraded when it should not have"
if resolver_variant == "2020-resolver":
msg = "Requirement already satisfied"
else:
msg = "already up-to-date"
assert msg in result.stdout, result.stdout
@pytest.mark.network
def test_upgrade_force_reinstall_newest(script: PipTestEnvironment) -> None:
"""
Force reinstallation of a package even if it is already at its newest
version if --force-reinstall is supplied.
"""
result = script.pip("install", "INITools")
result.did_create(script.site_packages / "initools")
result2 = script.pip("install", "--upgrade", "--force-reinstall", "INITools")
assert result2.files_updated, "upgrade to INITools 0.3 failed"
result3 = script.pip("uninstall", "initools", "-y")
assert_all_changes(result, result3, [script.venv / "build", "cache"])
@pytest.mark.network
def test_uninstall_before_upgrade(script: PipTestEnvironment) -> None:
"""
Automatic uninstall-before-upgrade.
"""
result = script.pip("install", "INITools==0.2")
result.did_create(script.site_packages / "initools")
result2 = script.pip("install", "INITools==0.3")
assert result2.files_created, "upgrade to INITools 0.3 failed"
result3 = script.pip("uninstall", "initools", "-y")
assert_all_changes(result, result3, [script.venv / "build", "cache"])
@pytest.mark.network
def test_uninstall_before_upgrade_from_url(script: PipTestEnvironment) -> None:
"""
Automatic uninstall-before-upgrade from URL.
"""
result = script.pip("install", "INITools==0.2")
result.did_create(script.site_packages / "initools")
result2 = script.pip(
"install",
"https://files.pythonhosted.org/packages/source/I/INITools/INITools-"
"0.3.tar.gz",
)
assert result2.files_created, "upgrade to INITools 0.3 failed"
result3 = script.pip("uninstall", "initools", "-y")
assert_all_changes(result, result3, [script.venv / "build", "cache"])
@pytest.mark.network
def test_upgrade_to_same_version_from_url(script: PipTestEnvironment) -> None:
"""
    When installing from a URL the same version that is already installed,
    there is no need to uninstall and reinstall if --upgrade is not specified.
"""
result = script.pip("install", "INITools==0.3")
result.did_create(script.site_packages / "initools")
result2 = script.pip(
"install",
"https://files.pythonhosted.org/packages/source/I/INITools/INITools-"
"0.3.tar.gz",
)
assert (
script.site_packages / "initools" not in result2.files_updated
), "INITools 0.3 reinstalled same version"
result3 = script.pip("uninstall", "initools", "-y")
assert_all_changes(result, result3, [script.venv / "build", "cache"])
@pytest.mark.network
def test_upgrade_from_reqs_file(script: PipTestEnvironment) -> None:
"""
Upgrade from a requirements file.
"""
script.scratch_path.joinpath("test-req.txt").write_text(
textwrap.dedent(
"""\
PyLogo<0.4
# and something else to test out:
INITools==0.3
"""
)
)
install_result = script.pip("install", "-r", script.scratch_path / "test-req.txt")
script.scratch_path.joinpath("test-req.txt").write_text(
textwrap.dedent(
"""\
PyLogo
# and something else to test out:
INITools
"""
)
)
script.pip("install", "--upgrade", "-r", script.scratch_path / "test-req.txt")
uninstall_result = script.pip(
"uninstall", "-r", script.scratch_path / "test-req.txt", "-y"
)
assert_all_changes(
install_result,
uninstall_result,
[script.venv / "build", "cache", script.scratch / "test-req.txt"],
)
def test_uninstall_rollback(script: PipTestEnvironment, data: TestData) -> None:
"""
Test uninstall-rollback (using test package with a setup.py
crafted to fail on install).
"""
result = script.pip("install", "-f", data.find_links, "--no-index", "broken==0.1")
result.did_create(script.site_packages / "broken.py")
result2 = script.pip(
"install",
"-f",
data.find_links,
"--no-index",
"broken===0.2broken",
expect_error=True,
)
assert result2.returncode == 1, str(result2)
assert (
script.run("python", "-c", "import broken; print(broken.VERSION)").stdout
== "0.1\n"
)
assert_all_changes(
result.files_after,
result2,
[script.venv / "build"],
)
@pytest.mark.network
@pytest.mark.usefixtures("with_wheel")
def test_should_not_install_always_from_cache(script: PipTestEnvironment) -> None:
"""
    If there is an old cached package, pip should download the newer version.
    Related to issue #175.
"""
script.pip("install", "INITools==0.2")
script.pip("uninstall", "-y", "INITools")
result = script.pip("install", "INITools==0.1")
result.did_not_create(script.site_packages / "INITools-0.2.dist-info")
result.did_create(script.site_packages / "INITools-0.1.dist-info")
@pytest.mark.network
@pytest.mark.usefixtures("with_wheel")
def test_install_with_ignoreinstalled_requested(script: PipTestEnvironment) -> None:
"""
Test old conflicting package is completely ignored
"""
script.pip("install", "INITools==0.1")
result = script.pip("install", "-I", "INITools==0.3")
assert result.files_created, "pip install -I did not install"
# both the old and new metadata should be present.
assert os.path.exists(script.site_packages_path / "INITools-0.1.dist-info")
assert os.path.exists(script.site_packages_path / "INITools-0.3.dist-info")
@pytest.mark.network
def test_upgrade_vcs_req_with_no_dists_found(
script: PipTestEnvironment, tmpdir: Path
) -> None:
"""It can upgrade a VCS requirement that has no distributions otherwise."""
req = "{checkout}#egg=pip-test-package".format(
checkout=local_checkout(
"git+https://github.com/pypa/pip-test-package.git",
tmpdir,
)
)
script.pip("install", req)
result = script.pip("install", "-U", req)
assert not result.returncode
@pytest.mark.network
def test_upgrade_vcs_req_with_dist_found(script: PipTestEnvironment) -> None:
"""It can upgrade a VCS requirement that has distributions on the index."""
# TODO(pnasrat) Using local_checkout fails on windows - oddness with the
# test path urls/git.
req = "{url}#egg=pretend".format(
url=(
"git+git://github.com/alex/pretend@e7f26ad7dbcb4a02a4995aade4"
"743aad47656b27"
),
)
script.pip("install", req, expect_stderr=True)
result = script.pip("install", "-U", req, expect_stderr=True)
assert "pypi.org" not in result.stdout, result.stdout
@pytest.mark.parametrize(
"req1, req2",
list(
itertools.product(
["foo.bar", "foo_bar", "foo-bar"],
["foo.bar", "foo_bar", "foo-bar"],
)
),
)
def test_install_find_existing_package_canonicalize(
script: PipTestEnvironment, req1: str, req2: str
) -> None:
"""Ensure an already-installed dist is found no matter how the dist name
was normalized on installation. (pypa/pip#8645)
"""
# Create and install a package that's not available in the later stage.
req_container = script.scratch_path.joinpath("foo-bar")
req_container.mkdir()
req_path = make_wheel("foo_bar", "1.0").save_to_dir(req_container)
script.pip("install", "--no-index", req_path)
# Depend on the previously installed, but now unavailable package.
pkg_container = script.scratch_path.joinpath("pkg")
pkg_container.mkdir()
make_wheel(
"pkg",
"1.0",
metadata_updates={"Requires-Dist": req2},
).save_to_dir(pkg_container)
# Ensure the previously installed package can be correctly used to match
# the dependency.
result = script.pip(
"install",
"--no-index",
"--find-links",
pkg_container,
"pkg",
)
satisfied_message = f"Requirement already satisfied: {req2}"
assert satisfied_message in result.stdout, str(result)
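# A minimal illustration of the canonicalization behaviour exercised by the
# test above. pip relies on packaging's canonicalize_name() internally; the
# snippet below is only a sketch and is not used by these tests:
#   from packaging.utils import canonicalize_name
#   canonicalize_name("foo.bar") == canonicalize_name("foo_bar") == "foo-bar"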
|
|
#! /usr/bin/env python2
# Copyright (c) 2011 ARM Limited
# All rights reserved
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Giacomo Gabrielli
# Pipeline activity viewer for the O3 CPU model.
import optparse
import os
import sys
import copy
# Temporary storage for instructions. The queue is filled out of order
# until it reaches 'max_threshold' instructions. It is then sorted and
# instructions are printed until their number drops to 'min_threshold'.
# It is assumed that the instructions are not out of order by more than
# 'min_threshold' places - otherwise they will appear out of order.
insts = {
    'queue': [], # Instructions to print.
    'max_threshold':2000, # Instructions are sorted and printed when
# their number reaches this threshold.
'min_threshold':1000, # Printing stops when this number is reached.
'sn_start':0, # The first instruction seq. number to be printed.
'sn_stop':0, # The last instruction seq. number to be printed.
'tick_start':0, # The first tick to be printed
'tick_stop':0, # The last tick to be printed
    'tick_drift':2000, # Used to calculate the start and the end of the main
                       # loop. We assume here that the instructions are not
                       # out of order for more than 2000 CPU ticks,
                       # otherwise the print may not start/stop
                       # at the time specified by tick_start/stop.
'only_committed':0, # Set if only committed instructions are printed.
}
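# For reference, a hypothetical O3PipeView trace record and how its fields map
# onto the parsing in process_trace() below (tick values, PC and disassembly
# text are made up; only the colon-separated field layout matters):
#   O3PipeView:fetch:<tick>:<pc>:<upc>:<seq_num>:<disassembly>
#   O3PipeView:decode:<tick>
#   O3PipeView:rename:<tick>
#   O3PipeView:dispatch:<tick>
#   O3PipeView:issue:<tick>
#   O3PipeView:complete:<tick>
#   O3PipeView:retire:<tick>
#   (retire records may carry additional store-completion fields when present)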
def process_trace(trace, outfile, cycle_time, width, color, timestamps,
committed_only, store_completions, start_tick, stop_tick, start_sn, stop_sn):
global insts
insts['sn_start'] = start_sn
insts['sn_stop'] = stop_sn
insts['tick_start'] = start_tick
insts['tick_stop'] = stop_tick
insts['tick_drift'] = insts['tick_drift'] * cycle_time
insts['only_committed'] = committed_only
line = None
fields = None
# Skip lines up to the starting tick
if start_tick != 0:
while True:
line = trace.readline()
if not line: return
fields = line.split(':')
if fields[0] != 'O3PipeView': continue
if int(fields[2]) >= start_tick: break
elif start_sn != 0:
while True:
line = trace.readline()
if not line: return
fields = line.split(':')
if fields[0] != 'O3PipeView': continue
if fields[1] == 'fetch' and int(fields[5]) >= start_sn: break
else:
line = trace.readline()
if not line: return
fields = line.split(':')
# Skip lines up to next instruction fetch
while fields[0] != 'O3PipeView' or fields[1] != 'fetch':
line = trace.readline()
if not line: return
fields = line.split(':')
# Print header
outfile.write('// f = fetch, d = decode, n = rename, p = dispatch, '
'i = issue, c = complete, r = retire')
if store_completions:
outfile.write(', s = store-complete')
outfile.write('\n\n')
outfile.write(' ' + 'timeline'.center(width) +
' ' + 'tick'.center(15) +
' ' + 'pc.upc'.center(12) +
' ' + 'disasm'.ljust(25) +
' ' + 'seq_num'.center(10))
if timestamps:
outfile.write('timestamps'.center(25))
outfile.write('\n')
# Region of interest
curr_inst = {}
while True:
if fields[0] == 'O3PipeView':
curr_inst[fields[1]] = int(fields[2])
if fields[1] == 'fetch':
if ((stop_tick > 0 and int(fields[2]) > stop_tick+insts['tick_drift']) or
(stop_sn > 0 and int(fields[5]) > (stop_sn+insts['max_threshold']))):
                    print_insts(outfile, cycle_time, width, color, timestamps,
                                store_completions, 0)
return
(curr_inst['pc'], curr_inst['upc']) = fields[3:5]
curr_inst['sn'] = int(fields[5])
curr_inst['disasm'] = ' '.join(fields[6][:-1].split())
elif fields[1] == 'retire':
if curr_inst['retire'] == 0:
curr_inst['disasm'] = '-----' + curr_inst['disasm']
if store_completions:
curr_inst[fields[3]] = int(fields[4])
queue_inst(outfile, curr_inst, cycle_time, width, color, timestamps, store_completions)
line = trace.readline()
if not line:
print_insts(outfile, cycle_time, width, color, timestamps, store_completions, 0)
return
fields = line.split(':')
# Sorts instructions according to sequence number
def compare_by_sn(a, b):
return cmp(a['sn'], b['sn'])
# Puts a new instruction into the print queue.
# Sorts and prints instructions when their number reaches the threshold value.
def queue_inst(outfile, inst, cycle_time, width, color, timestamps, store_completions):
global insts
l_copy = copy.deepcopy(inst)
insts['queue'].append(l_copy)
if len(insts['queue']) > insts['max_threshold']:
print_insts(outfile, cycle_time, width, color, timestamps, store_completions, insts['min_threshold'])
# Sorts and prints instructions in the print queue
def print_insts(outfile, cycle_time, width, color, timestamps, store_completions, lower_threshold):
global insts
insts['queue'].sort(compare_by_sn)
while len(insts['queue']) > lower_threshold:
        print_item = insts['queue'].pop(0)
        # As the instructions are processed out of order, the main loop starts
        # earlier than specified by start_sn/tick and finishes later than what
        # is defined in stop_sn/tick.
        # Therefore, we have to filter out instructions that fall outside
        # the specified boundaries.
        if (insts['sn_start'] > 0 and print_item['sn'] < insts['sn_start']):
            continue  # earlier than the starting sequence number
        if (insts['sn_stop'] > 0 and print_item['sn'] > insts['sn_stop']):
            continue  # later than the ending sequence number
        if (insts['tick_start'] > 0 and print_item['fetch'] < insts['tick_start']):
            continue  # earlier than the starting tick number
        if (insts['tick_stop'] > 0 and print_item['fetch'] > insts['tick_stop']):
            continue  # later than the ending tick number
        if (insts['only_committed'] != 0 and print_item['retire'] == 0):
            continue  # retire is set to zero if it hasn't been completed
print_inst(outfile, print_item, cycle_time, width, color, timestamps, store_completions)
# Prints a single instruction
def print_inst(outfile, inst, cycle_time, width, color, timestamps, store_completions):
if color:
from m5.util.terminal import termcap
else:
from m5.util.terminal import no_termcap as termcap
# Pipeline stages
stages = [{'name': 'fetch',
'color': termcap.Blue + termcap.Reverse,
'shorthand': 'f'},
{'name': 'decode',
'color': termcap.Yellow + termcap.Reverse,
'shorthand': 'd'},
{'name': 'rename',
'color': termcap.Magenta + termcap.Reverse,
'shorthand': 'n'},
{'name': 'dispatch',
'color': termcap.Green + termcap.Reverse,
'shorthand': 'p'},
{'name': 'issue',
'color': termcap.Red + termcap.Reverse,
'shorthand': 'i'},
{'name': 'complete',
'color': termcap.Cyan + termcap.Reverse,
'shorthand': 'c'},
{'name': 'retire',
'color': termcap.Blue + termcap.Reverse,
'shorthand': 'r'}
]
if store_completions:
stages.append(
{'name': 'store',
'color': termcap.Yellow + termcap.Reverse,
'shorthand': 's'})
# Print
time_width = width * cycle_time
base_tick = (inst['fetch'] / time_width) * time_width
    # Find out the time of the last event - it may not
    # be 'retire' if the instruction is not completed.
last_event_time = max(inst['fetch'], inst['decode'],inst['rename'],
inst['dispatch'],inst['issue'], inst['complete'], inst['retire'])
if store_completions:
last_event_time = max(last_event_time, inst['store'])
    # A timeline shorter than time_width is printed in compact form, where
    # the print continues at the start of the same line.
if ((last_event_time - inst['fetch']) < time_width):
num_lines = 1 # compact form
else:
num_lines = ((last_event_time - base_tick) / time_width) + 1
curr_color = termcap.Normal
    # This will visually distinguish completed and abandoned instructions.
if inst['retire'] == 0: dot = '=' # abandoned instruction
else: dot = '.' # completed instruction
for i in range(num_lines):
start_tick = base_tick + i * time_width
end_tick = start_tick + time_width
if num_lines == 1: # compact form
end_tick += (inst['fetch'] - base_tick)
events = []
for stage_idx in range(len(stages)):
tick = inst[stages[stage_idx]['name']]
if tick != 0:
if tick >= start_tick and tick < end_tick:
events.append((tick % time_width,
stages[stage_idx]['name'],
stage_idx, tick))
events.sort()
outfile.write('[')
pos = 0
if num_lines == 1 and events[0][2] != 0: # event is not fetch
curr_color = stages[events[0][2] - 1]['color']
for event in events:
if (stages[event[2]]['name'] == 'dispatch' and
inst['dispatch'] == inst['issue']):
continue
outfile.write(curr_color + dot * ((event[0] / cycle_time) - pos))
outfile.write(stages[event[2]]['color'] +
stages[event[2]]['shorthand'])
if event[3] != last_event_time: # event is not the last one
curr_color = stages[event[2]]['color']
else:
curr_color = termcap.Normal
pos = (event[0] / cycle_time) + 1
outfile.write(curr_color + dot * (width - pos) + termcap.Normal +
']-(' + str(base_tick + i * time_width).rjust(15) + ') ')
if i == 0:
outfile.write('%s.%s %s [%s]' % (
inst['pc'].rjust(10),
inst['upc'],
inst['disasm'].ljust(25),
str(inst['sn']).rjust(10)))
if timestamps:
outfile.write(' f=%s, r=%s' % (inst['fetch'], inst['retire']))
outfile.write('\n')
else:
outfile.write('...'.center(12) + '\n')
def validate_range(my_range):
my_range = [int(i) for i in my_range.split(':')]
if (len(my_range) != 2 or
my_range[0] < 0 or
my_range[1] > 0 and my_range[0] >= my_range[1]):
return None
return my_range
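# For example (hypothetical values): validate_range('1000:5000') returns
# [1000, 5000], validate_range('0:-1') returns [0, -1] (an open-ended range),
# and a reversed range such as '5000:1000' returns None.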
def main():
# Parse options
usage = ('%prog [OPTION]... TRACE_FILE')
parser = optparse.OptionParser(usage=usage)
parser.add_option(
'-o',
dest='outfile',
default=os.path.join(os.getcwd(), 'o3-pipeview.out'),
help="output file (default: '%default')")
parser.add_option(
'-t',
dest='tick_range',
default='0:-1',
help="tick range (default: '%default'; -1 == inf.)")
parser.add_option(
'-i',
dest='inst_range',
default='0:-1',
help="instruction range (default: '%default'; -1 == inf.)")
parser.add_option(
'-w',
dest='width',
type='int', default=80,
help="timeline width (default: '%default')")
parser.add_option(
'--color',
action='store_true', default=False,
help="enable colored output (default: '%default')")
parser.add_option(
'-c', '--cycle-time',
type='int', default=1000,
help="CPU cycle time in ticks (default: '%default')")
parser.add_option(
'--timestamps',
action='store_true', default=False,
help="print fetch and retire timestamps (default: '%default')")
parser.add_option(
'--only_committed',
action='store_true', default=False,
help="display only committed (completed) instructions (default: '%default')")
parser.add_option(
'--store_completions',
action='store_true', default=False,
help="additionally display store completion ticks (default: '%default')")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error('incorrect number of arguments')
sys.exit(1)
tick_range = validate_range(options.tick_range)
if not tick_range:
parser.error('invalid range')
sys.exit(1)
inst_range = validate_range(options.inst_range)
if not inst_range:
parser.error('invalid range')
sys.exit(1)
# Process trace
print 'Processing trace... ',
with open(args[0], 'r') as trace:
with open(options.outfile, 'w') as out:
process_trace(trace, out, options.cycle_time, options.width,
options.color, options.timestamps,
options.only_committed, options.store_completions,
*(tick_range + inst_range))
print 'done!'
if __name__ == '__main__':
sys.path.append(os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'..', 'src', 'python'))
main()
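# Example invocation (file names are hypothetical; the trace must contain
# O3PipeView records, i.e. a gem5 run with the O3PipeView debug flag enabled):
#   ./o3-pipeview.py -c 500 -o pipeview.out --color m5out/trace.out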
|
|
"""Test environment for client library tests.
This module has functions for creating keyspaces, tablets for the client
library test.
"""
#!/usr/bin/env python
# coding: utf-8
import hashlib
import random
import struct
import threading
import time
import traceback
import unittest
import environment
import tablet
import utils
from clientlib_tests import topo_schema
from clientlib_tests import db_class_unsharded
from clientlib_tests import db_class_sharded
from clientlib_tests import db_class_lookup
from vtdb import database_context
from vtdb import db_object
from vtdb import keyrange
from vtdb import keyrange_constants
from vtdb import keyspace
from vtdb import dbexceptions
from vtdb import shard_constants
from vtdb import vtdb_logger
from vtdb import vtgatev2
from vtdb import vtgate_cursor
from zk import zkocc
conn_class = vtgatev2
__tablets = None
shard_names = ['-80', '80-']
shard_kid_map = {'-80': [527875958493693904, 626750931627689502,
345387386794260318, 332484755310826578,
1842642426274125671, 1326307661227634652,
1761124146422844620, 1661669973250483744,
3361397649937244239, 2444880764308344533],
'80-': [9767889778372766922, 9742070682920810358,
10296850775085416642, 9537430901666854108,
10440455099304929791, 11454183276974683945,
11185910247776122031, 10460396697869122981,
13379616110062597001, 12826553979133932576],
}
pack_kid = struct.Struct('!Q').pack
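# For illustration: the keyspace ids in shard_kid_map are unsigned 64-bit
# integers, and pack_kid turns one into the 8-byte big-endian string that
# vtgate expects wherever an explicit keyspace_id value is needed, e.g.
#   pack_kid(shard_kid_map['-80'][0])  # -> 8-byte packed keyspace_id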
def setUpModule():
try:
environment.topo_server().setup()
setup_topology()
# start mysql instance external to the test
global __tablets
setup_procs = []
for tablet in __tablets:
setup_procs.append(tablet.init_mysql())
utils.wait_procs(setup_procs)
create_db()
start_tablets()
utils.VtGate().start()
except:
tearDownModule()
raise
def tearDownModule():
global __tablets
if utils.options.skip_teardown:
return
if __tablets is not None:
tablet.kill_tablets(__tablets)
teardown_procs = []
for t in __tablets:
teardown_procs.append(t.teardown_mysql())
utils.wait_procs(teardown_procs, raise_on_error=False)
environment.topo_server().teardown()
utils.kill_sub_processes()
utils.remove_tmp_files()
if __tablets is not None:
for t in __tablets:
t.remove_tree()
def setup_topology():
global __tablets
if __tablets is None:
__tablets = []
keyspaces = topo_schema.keyspaces
for ks in keyspaces:
ks_name = ks[0]
ks_type = ks[1]
utils.run_vtctl(['CreateKeyspace', ks_name])
if ks_type == shard_constants.UNSHARDED:
shard_master = tablet.Tablet()
shard_replica = tablet.Tablet()
shard_master.init_tablet('master', keyspace=ks_name, shard='0')
__tablets.append(shard_master)
shard_replica.init_tablet('replica', keyspace=ks_name, shard='0')
__tablets.append(shard_replica)
elif ks_type == shard_constants.RANGE_SHARDED:
utils.run_vtctl(['SetKeyspaceShardingInfo', '-force', ks_name,
'keyspace_id', 'uint64'])
for shard_name in shard_names:
shard_master = tablet.Tablet()
shard_replica = tablet.Tablet()
shard_master.init_tablet('master', keyspace=ks_name, shard=shard_name)
__tablets.append(shard_master)
shard_replica.init_tablet('replica', keyspace=ks_name, shard=shard_name)
__tablets.append(shard_replica)
utils.run_vtctl(['RebuildKeyspaceGraph', ks_name], auto_log=True)
def create_db():
global __tablets
for t in __tablets:
t.create_db(t.dbname)
ks_name = t.keyspace
for table_tuple in topo_schema.keyspace_table_map[ks_name]:
t.mquery(t.dbname, table_tuple[1])
def start_tablets():
global __tablets
# start tablets
for t in __tablets:
t.start_vttablet(wait_for_state=None)
# wait for them to come in serving state
for t in __tablets:
t.wait_for_vttablet_state('SERVING')
# InitShardMaster for master tablets
for t in __tablets:
if t.tablet_type == 'master':
utils.run_vtctl(['InitShardMaster', t.keyspace+'/'+t.shard,
t.tablet_alias], auto_log=True)
for ks in topo_schema.keyspaces:
ks_name = ks[0]
ks_type = ks[1]
utils.run_vtctl(['RebuildKeyspaceGraph', ks_name],
auto_log=True)
if ks_type == shard_constants.RANGE_SHARDED:
utils.check_srv_keyspace('test_nj', ks_name,
'Partitions(master): -80 80-\n' +
'Partitions(rdonly): -80 80-\n' +
'Partitions(replica): -80 80-\n')
def get_connection(user=None, password=None):
timeout = 10.0
conn = None
vtgate_addrs = {"vt": [utils.vtgate.addr(),]}
conn = conn_class.connect(vtgate_addrs, timeout,
user=user, password=password)
return conn
def get_keyrange(shard_name):
kr = None
if shard_name == keyrange_constants.SHARD_ZERO:
kr = keyrange.KeyRange(keyrange_constants.NON_PARTIAL_KEYRANGE)
else:
kr = keyrange.KeyRange(shard_name)
return kr
def _delete_all(keyspace, shard_name, table_name):
vtgate_conn = get_connection()
  # This write sets up the test with a fresh insert,
  # hence it is performed directly on the connection.
vtgate_conn.begin()
vtgate_conn._execute("delete from %s" % table_name, {},
keyspace, 'master',
keyranges=[get_keyrange(shard_name)])
vtgate_conn.commit()
def restart_vtgate(extra_args={}):
port = utils.vtgate.port
utils.vtgate.kill()
utils.VtGate(port=port).start(extra_args=extra_args)
def populate_table():
keyspace = "KS_UNSHARDED"
_delete_all(keyspace, keyrange_constants.SHARD_ZERO, 'vt_unsharded')
vtgate_conn = get_connection()
cursor = vtgate_conn.cursor(keyspace, 'master', keyranges=[get_keyrange(keyrange_constants.SHARD_ZERO),],writable=True)
cursor.begin()
for x in xrange(10):
cursor.execute('insert into vt_unsharded (id, msg) values (%s, %s)' % (str(x), 'msg'), {})
cursor.commit()
class TestUnshardedTable(unittest.TestCase):
def setUp(self):
self.vtgate_addrs = {"vt": [utils.vtgate.addr(),]}
self.dc = database_context.DatabaseContext(self.vtgate_addrs)
self.all_ids = []
with database_context.WriteTransaction(self.dc) as context:
for x in xrange(20):
ret_id = db_class_unsharded.VtUnsharded.insert(context.get_cursor(),
msg="test message")
self.all_ids.append(ret_id)
def tearDown(self):
_delete_all("KS_UNSHARDED", "0", 'vt_unsharded')
def test_read(self):
id_val = self.all_ids[0]
with database_context.ReadFromMaster(self.dc) as context:
rows = db_class_unsharded.VtUnsharded.select_by_id(
context.get_cursor(), id_val)
expected = 1
self.assertEqual(len(rows), expected, "wrong number of rows fetched %d, expected %d" % (len(rows), expected))
self.assertEqual(rows[0].id, id_val, "wrong row fetched")
def test_update_and_read(self):
id_val = self.all_ids[0]
where_column_value_pairs = [('id', id_val)]
with database_context.WriteTransaction(self.dc) as context:
update_cols = [('msg', "test update"),]
db_class_unsharded.VtUnsharded.update_columns(context.get_cursor(),
where_column_value_pairs,
update_column_value_pairs=update_cols)
with database_context.ReadFromMaster(self.dc) as context:
rows = db_class_unsharded.VtUnsharded.select_by_id(context.get_cursor(), id_val)
self.assertEqual(len(rows), 1, "wrong number of rows fetched")
self.assertEqual(rows[0].msg, "test update", "wrong row fetched")
def test_delete_and_read(self):
id_val = self.all_ids[-1]
where_column_value_pairs = [('id', id_val)]
with database_context.WriteTransaction(self.dc) as context:
db_class_unsharded.VtUnsharded.delete_by_columns(context.get_cursor(),
where_column_value_pairs)
with database_context.ReadFromMaster(self.dc) as context:
rows = db_class_unsharded.VtUnsharded.select_by_id(context.get_cursor(), id_val)
self.assertEqual(len(rows), 0, "wrong number of rows fetched")
self.all_ids = self.all_ids[:-1]
def test_count(self):
with database_context.ReadFromMaster(self.dc) as context:
count = db_class_unsharded.VtUnsharded.get_count(
context.get_cursor(), msg="test message")
expected = len(self.all_ids)
self.assertEqual(count, expected, "wrong count fetched; expected %d got %d" % (expected, count))
def test_min_id(self):
with database_context.ReadFromMaster(self.dc) as context:
min_id = db_class_unsharded.VtUnsharded.get_min(
context.get_cursor())
expected = min(self.all_ids)
self.assertEqual(min_id, expected, "wrong min value fetched; expected %d got %d" % (expected, min_id))
def test_max_id(self):
with database_context.ReadFromMaster(self.dc) as context:
max_id = db_class_unsharded.VtUnsharded.get_max(
context.get_cursor())
self.all_ids.sort()
expected = max(self.all_ids)
self.assertEqual(max_id, expected, "wrong max value fetched; expected %d got %d" % (expected, max_id))
class TestRangeSharded(unittest.TestCase):
def populate_tables(self):
self.user_id_list = []
self.song_id_list = []
self.user_song_map = {}
r = random.Random()
# This should create the lookup entries and sharding key.
with database_context.WriteTransaction(self.dc) as context:
for x in xrange(20):
# vt_user - EntityRangeSharded; creates username:user_id lookup
user_id = db_class_sharded.VtUser.insert(context.get_cursor(),
username="user%s" % x, msg="test message")
self.user_id_list.append(user_id)
# vt_user_email - RangeSharded; references user_id:keyspace_id hash
email = 'user%[email protected]' % x
m = hashlib.md5()
m.update(email)
email_hash = m.digest()
entity_id_map={'user_id':user_id}
db_class_sharded.VtUserEmail.insert(
context.get_cursor(entity_id_map=entity_id_map),
user_id=user_id, email=email,
email_hash=email_hash)
# vt_song - EntityRangeSharded; creates song_id:user_id lookup
num_songs_for_user = r.randint(1, 5)
for i in xrange(num_songs_for_user):
song_id = db_class_sharded.VtSong.insert(context.get_cursor(),
user_id=user_id, title="Test Song")
self.song_id_list.append(song_id)
self.user_song_map.setdefault(user_id, []).append(song_id)
# vt_song_detail - RangeSharded; references song_id:user_id lookup
entity_id_map = {'song_id':song_id}
db_class_sharded.VtSongDetail.insert(context.get_cursor(entity_id_map=entity_id_map),
song_id=song_id, album_name="Test album",
artist="Test artist")
def setUp(self):
self.vtgate_addrs = {"vt": [utils.vtgate.addr(),]}
self.dc = database_context.DatabaseContext(self.vtgate_addrs)
self.populate_tables()
def tearDown(self):
with database_context.WriteTransaction(self.dc) as context:
for uid in self.user_id_list:
try:
db_class_sharded.VtUser.delete_by_columns(context.get_cursor(entity_id_map={'id':uid}),
[('id', uid),])
db_class_sharded.VtUserEmail.delete_by_columns(context.get_cursor(entity_id_map={'user_id':uid}),
[('user_id', uid),])
db_class_sharded.VtSong.delete_by_columns(context.get_cursor(entity_id_map={'user_id':uid}),
[('user_id', uid),])
song_id_list = self.user_song_map[uid]
for sid in song_id_list:
db_class_sharded.VtSongDetail.delete_by_columns(context.get_cursor(entity_id_map={'song_id':sid}),
[('song_id', sid),])
except dbexceptions.DatabaseError as e:
if str(e) == "DB Row not found":
pass
def test_sharding_key_read(self):
user_id = self.user_id_list[0]
with database_context.ReadFromMaster(self.dc) as context:
where_column_value_pairs = [('id', user_id),]
entity_id_map = dict(where_column_value_pairs)
rows = db_class_sharded.VtUser.select_by_columns(
context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
self.assertEqual(len(rows), 1, "wrong number of rows fetched")
where_column_value_pairs = [('user_id', user_id),]
entity_id_map = dict(where_column_value_pairs)
rows = db_class_sharded.VtUserEmail.select_by_columns(
context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
self.assertEqual(len(rows), 1, "wrong number of rows fetched")
where_column_value_pairs = [('user_id', user_id),]
entity_id_map = dict(where_column_value_pairs)
rows = db_class_sharded.VtSong.select_by_columns(
context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
self.assertEqual(len(rows), len(self.user_song_map[user_id]), "wrong number of rows fetched")
def test_entity_id_read(self):
user_id = self.user_id_list[0]
with database_context.ReadFromMaster(self.dc) as context:
entity_id_map = {'username': 'user0'}
rows = db_class_sharded.VtUser.select_by_columns(
context.get_cursor(entity_id_map=entity_id_map),
[('id', user_id),])
self.assertEqual(len(rows), 1, "wrong number of rows fetched")
where_column_value_pairs = [('id', self.user_song_map[user_id][0]),]
entity_id_map = dict(where_column_value_pairs)
rows = db_class_sharded.VtSong.select_by_columns(
context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
self.assertEqual(len(rows), 1, "wrong number of rows fetched")
where_column_value_pairs = [('song_id', self.user_song_map[user_id][0]),]
entity_id_map = dict(where_column_value_pairs)
rows = db_class_sharded.VtSongDetail.select_by_columns(
context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
self.assertEqual(len(rows), 1, "wrong number of rows fetched")
def test_in_clause_read(self):
with database_context.ReadFromMaster(self.dc) as context:
user_id_list = [self.user_id_list[0], self.user_id_list[1]]
where_column_value_pairs = (('id', user_id_list),)
entity_id_map = dict(where_column_value_pairs)
rows = db_class_sharded.VtUser.select_by_ids(
context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
self.assertEqual(len(rows), 2, "wrong number of rows fetched")
got = [row.id for row in rows]
got.sort()
self.assertEqual(user_id_list, got, "wrong rows fetched; expected %s got %s" % (user_id_list, got))
username_list = [row.username for row in rows]
username_list.sort()
where_column_value_pairs = (('username', username_list),)
entity_id_map = dict(where_column_value_pairs)
rows = db_class_sharded.VtUser.select_by_ids(
context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
self.assertEqual(len(rows), 2, "wrong number of rows fetched")
got = [row.username for row in rows]
got.sort()
self.assertEqual(username_list, got, "wrong rows fetched; expected %s got %s" % (username_list, got))
where_column_value_pairs = (('user_id', user_id_list),)
entity_id_map = dict(where_column_value_pairs)
rows = db_class_sharded.VtUserEmail.select_by_ids(
context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
self.assertEqual(len(rows), 2, "wrong number of rows fetched")
got = [row.user_id for row in rows]
got.sort()
self.assertEqual(user_id_list, got, "wrong rows fetched; expected %s got %s" % (user_id_list, got))
song_id_list = []
for user_id in user_id_list:
song_id_list.extend(self.user_song_map[user_id])
song_id_list.sort()
where_column_value_pairs = [('id', song_id_list),]
entity_id_map = dict(where_column_value_pairs)
rows = db_class_sharded.VtSong.select_by_columns(
context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
got = [row.id for row in rows]
got.sort()
self.assertEqual(song_id_list, got, "wrong rows fetched %s got %s" % (song_id_list, got))
where_column_value_pairs = [('song_id', song_id_list),]
entity_id_map = dict(where_column_value_pairs)
rows = db_class_sharded.VtSongDetail.select_by_columns(
context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
got = [row.song_id for row in rows]
got.sort()
self.assertEqual(song_id_list, got, "wrong rows fetched %s got %s" % (song_id_list, got))
def test_keyrange_read(self):
where_column_value_pairs = []
with database_context.ReadFromMaster(self.dc) as context:
rows1 = db_class_sharded.VtUser.select_by_columns(
context.get_cursor(keyrange='-80'), where_column_value_pairs)
rows2 = db_class_sharded.VtUser.select_by_columns(
context.get_cursor(keyrange='80-'), where_column_value_pairs)
fetched_rows = len(rows1) + len(rows2)
expected = len(self.user_id_list)
self.assertEqual(fetched_rows, expected, "wrong number of rows fetched expected:%d got:%d" % (expected, fetched_rows))
def test_scatter_read(self):
where_column_value_pairs = []
with database_context.ReadFromMaster(self.dc) as context:
rows = db_class_sharded.VtUser.select_by_columns(
context.get_cursor(keyrange=keyrange_constants.NON_PARTIAL_KEYRANGE),
where_column_value_pairs)
self.assertEqual(len(rows), len(self.user_id_list), "wrong number of rows fetched, expecting %d got %d" % (len(self.user_id_list), len(rows)))
def test_streaming_read(self):
where_column_value_pairs = []
with database_context.ReadFromMaster(self.dc) as context:
rows = db_class_sharded.VtUser.select_by_columns_streaming(
context.get_cursor(keyrange=keyrange_constants.NON_PARTIAL_KEYRANGE),
where_column_value_pairs)
got_user_id_list = []
for r in rows:
got_user_id_list.append(r.id)
self.assertEqual(len(got_user_id_list), len(self.user_id_list), "wrong number of rows fetched")
def update_columns(self):
with database_context.WriteTransaction(self.dc) as context:
user_id = self.user_id_list[1]
where_column_value_pairs = [('id', user_id),]
entity_id_map = {'id': user_id}
new_username = 'new_user%s' % user_id
update_cols = [('username', new_username),]
db_class_sharded.VtUser.update_columns(context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs,
update_column_value_pairs=update_cols)
# verify the updated value.
where_column_value_pairs = [('id', user_id),]
rows = db_class_sharded.VtUser.select_by_columns(
context.get_cursor(entity_id_map={'id': user_id}),
where_column_value_pairs)
self.assertEqual(len(rows), 1, "wrong number of rows fetched")
self.assertEqual(new_username, rows[0].username)
where_column_value_pairs = [('user_id', user_id),]
entity_id_map = {'user_id': user_id}
new_email = 'new_user%[email protected]' % user_id
m = hashlib.md5()
m.update(new_email)
email_hash = m.digest()
update_cols = [('email', new_email), ('email_hash', email_hash)]
db_class_sharded.VtUserEmail.update_columns(context.get_cursor(entity_id_map={'user_id':user_id}),
where_column_value_pairs,
update_column_value_pairs=update_cols)
# verify the updated value.
with database_context.ReadFromMaster(self.dc) as context:
where_column_value_pairs = [('user_id', user_id),]
entity_id_map = dict(where_column_value_pairs)
rows = db_class_sharded.VtUserEmail.select_by_ids(
context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
self.assertEqual(len(rows), 1, "wrong number of rows fetched")
self.assertEqual(new_email, rows[0].email)
self.user_id_list.sort()
def delete_columns(self):
user_id = self.user_id_list[-1]
with database_context.WriteTransaction(self.dc) as context:
where_column_value_pairs = [('id', user_id),]
entity_id_map = {'id': user_id}
db_class_sharded.VtUser.delete_by_columns(context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
where_column_value_pairs = [('user_id', user_id),]
entity_id_map = {'user_id': user_id}
db_class_sharded.VtUserEmail.delete_by_columns(context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
with database_context.ReadFromMaster(self.dc) as context:
rows = db_class_sharded.VtUser.select_by_columns(
context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
self.assertEqual(len(rows), 0, "wrong number of rows fetched")
rows = db_class_sharded.VtUserEmail.select_by_ids(
context.get_cursor(entity_id_map=entity_id_map),
where_column_value_pairs)
self.assertEqual(len(rows), 0, "wrong number of rows fetched")
self.user_id_list = self.user_id_list[:-1]
self.user_id_list.sort()
def test_count(self):
with database_context.ReadFromMaster(self.dc) as context:
count = db_class_sharded.VtUser.get_count(
context.get_cursor(keyrange=keyrange_constants.NON_PARTIAL_KEYRANGE),
msg="test message")
expected = len(self.user_id_list)
self.assertEqual(count, expected, "wrong count fetched; expected %d got %d" % (expected, count))
def test_min_id(self):
with database_context.ReadFromMaster(self.dc) as context:
min_id = db_class_sharded.VtUser.get_min(
context.get_cursor(keyrange=keyrange_constants.NON_PARTIAL_KEYRANGE))
self.user_id_list.sort()
expected = min(self.user_id_list)
rows1 = db_class_sharded.VtUser.select_by_columns(
context.get_cursor(keyrange=keyrange_constants.NON_PARTIAL_KEYRANGE), [])
id_list = [row.id for row in rows1]
self.assertEqual(min_id, expected, "wrong min value fetched; expected %d got %d" % (expected, min_id))
def test_max_id(self):
with database_context.ReadFromMaster(self.dc) as context:
max_id = db_class_sharded.VtUser.get_max(
context.get_cursor(keyrange=keyrange_constants.NON_PARTIAL_KEYRANGE))
expected = max(self.user_id_list)
self.assertEqual(max_id, expected, "wrong max value fetched; expected %d got %d" % (expected, max_id))
def test_batch_read(self):
# TODO(sougou): fix
return
# 1. Create select queries using DB classes.
query_list = []
bv_list = []
user_id_list = [self.user_id_list[0], self.user_id_list[1]]
where_column_value_pairs = (('id', user_id_list),)
entity_id_map = dict(where_column_value_pairs)
q, bv = db_class_sharded.VtUser.create_select_query(where_column_value_pairs)
query_list.append(q)
bv_list.append(bv)
where_column_value_pairs = (('user_id', user_id_list),)
q, bv = db_class_sharded.VtUserEmail.create_select_query(where_column_value_pairs)
query_list.append(q)
bv_list.append(bv)
with database_context.ReadFromMaster(self.dc) as context:
# 2. Cursor Creation using one of the DB classes.
cursor = context.get_cursor(entity_id_map=entity_id_map)(db_class_sharded.VtUser)
# 3. Batch execution of reads.
results = db_object.execute_batch_read(
cursor, query_list, bv_list)
self.assertEqual(len(results), len(query_list))
res_ids = [row.id for row in results[0]]
res_user_ids = [row.user_id for row in results[1]]
self.assertEqual(res_ids, user_id_list)
self.assertEqual(res_user_ids, user_id_list)
def test_batch_write(self):
# TODO(sougou): fix
return
# 1. Create DMLs using DB Classes.
query_list = []
bv_list = []
# Update VtUser table.
user_id = self.user_id_list[1]
where_column_value_pairs = (('id', user_id),)
entity_id_map = dict(where_column_value_pairs)
new_username = 'new_user%s' % user_id
update_cols = [('username', new_username),]
q, bv = db_class_sharded.VtUser.create_update_query(
where_column_value_pairs, update_column_value_pairs=update_cols)
query_list.append(q)
bv_list.append(bv)
# Update VtUserEmail table.
where_column_value_pairs = [('user_id', user_id),]
new_email = 'new_user%[email protected]' % user_id
m = hashlib.md5()
m.update(new_email)
email_hash = m.digest()
update_cols = [('email', new_email), ('email_hash', email_hash)]
q, bv = db_class_sharded.VtUserEmail.create_update_query(
where_column_value_pairs, update_column_value_pairs=update_cols)
query_list.append(q)
bv_list.append(bv)
# Delete a VtSong row
where_column_value_pairs = [('user_id', user_id),]
q, bv = db_class_sharded.VtSong.create_delete_query(where_column_value_pairs)
query_list.append(q)
bv_list.append(bv)
with database_context.WriteTransaction(self.dc) as context:
# 2. Routing for query_list is done by associating
# the common entity_id to the cursor.
# NOTE: cursor creation needs binding to a particular db class,
# so we create a writable cursor using the common entity (user_id).
# This entity_id is used to derive the keyspace_id for routing the dmls.
entity_id_map = {'id': user_id}
cursor = context.get_cursor(entity_id_map=entity_id_map)(db_class_sharded.VtUser)
# 3. Execute the writable batch query.
results = db_object.execute_batch_write(
cursor, query_list, bv_list)
# 4. Verify results
self.assertEqual(len(results), len(query_list))
self.assertEqual(results[0]['rowcount'], 1, "VtUser update didn't update 1 row")
self.assertEqual(results[1]['rowcount'], 1, "VtUserEmail update didn't update 1 row")
self.assertEqual(results[2]['rowcount'], len(self.user_song_map[user_id]),
"VtSong deleted '%d' rows, expected '%d'" % (results[2]['rowcount'], len(self.user_song_map[user_id])))
if __name__ == '__main__':
utils.main()
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._virtual_machine_extensions_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request, build_update_request_initial
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualMachineExtensionsOperations:
"""VirtualMachineExtensionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2021_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _create_or_update_initial(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
extension_parameters: "_models.VirtualMachineExtension",
**kwargs: Any
) -> "_models.VirtualMachineExtension":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineExtension"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(extension_parameters, 'VirtualMachineExtension')
request = build_create_or_update_request_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualMachineExtension', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualMachineExtension', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}'} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
extension_parameters: "_models.VirtualMachineExtension",
**kwargs: Any
) -> AsyncLROPoller["_models.VirtualMachineExtension"]:
"""The operation to create or update the extension.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine where the extension should be created or
updated.
:type vm_name: str
:param vm_extension_name: The name of the virtual machine extension.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Create Virtual Machine Extension
operation.
:type extension_parameters: ~azure.mgmt.compute.v2021_03_01.models.VirtualMachineExtension
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualMachineExtension or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2021_03_01.models.VirtualMachineExtension]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineExtension"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
extension_parameters=extension_parameters,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('VirtualMachineExtension', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}'} # type: ignore
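# A minimal usage sketch (not part of the generated code; it assumes the async
# ComputeManagementClient from azure.mgmt.compute.aio exposes this operation
# group as ``client.virtual_machine_extensions``, and the resource names are
# placeholders):
#
#   poller = await client.virtual_machine_extensions.begin_create_or_update(
#       resource_group_name="my-rg",
#       vm_name="my-vm",
#       vm_extension_name="customScript",
#       extension_parameters=extension_model,  # a VirtualMachineExtension
#   )
#   extension = await poller.result()  # waits until the LRO completes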
async def _update_initial(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
extension_parameters: "_models.VirtualMachineExtensionUpdate",
**kwargs: Any
) -> "_models.VirtualMachineExtension":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineExtension"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(extension_parameters, 'VirtualMachineExtensionUpdate')
request = build_update_request_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self._update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachineExtension', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}'} # type: ignore
@distributed_trace_async
async def begin_update(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
extension_parameters: "_models.VirtualMachineExtensionUpdate",
**kwargs: Any
) -> AsyncLROPoller["_models.VirtualMachineExtension"]:
"""The operation to update the extension.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine where the extension should be updated.
:type vm_name: str
:param vm_extension_name: The name of the virtual machine extension.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Update Virtual Machine Extension
operation.
:type extension_parameters:
~azure.mgmt.compute.v2021_03_01.models.VirtualMachineExtensionUpdate
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualMachineExtension or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2021_03_01.models.VirtualMachineExtension]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineExtension"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
extension_parameters=extension_parameters,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('VirtualMachineExtension', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
subscription_id=self._config.subscription_id,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}'} # type: ignore
@distributed_trace_async
async def begin_delete(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""The operation to delete the extension.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine where the extension should be deleted.
:type vm_name: str
:param vm_extension_name: The name of the virtual machine extension.
:type vm_extension_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}'} # type: ignore
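# Deletion follows the same long-running-operation pattern; a sketch under the
# same client assumption as above:
#
#   poller = await client.virtual_machine_extensions.begin_delete(
#       "my-rg", "my-vm", "customScript")
#   await poller.result()  # resolves to None once the extension is gone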
@distributed_trace_async
async def get(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.VirtualMachineExtension":
"""The operation to get the extension.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine containing the extension.
:type vm_name: str
:param vm_extension_name: The name of the virtual machine extension.
:type vm_extension_name: str
:param expand: The expand expression to apply on the operation.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineExtension, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2021_03_01.models.VirtualMachineExtension
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineExtension"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
subscription_id=self._config.subscription_id,
expand=expand,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachineExtension', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}'} # type: ignore
@distributed_trace_async
async def list(
self,
resource_group_name: str,
vm_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.VirtualMachineExtensionsListResult":
"""The operation to get all extensions of a Virtual Machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine containing the extension.
:type vm_name: str
:param expand: The expand expression to apply on the operation.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineExtensionsListResult, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2021_03_01.models.VirtualMachineExtensionsListResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineExtensionsListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_request(
resource_group_name=resource_group_name,
vm_name=vm_name,
subscription_id=self._config.subscription_id,
expand=expand,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachineExtensionsListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions'} # type: ignore
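# Read-path sketch under the same client assumption; note that ``list`` here
# returns a VirtualMachineExtensionsListResult rather than a pager, so the
# extensions are expected on its ``value`` attribute:
#
#   ext = await client.virtual_machine_extensions.get(
#       "my-rg", "my-vm", "customScript", expand="instanceView")
#   listed = await client.virtual_machine_extensions.list("my-rg", "my-vm")
#   names = [e.name for e in (listed.value or [])]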
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_db import exception as db_exc
from nova import context
from nova import exception
from nova import objects
from nova.objects import fields
from nova import test
from nova.tests import fixtures
from nova.tests import uuidsentinel
DISK_INVENTORY = dict(
total=200,
reserved=10,
min_unit=2,
max_unit=5,
step_size=1,
allocation_ratio=1.0,
resource_class=fields.ResourceClass.DISK_GB
)
DISK_ALLOCATION = dict(
consumer_id=uuidsentinel.disk_consumer,
used=2,
resource_class=fields.ResourceClass.DISK_GB
)
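# Note on these fixtures: in the resource-provider model the usable capacity of
# an inventory is (total - reserved) * allocation_ratio, and each allocation
# must lie between min_unit and max_unit in multiples of step_size. The
# DISK_ALLOCATION above (used=2) therefore fits comfortably inside
# DISK_INVENTORY's (200 - 10) * 1.0 = 190 units of DISK_GB capacity.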
class ResourceProviderBaseCase(test.TestCase):
def setUp(self):
super(ResourceProviderBaseCase, self).setUp()
self.context = context.RequestContext('fake-user', 'fake-project')
def _make_allocation(self, rp_uuid=None):
rp_uuid = rp_uuid or uuidsentinel.allocation_resource_provider
rp = objects.ResourceProvider(
context=self.context,
uuid=rp_uuid,
name=rp_uuid)
rp.create()
alloc = objects.Allocation(
self.context,
resource_provider=rp,
**DISK_ALLOCATION
)
alloc.create()
return rp, alloc
class ResourceProviderTestCase(ResourceProviderBaseCase):
"""Test resource-provider objects' lifecycles."""
def test_create_resource_provider_requires_uuid(self):
resource_provider = objects.ResourceProvider(
context = self.context)
self.assertRaises(exception.ObjectActionError,
resource_provider.create)
def test_create_resource_provider(self):
created_resource_provider = objects.ResourceProvider(
context=self.context,
uuid=uuidsentinel.fake_resource_provider,
name=uuidsentinel.fake_resource_name,
)
created_resource_provider.create()
self.assertIsInstance(created_resource_provider.id, int)
retrieved_resource_provider = objects.ResourceProvider.get_by_uuid(
self.context,
uuidsentinel.fake_resource_provider
)
self.assertEqual(retrieved_resource_provider.id,
created_resource_provider.id)
self.assertEqual(retrieved_resource_provider.uuid,
created_resource_provider.uuid)
self.assertEqual(retrieved_resource_provider.name,
created_resource_provider.name)
self.assertEqual(0, created_resource_provider.generation)
self.assertEqual(0, retrieved_resource_provider.generation)
def test_save_resource_provider(self):
created_resource_provider = objects.ResourceProvider(
context=self.context,
uuid=uuidsentinel.fake_resource_provider,
name=uuidsentinel.fake_resource_name,
)
created_resource_provider.create()
created_resource_provider.name = 'new-name'
created_resource_provider.save()
retrieved_resource_provider = objects.ResourceProvider.get_by_uuid(
self.context,
uuidsentinel.fake_resource_provider
)
self.assertEqual('new-name', retrieved_resource_provider.name)
def test_destroy_resource_provider(self):
created_resource_provider = objects.ResourceProvider(
context=self.context,
uuid=uuidsentinel.fake_resource_provider,
name=uuidsentinel.fake_resource_name,
)
created_resource_provider.create()
created_resource_provider.destroy()
self.assertRaises(exception.NotFound,
objects.ResourceProvider.get_by_uuid,
self.context,
uuidsentinel.fake_resource_provider)
self.assertRaises(exception.NotFound,
created_resource_provider.destroy)
def test_destroy_allocated_resource_provider_fails(self):
rp, allocation = self._make_allocation()
self.assertRaises(exception.ResourceProviderInUse,
rp.destroy)
def test_destroy_resource_provider_destroy_inventory(self):
resource_provider = objects.ResourceProvider(
context=self.context,
uuid=uuidsentinel.fake_resource_provider,
name=uuidsentinel.fake_resource_name,
)
resource_provider.create()
disk_inventory = objects.Inventory(
context=self.context,
resource_provider=resource_provider,
**DISK_INVENTORY
)
disk_inventory.create()
inventories = objects.InventoryList.get_all_by_resource_provider_uuid(
self.context, resource_provider.uuid)
self.assertEqual(1, len(inventories))
resource_provider.destroy()
inventories = objects.InventoryList.get_all_by_resource_provider_uuid(
self.context, resource_provider.uuid)
self.assertEqual(0, len(inventories))
def test_create_inventory_with_uncreated_provider(self):
resource_provider = objects.ResourceProvider(
context=self.context,
uuid=uuidsentinel.inventory_resource_provider
)
disk_inventory = objects.Inventory(
context=self.context,
resource_provider=resource_provider,
**DISK_INVENTORY
)
self.assertRaises(exception.ObjectActionError,
disk_inventory.create)
def test_create_and_update_inventory(self):
resource_provider = objects.ResourceProvider(
context=self.context,
uuid=uuidsentinel.inventory_resource_provider,
name='foo',
)
resource_provider.create()
resource_class = fields.ResourceClass.DISK_GB
disk_inventory = objects.Inventory(
context=self.context,
resource_provider=resource_provider,
**DISK_INVENTORY
)
disk_inventory.create()
self.assertEqual(resource_class, disk_inventory.resource_class)
self.assertEqual(resource_provider,
disk_inventory.resource_provider)
self.assertEqual(DISK_INVENTORY['allocation_ratio'],
disk_inventory.allocation_ratio)
self.assertEqual(DISK_INVENTORY['total'],
disk_inventory.total)
disk_inventory.total = 32
disk_inventory.save()
inventories = objects.InventoryList.get_all_by_resource_provider_uuid(
self.context, resource_provider.uuid)
self.assertEqual(1, len(inventories))
self.assertEqual(32, inventories[0].total)
inventories[0].total = 33
inventories[0].save()
reloaded_inventories = (
objects.InventoryList.get_all_by_resource_provider_uuid(
self.context, resource_provider.uuid))
self.assertEqual(33, reloaded_inventories[0].total)
@mock.patch('nova.objects.resource_provider.LOG')
def test_set_inventory_over_capacity(self, mock_log):
rp = objects.ResourceProvider(context=self.context,
uuid=uuidsentinel.rp_uuid,
name=uuidsentinel.rp_name)
rp.create()
disk_inv = objects.Inventory(
resource_provider=rp,
resource_class=fields.ResourceClass.DISK_GB,
total=1024,
reserved=15,
min_unit=10,
max_unit=100,
step_size=10,
allocation_ratio=1.0)
vcpu_inv = objects.Inventory(
resource_provider=rp,
resource_class=fields.ResourceClass.VCPU,
total=12,
reserved=0,
min_unit=1,
max_unit=12,
step_size=1,
allocation_ratio=16.0)
inv_list = objects.InventoryList(objects=[disk_inv, vcpu_inv])
rp.set_inventory(inv_list)
self.assertFalse(mock_log.warning.called)
# Allocate something reasonable for the above inventory
alloc = objects.Allocation(
context=self.context,
resource_provider=rp,
consumer_id=uuidsentinel.consumer,
resource_class='DISK_GB',
used=512)
alloc.create()
# Update our inventory to over-subscribe us after the above allocation
disk_inv.total = 400
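# With reserved=15 and allocation_ratio=1.0 this leaves a capacity of
# (400 - 15) * 1.0 = 385, which is less than the 512 already allocated above.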
rp.set_inventory(inv_list)
# We should succeed, but have logged a warning for going over on disk
mock_log.warning.assert_called_once_with(
mock.ANY, {'uuid': rp.uuid, 'resource': 'DISK_GB'})
def test_provider_modify_inventory(self):
rp = objects.ResourceProvider(context=self.context,
uuid=uuidsentinel.rp_uuid,
name=uuidsentinel.rp_name)
rp.create()
saved_generation = rp.generation
disk_inv = objects.Inventory(
resource_provider=rp,
resource_class=fields.ResourceClass.DISK_GB,
total=1024,
reserved=15,
min_unit=10,
max_unit=100,
step_size=10,
allocation_ratio=1.0)
vcpu_inv = objects.Inventory(
resource_provider=rp,
resource_class=fields.ResourceClass.VCPU,
total=12,
reserved=0,
min_unit=1,
max_unit=12,
step_size=1,
allocation_ratio=16.0)
# set to new list
inv_list = objects.InventoryList(objects=[disk_inv, vcpu_inv])
rp.set_inventory(inv_list)
# generation has bumped
self.assertEqual(saved_generation + 1, rp.generation)
saved_generation = rp.generation
new_inv_list = objects.InventoryList.get_all_by_resource_provider_uuid(
self.context, uuidsentinel.rp_uuid)
self.assertEqual(2, len(new_inv_list))
resource_classes = [inv.resource_class for inv in new_inv_list]
self.assertIn(fields.ResourceClass.VCPU, resource_classes)
self.assertIn(fields.ResourceClass.DISK_GB, resource_classes)
# reset list to just disk_inv
inv_list = objects.InventoryList(objects=[disk_inv])
rp.set_inventory(inv_list)
# generation has bumped
self.assertEqual(saved_generation + 1, rp.generation)
saved_generation = rp.generation
new_inv_list = objects.InventoryList.get_all_by_resource_provider_uuid(
self.context, uuidsentinel.rp_uuid)
self.assertEqual(1, len(new_inv_list))
resource_classes = [inv.resource_class for inv in new_inv_list]
self.assertNotIn(fields.ResourceClass.VCPU, resource_classes)
self.assertIn(fields.ResourceClass.DISK_GB, resource_classes)
self.assertEqual(1024, new_inv_list[0].total)
# update existing disk inv to new settings
disk_inv = objects.Inventory(
resource_provider=rp,
resource_class=fields.ResourceClass.DISK_GB,
total=2048,
reserved=15,
min_unit=10,
max_unit=100,
step_size=10,
allocation_ratio=1.0)
rp.update_inventory(disk_inv)
# generation has bumped
self.assertEqual(saved_generation + 1, rp.generation)
saved_generation = rp.generation
new_inv_list = objects.InventoryList.get_all_by_resource_provider_uuid(
self.context, uuidsentinel.rp_uuid)
self.assertEqual(1, len(new_inv_list))
self.assertEqual(2048, new_inv_list[0].total)
# fail when inventory bad
disk_inv = objects.Inventory(
resource_provider=rp,
resource_class=fields.ResourceClass.DISK_GB,
total=2048,
reserved=2048)
disk_inv.obj_set_defaults()
error = self.assertRaises(exception.InvalidInventoryCapacity,
rp.update_inventory, disk_inv)
self.assertIn("Invalid inventory for '%s'"
% fields.ResourceClass.DISK_GB, str(error))
self.assertIn("on resource provider '%s'." % rp.uuid, str(error))
# generation has not bumped
self.assertEqual(saved_generation, rp.generation)
# delete inventory
rp.delete_inventory(fields.ResourceClass.DISK_GB)
# generation has bumped
self.assertEqual(saved_generation + 1, rp.generation)
saved_generation = rp.generation
new_inv_list = objects.InventoryList.get_all_by_resource_provider_uuid(
self.context, uuidsentinel.rp_uuid)
result = new_inv_list.find(fields.ResourceClass.DISK_GB)
self.assertIsNone(result)
self.assertRaises(exception.NotFound, rp.delete_inventory,
fields.ResourceClass.DISK_GB)
# check inventory list is empty
inv_list = objects.InventoryList.get_all_by_resource_provider_uuid(
self.context, uuidsentinel.rp_uuid)
self.assertEqual(0, len(inv_list))
# add some inventory
rp.add_inventory(vcpu_inv)
inv_list = objects.InventoryList.get_all_by_resource_provider_uuid(
self.context, uuidsentinel.rp_uuid)
self.assertEqual(1, len(inv_list))
# generation has bumped
self.assertEqual(saved_generation + 1, rp.generation)
saved_generation = rp.generation
# add same inventory again
self.assertRaises(db_exc.DBDuplicateEntry,
rp.add_inventory, vcpu_inv)
# generation has not bumped
self.assertEqual(saved_generation, rp.generation)
# fail when generation wrong
rp.generation = rp.generation - 1
self.assertRaises(exception.ConcurrentUpdateDetected,
rp.set_inventory, inv_list)
def test_delete_inventory_not_found(self):
rp = objects.ResourceProvider(context=self.context,
uuid=uuidsentinel.rp_uuid,
name=uuidsentinel.rp_name)
rp.create()
error = self.assertRaises(exception.NotFound, rp.delete_inventory,
'DISK_GB')
self.assertIn('No inventory of class DISK_GB found for delete',
str(error))
def test_delete_inventory_with_allocation(self):
rp, allocation = self._make_allocation()
disk_inv = objects.Inventory(resource_provider=rp,
resource_class='DISK_GB',
total=2048)
disk_inv.obj_set_defaults()
inv_list = objects.InventoryList(objects=[disk_inv])
rp.set_inventory(inv_list)
error = self.assertRaises(exception.InventoryInUse,
rp.delete_inventory,
'DISK_GB')
self.assertIn(
"Inventory for 'DISK_GB' on resource provider '%s' in use"
% rp.uuid, str(error))
def test_update_inventory_not_found(self):
rp = objects.ResourceProvider(context=self.context,
uuid=uuidsentinel.rp_uuid,
name=uuidsentinel.rp_name)
rp.create()
disk_inv = objects.Inventory(resource_provider=rp,
resource_class='DISK_GB',
total=2048)
disk_inv.obj_set_defaults()
error = self.assertRaises(exception.NotFound, rp.update_inventory,
disk_inv)
self.assertIn('No inventory of class DISK_GB found for update',
str(error))
@mock.patch('nova.objects.resource_provider.LOG')
def test_update_inventory_violates_allocation(self, mock_log):
# Compute nodes that are reconfigured have to be able to set
# their inventory to something that violates allocations so
# we need to make that possible.
rp, allocation = self._make_allocation()
disk_inv = objects.Inventory(resource_provider=rp,
resource_class='DISK_GB',
total=2048)
disk_inv.obj_set_defaults()
inv_list = objects.InventoryList(objects=[disk_inv])
rp.set_inventory(inv_list)
# attempt to set inventory to less than currently allocated
# amounts
new_total = 1
disk_inv = objects.Inventory(
resource_provider=rp,
resource_class=fields.ResourceClass.DISK_GB, total=new_total)
disk_inv.obj_set_defaults()
rp.update_inventory(disk_inv)
usages = objects.UsageList.get_all_by_resource_provider_uuid(
self.context, rp.uuid)
self.assertEqual(allocation.used, usages[0].usage)
inv_list = objects.InventoryList.get_all_by_resource_provider_uuid(
self.context, rp.uuid)
self.assertEqual(new_total, inv_list[0].total)
mock_log.warning.assert_called_once_with(
mock.ANY, {'uuid': rp.uuid, 'resource': 'DISK_GB'})
def test_add_invalid_inventory(self):
rp = objects.ResourceProvider(context=self.context,
uuid=uuidsentinel.rp_uuid,
name=uuidsentinel.rp_name)
rp.create()
disk_inv = objects.Inventory(
resource_provider=rp,
resource_class=fields.ResourceClass.DISK_GB,
total=1024, reserved=2048)
disk_inv.obj_set_defaults()
error = self.assertRaises(exception.InvalidInventoryCapacity,
rp.add_inventory,
disk_inv)
self.assertIn("Invalid inventory for '%s'"
% fields.ResourceClass.DISK_GB, str(error))
self.assertIn("on resource provider '%s'."
% rp.uuid, str(error))
class ResourceProviderListTestCase(ResourceProviderBaseCase):
def setUp(self):
super(ResourceProviderListTestCase, self).setUp()
self.useFixture(fixtures.Database())
self.useFixture(fixtures.Database(database='api'))
self.context = context.RequestContext('fake-user', 'fake-project')
def test_get_all_by_filters(self):
for rp_i in ['1', '2']:
uuid = getattr(uuidsentinel, 'rp_uuid_' + rp_i)
name = 'rp_name_' + rp_i
rp = objects.ResourceProvider(self.context, name=name, uuid=uuid)
rp.create()
resource_providers = objects.ResourceProviderList.get_all_by_filters(
self.context)
self.assertEqual(2, len(resource_providers))
resource_providers = objects.ResourceProviderList.get_all_by_filters(
self.context, filters={'name': 'rp_name_1'})
self.assertEqual(1, len(resource_providers))
resource_providers = objects.ResourceProviderList.get_all_by_filters(
self.context, filters={'can_host': 1})
self.assertEqual(0, len(resource_providers))
resource_providers = objects.ResourceProviderList.get_all_by_filters(
self.context, filters={'uuid': getattr(uuidsentinel, 'rp_uuid_2')})
self.assertEqual(1, len(resource_providers))
self.assertEqual('rp_name_2', resource_providers[0].name)
class TestAllocation(ResourceProviderBaseCase):
def test_create_list_and_delete_allocation(self):
resource_provider = objects.ResourceProvider(
context=self.context,
uuid=uuidsentinel.allocation_resource_provider,
name=uuidsentinel.allocation_resource_name
)
resource_provider.create()
resource_class = fields.ResourceClass.DISK_GB
disk_allocation = objects.Allocation(
context=self.context,
resource_provider=resource_provider,
**DISK_ALLOCATION
)
disk_allocation.create()
self.assertEqual(resource_class, disk_allocation.resource_class)
self.assertEqual(resource_provider,
disk_allocation.resource_provider)
self.assertEqual(DISK_ALLOCATION['used'],
disk_allocation.used)
self.assertEqual(DISK_ALLOCATION['consumer_id'],
disk_allocation.consumer_id)
self.assertIsInstance(disk_allocation.id, int)
allocations = objects.AllocationList.get_all_by_resource_provider_uuid(
self.context, resource_provider.uuid)
self.assertEqual(1, len(allocations))
self.assertEqual(DISK_ALLOCATION['used'],
allocations[0].used)
allocations[0].destroy()
allocations = objects.AllocationList.get_all_by_resource_provider_uuid(
self.context, resource_provider.uuid)
self.assertEqual(0, len(allocations))
def test_destroy(self):
rp, allocation = self._make_allocation()
allocations = objects.AllocationList.get_all_by_resource_provider_uuid(
self.context, rp.uuid)
self.assertEqual(1, len(allocations))
objects.Allocation._destroy(self.context, allocation.id)
allocations = objects.AllocationList.get_all_by_resource_provider_uuid(
self.context, rp.uuid)
self.assertEqual(0, len(allocations))
self.assertRaises(exception.NotFound, objects.Allocation._destroy,
self.context, allocation.id)
def test_get_allocations_from_db(self):
rp, allocation = self._make_allocation()
allocations = objects.AllocationList._get_allocations_from_db(
self.context, rp.uuid)
self.assertEqual(1, len(allocations))
self.assertEqual(rp.id, allocations[0].resource_provider_id)
self.assertEqual(allocation.resource_provider.id,
allocations[0].resource_provider_id)
allocations = objects.AllocationList._get_allocations_from_db(
self.context, uuidsentinel.bad_rp_uuid)
self.assertEqual(0, len(allocations))
def test_get_all_by_resource_provider(self):
rp, allocation = self._make_allocation()
allocations = objects.AllocationList.get_all_by_resource_provider_uuid(
self.context, rp.uuid)
self.assertEqual(1, len(allocations))
self.assertEqual(rp.id, allocations[0].resource_provider.id)
self.assertEqual(allocation.resource_provider.id,
allocations[0].resource_provider.id)
def test_get_all_multiple_providers(self):
# This confirms that the join with resource provider is
# behaving correctly.
rp1, allocation1 = self._make_allocation(uuidsentinel.rp1)
rp2, allocation2 = self._make_allocation(uuidsentinel.rp2)
allocations = objects.AllocationList.get_all_by_resource_provider_uuid(
self.context, rp1.uuid)
self.assertEqual(1, len(allocations))
self.assertEqual(rp1.id, allocations[0].resource_provider.id)
self.assertEqual(allocation1.resource_provider.id,
allocations[0].resource_provider.id)
# add more allocations for the first resource provider
# of the same class
alloc3 = objects.Allocation(
self.context,
consumer_id=uuidsentinel.consumer1,
resource_class=fields.ResourceClass.DISK_GB,
resource_provider=rp1,
used=2,
)
alloc3.create()
allocations = objects.AllocationList.get_all_by_resource_provider_uuid(
self.context, rp1.uuid)
self.assertEqual(2, len(allocations))
# add more allocations for the first resource provider
# of a different class
alloc4 = objects.Allocation(
self.context,
consumer_id=uuidsentinel.consumer1,
resource_class=fields.ResourceClass.IPV4_ADDRESS,
resource_provider=rp1,
used=4,
)
alloc4.create()
allocations = objects.AllocationList.get_all_by_resource_provider_uuid(
self.context, rp1.uuid)
self.assertEqual(3, len(allocations))
self.assertEqual(rp1.uuid, allocations[0].resource_provider.uuid)
allocations = objects.AllocationList.get_all_by_resource_provider_uuid(
self.context, rp2.uuid)
self.assertEqual(1, len(allocations))
self.assertEqual(rp2.uuid, allocations[0].resource_provider.uuid)
self.assertIn(fields.ResourceClass.DISK_GB,
[allocation.resource_class
for allocation in allocations])
self.assertNotIn(fields.ResourceClass.IPV4_ADDRESS,
[allocation.resource_class
for allocation in allocations])
class TestAllocationListCreateDelete(ResourceProviderBaseCase):
def test_allocation_checking(self):
"""Test that allocation check logic works with 2 resource classes on
one provider.
If this fails, we get a KeyError at create_all()
"""
consumer_uuid = uuidsentinel.consumer
consumer_uuid2 = uuidsentinel.consumer2
# Create one resource provider with 2 classes
rp1_name = uuidsentinel.rp1_name
rp1_uuid = uuidsentinel.rp1_uuid
rp1_class = fields.ResourceClass.DISK_GB
rp1_used = 6
rp2_class = fields.ResourceClass.IPV4_ADDRESS
rp2_used = 2
rp1 = objects.ResourceProvider(
self.context, name=rp1_name, uuid=rp1_uuid)
rp1.create()
inv = objects.Inventory(resource_provider=rp1,
resource_class=rp1_class,
total=1024)
inv.obj_set_defaults()
inv2 = objects.Inventory(resource_provider=rp1,
resource_class=rp2_class,
total=255, reserved=2)
inv2.obj_set_defaults()
inv_list = objects.InventoryList(objects=[inv, inv2])
rp1.set_inventory(inv_list)
# create the allocations for a first consumer
allocation_1 = objects.Allocation(resource_provider=rp1,
consumer_id=consumer_uuid,
resource_class=rp1_class,
used=rp1_used)
allocation_2 = objects.Allocation(resource_provider=rp1,
consumer_id=consumer_uuid,
resource_class=rp2_class,
used=rp2_used)
allocation_list = objects.AllocationList(
self.context, objects=[allocation_1, allocation_2])
allocation_list.create_all()
# create the allocations for a second consumer; until we have
# allocations for more than one consumer in the db, we won't
# actually be doing real allocation math, which is what triggers
# the sql monster.
allocation_1 = objects.Allocation(resource_provider=rp1,
consumer_id=consumer_uuid2,
resource_class=rp1_class,
used=rp1_used)
allocation_2 = objects.Allocation(resource_provider=rp1,
consumer_id=consumer_uuid2,
resource_class=rp2_class,
used=rp2_used)
allocation_list = objects.AllocationList(
self.context, objects=[allocation_1, allocation_2])
# If we are joining wrong, this will be a KeyError
allocation_list.create_all()
def test_allocation_list_create(self):
consumer_uuid = uuidsentinel.consumer
# Create two resource providers
rp1_name = uuidsentinel.rp1_name
rp1_uuid = uuidsentinel.rp1_uuid
rp1_class = fields.ResourceClass.DISK_GB
rp1_used = 6
rp2_name = uuidsentinel.rp2_name
rp2_uuid = uuidsentinel.rp2_uuid
rp2_class = fields.ResourceClass.IPV4_ADDRESS
rp2_used = 2
rp1 = objects.ResourceProvider(
self.context, name=rp1_name, uuid=rp1_uuid)
rp1.create()
rp2 = objects.ResourceProvider(
self.context, name=rp2_name, uuid=rp2_uuid)
rp2.create()
# Two allocations, one for each resource provider.
allocation_1 = objects.Allocation(resource_provider=rp1,
consumer_id=consumer_uuid,
resource_class=rp1_class,
used=rp1_used)
allocation_2 = objects.Allocation(resource_provider=rp2,
consumer_id=consumer_uuid,
resource_class=rp2_class,
used=rp2_used)
allocation_list = objects.AllocationList(
self.context, objects=[allocation_1, allocation_2])
# There's no inventory, we have a failure.
error = self.assertRaises(exception.InvalidInventory,
allocation_list.create_all)
# Confirm that the resource class string, not index, is in
# the exception and resource providers are listed by uuid.
self.assertIn(rp1_class, str(error))
self.assertIn(rp2_class, str(error))
self.assertIn(rp1.uuid, str(error))
self.assertIn(rp2.uuid, str(error))
# Add inventory for one of the two resource providers. This should also
# fail, since rp2 has no inventory.
inv = objects.Inventory(resource_provider=rp1,
resource_class=rp1_class,
total=1024)
inv.obj_set_defaults()
inv_list = objects.InventoryList(objects=[inv])
rp1.set_inventory(inv_list)
self.assertRaises(exception.InvalidInventory,
allocation_list.create_all)
# Add inventory for the second resource provider
inv = objects.Inventory(resource_provider=rp2,
resource_class=rp2_class,
total=255, reserved=2)
inv.obj_set_defaults()
inv_list = objects.InventoryList(objects=[inv])
rp2.set_inventory(inv_list)
# Now the allocations will work.
allocation_list.create_all()
# Check that those allocations changed usage on each
# resource provider.
rp1_usage = objects.UsageList.get_all_by_resource_provider_uuid(
self.context, rp1_uuid)
rp2_usage = objects.UsageList.get_all_by_resource_provider_uuid(
self.context, rp2_uuid)
self.assertEqual(rp1_used, rp1_usage[0].usage)
self.assertEqual(rp2_used, rp2_usage[0].usage)
# redo one allocation
# TODO(cdent): This does not currently behave as expected
# because a new allocation is created, adding to the total
# used rather than replacing the previous one.
rp1_used += 1
allocation_1 = objects.Allocation(resource_provider=rp1,
consumer_id=consumer_uuid,
resource_class=rp1_class,
used=rp1_used)
allocation_list = objects.AllocationList(
self.context, objects=[allocation_1])
allocation_list.create_all()
rp1_usage = objects.UsageList.get_all_by_resource_provider_uuid(
self.context, rp1_uuid)
self.assertEqual(rp1_used, rp1_usage[0].usage)
# delete the allocations for the consumer
# NOTE(cdent): The database uses 'consumer_id' for the
# column, presumably because some ids might not be uuids, at
# some point in the future.
consumer_allocations = objects.AllocationList.get_all_by_consumer_id(
self.context, consumer_uuid)
consumer_allocations.delete_all()
rp1_usage = objects.UsageList.get_all_by_resource_provider_uuid(
self.context, rp1_uuid)
rp2_usage = objects.UsageList.get_all_by_resource_provider_uuid(
self.context, rp2_uuid)
self.assertEqual(0, rp1_usage[0].usage)
self.assertEqual(0, rp2_usage[0].usage)
class UsageListTestCase(ResourceProviderBaseCase):
def test_get_all_null(self):
for uuid in [uuidsentinel.rp_uuid_1, uuidsentinel.rp_uuid_2]:
rp = objects.ResourceProvider(self.context, name=uuid, uuid=uuid)
rp.create()
usage_list = objects.UsageList.get_all_by_resource_provider_uuid(
self.context, uuidsentinel.rp_uuid_1)
self.assertEqual(0, len(usage_list))
def test_get_all_one_allocation(self):
db_rp, _ = self._make_allocation(rp_uuid=uuidsentinel.rp_uuid)
inv = objects.Inventory(resource_provider=db_rp,
resource_class=fields.ResourceClass.DISK_GB,
total=1024)
inv.obj_set_defaults()
inv_list = objects.InventoryList(objects=[inv])
db_rp.set_inventory(inv_list)
usage_list = objects.UsageList.get_all_by_resource_provider_uuid(
self.context, db_rp.uuid)
self.assertEqual(1, len(usage_list))
self.assertEqual(2, usage_list[0].usage)
self.assertEqual(fields.ResourceClass.DISK_GB,
usage_list[0].resource_class)
def test_get_inventory_no_allocation(self):
db_rp = objects.ResourceProvider(self.context,
name=uuidsentinel.rp_no_inv,
uuid=uuidsentinel.rp_no_inv)
db_rp.create()
inv = objects.Inventory(resource_provider=db_rp,
resource_class=fields.ResourceClass.DISK_GB,
total=1024)
inv.obj_set_defaults()
inv_list = objects.InventoryList(objects=[inv])
db_rp.set_inventory(inv_list)
usage_list = objects.UsageList.get_all_by_resource_provider_uuid(
self.context, db_rp.uuid)
self.assertEqual(1, len(usage_list))
self.assertEqual(0, usage_list[0].usage)
self.assertEqual(fields.ResourceClass.DISK_GB,
usage_list[0].resource_class)
def test_get_all_multiple_inv(self):
db_rp = objects.ResourceProvider(self.context,
name=uuidsentinel.rp_no_inv,
uuid=uuidsentinel.rp_no_inv)
db_rp.create()
disk_inv = objects.Inventory(
resource_provider=db_rp,
resource_class=fields.ResourceClass.DISK_GB, total=1024)
disk_inv.obj_set_defaults()
vcpu_inv = objects.Inventory(
resource_provider=db_rp,
resource_class=fields.ResourceClass.VCPU, total=24)
vcpu_inv.obj_set_defaults()
inv_list = objects.InventoryList(objects=[disk_inv, vcpu_inv])
db_rp.set_inventory(inv_list)
usage_list = objects.UsageList.get_all_by_resource_provider_uuid(
self.context, db_rp.uuid)
self.assertEqual(2, len(usage_list))
|
|
from __future__ import division
import math, fractions, itertools, functools
from conformal_blocks.lie import SimpleLieAlgebra, TypeALieAlgebra, TypeBLieAlgebra, TypeCLieAlgebra, _Root
try:
import sage.all as sage
def Fraction(x,y):
try:
return sage.Rational((x, y))
except TypeError:
return x/y
except ImportError:
from fractions import Fraction
'''
Created on Nov 10, 2016
@author: mjschust
'''
class ConformalBlocksBundle(object):
"""
A class representing a conformal blocks vector bundle.
"""
def __init__(self, liealg, weights, level):
"""
:param liealg: A SimpleLieAlgebra object.
:param weights: A list of tuples of integers: the weights of the conformal blocks bundle.
:param level: A positive integer: the level of the conformal blocks bundle.
"""
self.liealg = liealg
new_weights = []
for wt in weights:
new_weights.append(tuple(wt))
self.weights = new_weights
self.level = level
self._rank = -1
def get_rank(self):
"""
Computes the rank of the conformal blocks bundle. The algorithm uses factorization, then
the fusion product to compute the 3-point ranks.
:return: An integer: the rank of the bundle.
"""
if self._rank < 0:
self._rank = self._get_rank(self.weights, self.level)
return self._rank
def _get_rank(self, weights, level):
"""
Computes the rank of the conformal blocks bundle with given weights and level.
The algorithm uses the fusion product and factorization.
:param weights: A list of tuples of integers: the list of weights.
:param level: A positive integer: corresponds to the level of the fusion product.
:return: An integer: the rank of the bundle.
"""
# Find weights with largest and smallest corresponding rep's
liealg = self.liealg
min_dim = max_dim = liealg.get_rep_dim(weights[0])
min_index = max_index = 0
for i in range(len(weights)):
dim = liealg.get_rep_dim(weights[i])
if dim < min_dim:
min_dim = dim
min_index = i
if dim > max_dim:
max_dim = dim
max_index = i
# Covers the case when all dimensions are the same
if min_index == max_index:
max_index = min_index + 1
fus_prod = liealg.fusion(weights[min_index], weights[max_index], level)
# indices = min_index, max_index
# factor_list = [wt for (i, wt) in enumerate(weights) if i not in indices]
factor_list = []
for i in range(len(weights)):
if i != min_index and i != max_index:
factor_list.append(weights[i])
multi_fus_prod = liealg.multi_fusion(factor_list, level)
ret_val = 0
for mu_star in fus_prod:
mult = fus_prod[mu_star]
mu = liealg.get_dual_weight(mu_star)
if mu in multi_fus_prod:
ret_val += mult * multi_fus_prod[mu]
return ret_val
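# Both rank computations rest on factorization: fusing two of the weights turns
# an n-point rank into a fusion-coefficient-weighted sum of (n-1)-point ranks,
# and a 3-point rank is read off directly as the multiplicity of the dual
# weight in the fusion product (see the recursion in _alt_compute_rank below).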
#Original version of the above method. Uses less memory but runs an order of magnitude slower.
def _alt_compute_rank(self, weights, level):
# Find weights with largest and smallest corresponding rep's
liealg = self.liealg
min_dim = max_dim = liealg.get_rep_dim(weights[0])
min_index = max_index = 0
for i in range(len(weights)):
dim = liealg.get_rep_dim(weights[i])
if dim < min_dim:
min_dim = dim
min_index = i
if dim > max_dim:
max_dim = dim
max_index = i
# Covers the case when all dimensions are the same
if min_index == max_index:
max_index = min_index + 1
fus_prod = liealg.fusion(weights[min_index], weights[max_index], level)
# indices = min_index, max_index
# factor_list = [wt for (i, wt) in enumerate(weights) if i not in indices]
factor_list = []
for i in range(len(weights)):
if i != min_index and i != max_index:
factor_list.append(weights[i])
# Three point case is given by the fusion product
if len(factor_list) == 1:
dual_wt3 = liealg.get_dual_weight(factor_list[0])
if dual_wt3 in fus_prod:
return fus_prod[dual_wt3]
else:
return 0
# If more than three points, factor
ret_val = 0
for wt in fus_prod:
mult = fus_prod[wt]
if mult > 0:
ret_val = ret_val + mult * self._alt_compute_rank(factor_list + [wt], level)
return ret_val
def get_symmetrized_divisor(self):
"""
Computes the symmetrized divisor associated to the conformal blocks bundle.
:return: A list of numbers: the divisor given in the standard basis D_1, D_2,... of
the symmetric nef cone.
"""
ret_val = []
n = len(self.weights)
weighted_rank = 0
for wt in self.weights:
weighted_rank += self.liealg.casimirScalar(wt)
if self.liealg.exact:
weighted_rank = Fraction(self.get_rank() * weighted_rank, n * (n - 1))
else:
weighted_rank = self.get_rank() * weighted_rank / (n * (n - 1))
point_indices = [i for i in range(0, n)]
for i in range(2, n // 2 + 1):
coord = i * (n - i) * weighted_rank
sum = 0
for subset in itertools.combinations(point_indices, i):
#Could be more efficient here
wt_list1 = []
wt_list2 = []
for j in range(0,n):
if j in subset:
wt_list1.append(self.weights[j])
else:
wt_list2.append(self.weights[j])
prod = self.liealg.multi_fusion(wt_list1, self.level)
for mu_star in prod.keys():
mu = self.liealg.get_dual_weight(mu_star)
sum += self.liealg.casimirScalar(mu) * self._get_rank(wt_list1 + [mu], self.level) * self._get_rank(wt_list2 + [mu_star], self.level)
if self.liealg.exact:
sum = Fraction(sum * math.factorial(i) * math.factorial(n - i), math.factorial(n))
coord = Fraction(coord - sum, 2 * (self.level + self.liealg.dual_coxeter()))
else:
sum = sum*math.factorial(i)*math.factorial(n-i)/math.factorial(n)
coord = (coord - sum) / (2 * (self.level + self.liealg.dual_coxeter()))
ret_val.append(coord)
return ret_val
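# Transcribing the loop above into symbols: with r(...) the rank, c(.) the value
# of casimirScalar, l the level and h the dual Coxeter number, each coordinate is
#   D_i = [ i*(n-i) * rank * sum_j c(lambda_j) / (n*(n-1))
#           - (i!*(n-i)!/n!) * sum_{|A|=i} sum_mu c(mu) * r(lambda_A + [mu]) * r(lambda_{A^c} + [mu*]) ]
#         / (2*(l + h)).
# This is a restatement of what the code computes, not an independent formula.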
def get_norm_sym_divisor_ray(self):
"""
Computes the symmetrized divisor associated to the conformal blocks bundle and normalizes the
vector by clearing denominators.
**DOES NOT WORK WELL WITH FP ARITHMETIC**
**DOES NOT WORK IN SAGE**
**REQUIRES PYTHON 2** (uses the ``long`` built-in and ``fractions.gcd``)
:return: A list of numbers: the divisor ray given in the standard basis D_1, D_2,... of
the symmetric nef cone.
"""
divisor = self.get_symmetrized_divisor()
if self.liealg.exact:
denom_lcm = functools.reduce(lambda x, y: self._lcm(x, y), [long(q.denominator) for q in divisor])
denom_clear = [long(round(q * denom_lcm)) for q in divisor]
div_gcd = functools.reduce(lambda x, y: fractions.gcd(x, y), denom_clear)
if div_gcd > 0:
return [x//div_gcd for x in denom_clear]
else:
return denom_clear
else:
n_fact = math.factorial(len(self.weights))
int_div = [long(round(n_fact * x)) for x in divisor]
div_gcd = functools.reduce(lambda x, y: fractions.gcd(x, y), int_div)
if div_gcd > 0:
return [x // div_gcd for x in int_div]
else:
return [x for x in int_div]
def _lcm(self, x, y):
return x*y//fractions.gcd(x, y)
def get_F_curves(self):
"""
Generates a list of all F-curves with the same number of points as the conformal
blocks bundle.
:return: A list of partitions of [1, 2,..., n]: the list of F-curves
"""
n = len(self.weights)
all_points = set([x for x in range(1, n+1)])
ret_list = []
if n == 3:
return ret_list
for r_1 in range(1, n - 2):
for sset_1 in itertools.combinations(all_points, r_1):
comp_sset_1 = all_points.difference(sset_1)
for r_2 in range(1, n - r_1 - 1):
for sset_2 in itertools.combinations(comp_sset_1, r_2):
comp_sset_2 = comp_sset_1.difference(sset_2)
for r_3 in range(1, n - r_1 - r_2):
for sset_3 in itertools.combinations(comp_sset_2, r_3):
sset_4 = comp_sset_2.difference(sset_3)
ret_list.append([sset_1, sset_2, sset_3, tuple(sset_4)])
return ret_list
def intersect_F_curve(self, partition):
"""
Computes the intersection of the divisor associated to this conformal blocks bundle with
the given F-curve.
:param partition: A list of 4 lists of integers partitioning the set {1, ..., # points}: the
F-curve to be intersected.
:return: An integer: the intersection number.
"""
ret_val = 0
wt_list1 = [self.weights[point - 1] for point in partition[0]]
wt_list2 = [self.weights[point - 1] for point in partition[1]]
wt_list3 = [self.weights[point - 1] for point in partition[2]]
wt_list4 = [self.weights[point - 1] for point in partition[3]]
prod1 = self.liealg.multi_fusion(wt_list1, self.level)
prod2 = self.liealg.multi_fusion(wt_list2, self.level)
prod3 = self.liealg.multi_fusion(wt_list3, self.level)
prod4 = self.liealg.multi_fusion(wt_list4, self.level)
for wt1 in prod1.keys():
if prod1[wt1] == 0: continue
for wt2 in prod2.keys():
if prod2[wt2] == 0: continue
for wt3 in prod3.keys():
if prod3[wt3] == 0: continue
mu_list = [wt1, wt2, wt3]
mu_prod = self.liealg.multi_fusion(mu_list, self.level)
for wt4 in prod4.keys():
if prod4[wt4] == 0: continue
if mu_prod[self.liealg.get_dual_weight(wt4)] == 0: continue
ret_val += self._degree(wt1, wt2, wt3, wt4, self.level) * \
prod1[wt1] * prod2[wt2] * prod3[wt3] * prod4[wt4]
return ret_val
def _degree(self, wt1, wt2, wt3, wt4, level):
"""
Computes the degree of a four-point conformal blocks vector bundle. Implements Fakhruddin's
formula.
:param wt1: A tuple of integers: a weight of the bundle.
:param wt2: A tuple of integers: a weight of the bundle.
:param wt3: A tuple of integers: a weight of the bundle.
:param wt4: A tuple of integers: a weight of the bundle.
:param level: A positive integer: the level of the bundle.
:return: A positive integer: the degree of the bundle.
"""
liealg = self.liealg
ret_val = self._get_rank([wt1, wt2, wt3, wt4], level) * (
liealg.casimirScalar(wt1) + liealg.casimirScalar(wt2) + liealg.casimirScalar(wt3) + liealg.casimirScalar(wt4))
sum = 0
prod1 = liealg.fusion(wt1, wt2, level)
prod2 = liealg.fusion(wt3, wt4, level)
for mu in prod1.keys():
mu_star = liealg.get_dual_weight(mu)
if mu_star in prod2:
sum += liealg.casimirScalar(mu_star) * prod1[mu] * prod2[mu_star]
prod1 = liealg.fusion(wt1, wt3, level)
prod2 = liealg.fusion(wt2, wt4, level)
for mu in prod1.keys():
mu_star = liealg.get_dual_weight(mu)
if mu_star in prod2:
sum += liealg.casimirScalar(mu_star) * prod1[mu] * prod2[mu_star]
prod1 = liealg.fusion(wt1, wt4, level)
prod2 = liealg.fusion(wt2, wt3, level)
for mu in prod1.keys():
mu_star = liealg.get_dual_weight(mu)
if mu_star in prod2:
sum += liealg.casimirScalar(mu_star) * prod1[mu] * prod2[mu_star]
ret_val -= sum
if liealg.exact:
ret_val = Fraction(ret_val, (2 * (level + liealg.dual_coxeter())))
else:
ret_val = round(ret_val / (2 * (level + liealg.dual_coxeter())))
return ret_val
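# In symbols, the method above computes
#   deg = [ rank(w1,w2,w3,w4) * (c(w1)+c(w2)+c(w3)+c(w4))
#           - sum over the three pairings (w_a,w_b | w_c,w_d) of
#             sum_mu c(mu*) * N(w_a,w_b; mu) * N(w_c,w_d; mu*) ]
#         / (2*(level + h)),
# where c(.) is casimirScalar, N(.,.;.) are the fusion coefficients and h the
# dual Coxeter number -- again a direct reading of the code.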
class SymmetricConformalBlocksBundle(ConformalBlocksBundle):
"""
A class representing a symmetric conformal blocks vector bundle.
"""
def __init__(self, liealg, wt, num_points, level):
"""
:param liealg: A SimpleLieAlgebra object.
:param wt: A list of integers: the weight of the conformal blocks bundle, repeated at each
point.
:param num_points: A positive integer: the number of points of the conformal blocks bundle.
:param level: A positive integer: the level of the conformal blocks bundle.
"""
ConformalBlocksBundle.__init__(self, liealg, [wt for i in range(num_points)], level)
def get_symmetrized_divisor(self):
"""
Computes the symmetrized divisor associated to the conformal blocks bundle. Algorithm is
optimized for the symmetric case.
:return: A list of numbers: the divisor given in the standard basis D_1, D_2,... of
the symmetric nef cone.
"""
ret_val = []
n = len(self.weights)
wt = self.weights[0]
for i in range(2, n // 2 + 1):
if self.liealg.exact:
coord = Fraction(i * (n - i) * self.get_rank() * self.liealg.casimirScalar(wt), n - 1)
else:
coord = i * (n - i) * self.get_rank() * self.liealg.casimirScalar(wt) / (n - 1)
sum_list = [0]
self._weighted_factor(wt, wt, 1, i - 1, n - i, sum_list, {})
if self.liealg.exact:
coord = Fraction(coord - sum_list[0], 2 * (self.level + self.liealg.dual_coxeter()))
else:
coord = (coord - sum_list[0]) / (2 * (self.level + self.liealg.dual_coxeter()))
ret_val.append(coord)
return ret_val
def _weighted_factor(self, wt, wt2, mult, wts_rem, ic, ret_val, rank_dict):
prod = self.liealg.fusion(wt, wt2, self.level)
for wt3 in prod.keys():
if wts_rem > 1:
self._weighted_factor(wt, wt3, mult * prod[wt3], wts_rem - 1, ic, ret_val, rank_dict)
else:
if not wt3 in rank_dict:
wt_list = [wt for i in range(ic)]
wt_list.append(wt3)
rank_dict[wt3] = self._get_rank(wt_list, self.level)
ret_val[0] += self.liealg.casimirScalar(self.liealg.get_dual_weight(wt3)) * mult * prod[wt3] * \
rank_dict[wt3]
def get_sym_F_curves(self):
"""
Generates a list of all F-curves with the same number of points as the conformal
blocks bundle, up to permutation of points.
:return: A list of partitions of [1, 2,..., n]: the list of F-curves
"""
n = len(self.weights)
partitions = []
for part1 in range(int(math.ceil(n / 4)), n - 2):
for part2 in range(int(math.ceil((n - part1) / 3)), min(n - part1 - 2, part1) + 1):
for part3 in range(int(math.ceil((n - part1 - part2) / 2)), min(n - part1 - part2 - 1, part2) + 1):
part4 = n - part1 - part2 - part3
partitions.append((part1, part2, part3, part4))
ret_list = []
for partition in partitions:
p1, p2, p3, p4 = partition[0], partition[1], partition[2], partition[3]
f_curve = [tuple([x for x in range(1, p1 + 1)]), tuple([x for x in range(p1 + 1, p1 + p2 + 1)]),
tuple([x for x in range(p1 + p2 + 1, p1 + p2 + p3 + 1)]),
tuple([x for x in range(p1 + p2 + p3 + 1, n + 1)])]
ret_list.append(f_curve)
return ret_list
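# A minimal usage sketch for the class above (illustrative only: the concrete
# SimpleLieAlgebra subclass, here called `TypeALieAlgebra`, is an assumed name
# and should be replaced by whichever simple Lie algebra class this package
# provides):
#
#     liealg = TypeALieAlgebra(3, exact=True)
#     bundle = SymmetricConformalBlocksBundle(liealg, [1, 0, 0], 6, 1)
#     divisor = bundle.get_symmetrized_divisor()  # coordinates in the D_i basis
#     f_curves = bundle.get_sym_F_curves()        # partitions of [1, ..., 6] into 4 parts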
|
|
# -*- coding: utf-8 -*-
import httplib as http
import unittest
from django.utils import timezone
from github3.repos.branch import Branch
from nose.tools import * # flake8: noqa
from json import dumps
import mock
import pytest
from framework.auth import Auth
from framework.exceptions import HTTPError
from addons.base.tests.views import (
OAuthAddonAuthViewsTestCaseMixin, OAuthAddonConfigViewsTestCaseMixin
)
from addons.github.tests.utils import create_mock_github, GitHubAddonTestCase
from addons.github.tests.factories import GitHubAccountFactory
from addons.github import utils
from addons.github.api import GitHubClient
from addons.github.serializer import GitHubSerializer
from addons.github.utils import check_permissions
from tests.base import OsfTestCase, get_default_metaschema
from osf_tests.factories import ProjectFactory, UserFactory, AuthUserFactory
pytestmark = pytest.mark.django_db
class TestGitHubAuthViews(GitHubAddonTestCase, OAuthAddonAuthViewsTestCaseMixin, OsfTestCase):
@mock.patch(
'addons.github.models.UserSettings.revoke_remote_oauth_access',
mock.PropertyMock()
)
def test_delete_external_account(self):
super(TestGitHubAuthViews, self).test_delete_external_account()
class TestGitHubConfigViews(GitHubAddonTestCase, OAuthAddonConfigViewsTestCaseMixin, OsfTestCase):
folder = None
Serializer = GitHubSerializer
client = GitHubClient
## Overrides ##
def setUp(self):
super(TestGitHubConfigViews, self).setUp()
self.mock_api_user = mock.patch('addons.github.api.GitHubClient.user')
self.mock_api_credentials_are_valid = mock.patch('addons.github.api.GitHubClient.check_authorization', return_value=True)
self.mock_api_user.return_value = mock.Mock()
self.mock_api_credentials_are_valid.start()
self.mock_api_user.start()
    def tearDown(self):
        self.mock_api_user.stop()
        self.mock_api_credentials_are_valid.stop()
        super(TestGitHubConfigViews, self).tearDown()
def test_folder_list(self):
        # GH only lists the root folder (repos), so this test is superfluous
pass
@mock.patch('addons.github.models.NodeSettings.add_hook')
@mock.patch('addons.github.views.GitHubClient.repo')
def test_set_config(self, mock_repo, mock_add_hook):
        # GH selects repos, not folders, so this needs to be overridden
mock_repo.return_value = 'repo_name'
url = self.project.api_url_for('{0}_set_config'.format(self.ADDON_SHORT_NAME))
res = self.app.post_json(url, {
'github_user': 'octocat',
'github_repo': 'repo_name',
}, auth=self.user.auth)
assert_equal(res.status_code, http.OK)
self.project.reload()
assert_equal(
self.project.logs.latest().action,
'{0}_repo_linked'.format(self.ADDON_SHORT_NAME)
)
mock_add_hook.assert_called_once_with(save=False)
# TODO: Test remaining CRUD methods
# TODO: Test exception handling
class TestCRUD(OsfTestCase):
def setUp(self):
super(TestCRUD, self).setUp()
self.github = create_mock_github(user='fred', private=False)
self.user = AuthUserFactory()
self.consolidated_auth = Auth(user=self.user)
self.project = ProjectFactory(creator=self.user)
self.project.add_addon('github', auth=self.consolidated_auth)
self.project.creator.add_addon('github')
self.node_settings = self.project.get_addon('github')
self.node_settings.user_settings = self.project.creator.get_addon('github')
# Set the node addon settings to correspond to the values of the mock repo
self.node_settings.user = self.github.repo.return_value.owner.login
self.node_settings.repo = self.github.repo.return_value.name
self.node_settings.save()
class TestGithubViews(OsfTestCase):
def setUp(self):
super(TestGithubViews, self).setUp()
self.user = AuthUserFactory()
self.consolidated_auth = Auth(user=self.user)
self.project = ProjectFactory(creator=self.user)
self.non_authenticator = UserFactory()
self.project.add_contributor(
contributor=self.non_authenticator,
auth=self.consolidated_auth,
)
self.project.creator.add_addon('github')
self.project.creator.external_accounts.add(GitHubAccountFactory())
self.project.creator.save()
self.project.save()
self.project.add_addon('github', auth=self.consolidated_auth)
self.github = create_mock_github(user='fred', private=False)
self.node_settings = self.project.get_addon('github')
self.node_settings.user_settings = self.project.creator.get_addon('github')
# Set the node addon settings to correspond to the values of the mock repo
self.node_settings.user = self.github.repo.return_value.owner.login
self.node_settings.repo = self.github.repo.return_value.name
self.node_settings.save()
def _get_sha_for_branch(self, branch=None, mock_branches=None):
github_mock = self.github
if mock_branches is None:
mock_branches = github_mock.branches
if branch is None: # Get default branch name
branch = self.github.repo.return_value.default_branch
for each in mock_branches.return_value:
if each.name == branch:
branch_sha = each.commit.sha
return branch_sha
# Tests for _get_refs
@mock.patch('addons.github.api.GitHubClient.branches')
@mock.patch('addons.github.api.GitHubClient.repo')
def test_get_refs_defaults(self, mock_repo, mock_branches):
github_mock = self.github
mock_repo.return_value = github_mock.repo.return_value
mock_branches.return_value = github_mock.branches.return_value
branch, sha, branches = utils.get_refs(self.node_settings)
assert_equal(
branch,
github_mock.repo.return_value.default_branch
)
assert_equal(sha, self._get_sha_for_branch(branch=None)) # Get refs for default branch
assert_equal(
branches,
github_mock.branches.return_value
)
@mock.patch('addons.github.api.GitHubClient.branches')
@mock.patch('addons.github.api.GitHubClient.repo')
def test_get_refs_branch(self, mock_repo, mock_branches):
github_mock = self.github
mock_repo.return_value = github_mock.repo.return_value
mock_branches.return_value = github_mock.branches.return_value
branch, sha, branches = utils.get_refs(self.node_settings, 'master')
assert_equal(branch, 'master')
branch_sha = self._get_sha_for_branch('master')
assert_equal(sha, branch_sha)
assert_equal(
branches,
github_mock.branches.return_value
)
def test_before_fork(self):
url = self.project.api_url + 'fork/before/'
res = self.app.get(url, auth=self.user.auth).maybe_follow()
assert_equal(len(res.json['prompts']), 1)
def test_get_refs_sha_no_branch(self):
with assert_raises(HTTPError):
utils.get_refs(self.node_settings, sha='12345')
def test_get_refs_registered_missing_branch(self):
github_mock = self.github
self.node_settings.registration_data = {
'branches': [
branch.as_json()
for branch in github_mock.branches.return_value
]
}
with mock.patch('osf.models.node.AbstractNode.is_registration', new_callable=mock.PropertyMock) as mock_is_reg:
mock_is_reg.return_value = True
with assert_raises(HTTPError):
utils.get_refs(self.node_settings, branch='nothere')
# Tests for _check_permissions
# make a user with no authorization; make sure check_permissions returns false
def test_permissions_no_auth(self):
github_mock = self.github
# project is set to private right now
connection = github_mock
non_authenticated_user = UserFactory()
non_authenticated_auth = Auth(user=non_authenticated_user)
branch = 'master'
assert_false(check_permissions(self.node_settings, non_authenticated_auth, connection, branch))
# make a repository that doesn't allow push access for this user;
# make sure check_permissions returns false
@mock.patch('addons.github.models.UserSettings.has_auth')
@mock.patch('addons.github.api.GitHubClient.repo')
def test_permissions_no_access(self, mock_repo, mock_has_auth):
github_mock = self.github
mock_has_auth.return_value = True
connection = github_mock
branch = 'master'
mock_repository = mock.NonCallableMock()
mock_repository.user = 'fred'
mock_repository.repo = 'mock-repo'
mock_repository.permissions = dict(push=False)
mock_repo.return_value = mock_repository
assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, branch, repo=mock_repository))
# make a branch with a different commit than the commit being passed into check_permissions
@mock.patch('addons.github.models.UserSettings.has_auth')
def test_permissions_not_head(self, mock_has_auth):
github_mock = self.github
mock_has_auth.return_value = True
connection = github_mock
mock_branch = mock.NonCallableMock()
mock_branch.commit.sha = '67890'
sha = '12345'
assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, mock_branch, sha=sha))
    # make sure permissions are not granted for editing a registration
@mock.patch('addons.github.models.UserSettings.has_auth')
def test_permissions(self, mock_has_auth):
github_mock = self.github
mock_has_auth.return_value = True
connection = github_mock
with mock.patch('osf.models.node.AbstractNode.is_registration', new_callable=mock.PropertyMock) as mock_is_reg:
mock_is_reg.return_value = True
assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, 'master'))
def check_hook_urls(self, urls, node, path, sha):
url = node.web_url_for('addon_view_or_download_file', path=path, provider='github')
expected_urls = {
'view': '{0}?ref={1}'.format(url, sha),
'download': '{0}?action=download&ref={1}'.format(url, sha)
}
assert_equal(urls['view'], expected_urls['view'])
assert_equal(urls['download'], expected_urls['download'])
@mock.patch('addons.github.views.verify_hook_signature')
def test_hook_callback_add_file_not_thro_osf(self, mock_verify):
url = '/api/v1/project/{0}/github/hook/'.format(self.project._id)
timestamp = str(timezone.now())
self.app.post_json(
url,
{
'test': True,
'commits': [{
'id': 'b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
'distinct': True,
'message': 'foo',
'timestamp': timestamp,
'url': 'https://github.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
'author': {'name': 'Illidan', 'email': '[email protected]'},
'committer': {'name': 'Testor', 'email': '[email protected]', 'username': 'tester'},
'added': ['PRJWN3TV'],
'removed': [],
'modified': [],
}]
},
content_type='application/json',
).maybe_follow()
self.project.reload()
assert_equal(self.project.logs.latest().action, 'github_file_added')
urls = self.project.logs.latest().params['urls']
self.check_hook_urls(
urls,
self.project,
path='PRJWN3TV',
sha='b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
)
@mock.patch('addons.github.views.verify_hook_signature')
def test_hook_callback_modify_file_not_thro_osf(self, mock_verify):
url = '/api/v1/project/{0}/github/hook/'.format(self.project._id)
timestamp = str(timezone.now())
self.app.post_json(
url,
{'test': True,
'commits': [{'id': 'b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
'distinct': True,
'message': ' foo',
'timestamp': timestamp,
'url': 'https://github.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
'author': {'name': 'Illidan', 'email': '[email protected]'},
'committer': {'name': 'Testor', 'email': '[email protected]',
'username': 'tester'},
'added': [], 'removed':[], 'modified':['PRJWN3TV']}]},
content_type='application/json').maybe_follow()
self.project.reload()
assert_equal(self.project.logs.latest().action, 'github_file_updated')
urls = self.project.logs.latest().params['urls']
self.check_hook_urls(
urls,
self.project,
path='PRJWN3TV',
sha='b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
)
@mock.patch('addons.github.views.verify_hook_signature')
def test_hook_callback_remove_file_not_thro_osf(self, mock_verify):
url = '/api/v1/project/{0}/github/hook/'.format(self.project._id)
timestamp = str(timezone.now())
self.app.post_json(
url,
{'test': True,
'commits': [{'id': 'b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
'distinct': True,
'message': 'foo',
'timestamp': timestamp,
'url': 'https://github.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
'author': {'name': 'Illidan', 'email': '[email protected]'},
'committer': {'name': 'Testor', 'email': '[email protected]', 'username': 'tester'},
'added': [], 'removed': ['PRJWN3TV'], 'modified':[]}]},
content_type='application/json').maybe_follow()
self.project.reload()
assert_equal(self.project.logs.latest().action, 'github_file_removed')
urls = self.project.logs.latest().params['urls']
assert_equal(urls, {})
@mock.patch('addons.github.views.verify_hook_signature')
def test_hook_callback_add_file_thro_osf(self, mock_verify):
url = '/api/v1/project/{0}/github/hook/'.format(self.project._id)
self.app.post_json(
url,
{'test': True,
'commits': [{'id': 'b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
'distinct': True,
'message': 'Added via the Open Science Framework',
'timestamp': '2014-01-08T14:15:51-08:00',
'url': 'https://github.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
'author': {'name': 'Illidan', 'email': '[email protected]'},
'committer': {'name': 'Testor', 'email': '[email protected]', 'username': 'tester'},
'added': ['PRJWN3TV'], 'removed':[], 'modified':[]}]},
content_type='application/json').maybe_follow()
self.project.reload()
assert_not_equal(self.project.logs.latest().action, 'github_file_added')
@mock.patch('addons.github.views.verify_hook_signature')
def test_hook_callback_modify_file_thro_osf(self, mock_verify):
url = '/api/v1/project/{0}/github/hook/'.format(self.project._id)
self.app.post_json(
url,
{'test': True,
'commits': [{'id': 'b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
'distinct': True,
'message': 'Updated via the Open Science Framework',
'timestamp': '2014-01-08T14:15:51-08:00',
'url': 'https://github.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
'author': {'name': 'Illidan', 'email': '[email protected]'},
'committer': {'name': 'Testor', 'email': '[email protected]', 'username': 'tester'},
'added': [], 'removed':[], 'modified':['PRJWN3TV']}]},
content_type='application/json').maybe_follow()
self.project.reload()
assert_not_equal(self.project.logs.latest().action, 'github_file_updated')
@mock.patch('addons.github.views.verify_hook_signature')
def test_hook_callback_remove_file_thro_osf(self, mock_verify):
url = '/api/v1/project/{0}/github/hook/'.format(self.project._id)
self.app.post_json(
url,
{'test': True,
'commits': [{'id': 'b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
'distinct': True,
'message': 'Deleted via the Open Science Framework',
'timestamp': '2014-01-08T14:15:51-08:00',
'url': 'https://github.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
'author': {'name': 'Illidan', 'email': '[email protected]'},
'committer': {'name': 'Testor', 'email': '[email protected]', 'username': 'tester'},
'added': [], 'removed':['PRJWN3TV'], 'modified':[]}]},
content_type='application/json').maybe_follow()
self.project.reload()
assert_not_equal(self.project.logs.latest().action, 'github_file_removed')
class TestRegistrationsWithGithub(OsfTestCase):
def setUp(self):
super(TestRegistrationsWithGithub, self).setUp()
self.project = ProjectFactory()
self.consolidated_auth = Auth(user=self.project.creator)
self.project.add_addon('github', auth=self.consolidated_auth)
self.project.creator.add_addon('github')
self.node_settings = self.project.get_addon('github')
self.user_settings = self.project.creator.get_addon('github')
self.node_settings.user_settings = self.user_settings
self.node_settings.user = 'Queen'
self.node_settings.repo = 'Sheer-Heart-Attack'
self.node_settings.save()
class TestGithubSettings(OsfTestCase):
def setUp(self):
super(TestGithubSettings, self).setUp()
self.github = create_mock_github(user='fred', private=False)
self.project = ProjectFactory()
self.auth = self.project.creator.auth
self.consolidated_auth = Auth(user=self.project.creator)
self.project.add_addon('github', auth=self.consolidated_auth)
self.project.creator.add_addon('github')
self.node_settings = self.project.get_addon('github')
self.user_settings = self.project.creator.get_addon('github')
self.node_settings.user_settings = self.user_settings
self.node_settings.user = 'Queen'
self.node_settings.repo = 'Sheer-Heart-Attack'
self.node_settings.save()
@mock.patch('addons.github.models.NodeSettings.add_hook')
@mock.patch('addons.github.api.GitHubClient.repo')
def test_link_repo(self, mock_repo, mock_add_hook):
github_mock = self.github
mock_repo.return_value = github_mock.repo.return_value
url = self.project.api_url + 'github/settings/'
self.app.post_json(
url,
{
'github_user': 'queen',
'github_repo': 'night at the opera',
},
auth=self.auth
).maybe_follow()
self.project.reload()
self.node_settings.reload()
assert_equal(self.node_settings.user, 'queen')
assert_equal(self.node_settings.repo, 'night at the opera')
assert_equal(self.project.logs.latest().action, 'github_repo_linked')
mock_add_hook.assert_called_once_with(save=False)
@mock.patch('addons.github.models.NodeSettings.add_hook')
@mock.patch('addons.github.api.GitHubClient.repo')
def test_link_repo_no_change(self, mock_repo, mock_add_hook):
github_mock = self.github
mock_repo.return_value = github_mock.repo.return_value
log_count = self.project.logs.count()
url = self.project.api_url + 'github/settings/'
self.app.post_json(
url,
{
'github_user': 'Queen',
'github_repo': 'Sheer-Heart-Attack',
},
auth=self.auth
).maybe_follow()
self.project.reload()
self.node_settings.reload()
assert_equal(self.project.logs.count(), log_count)
assert_false(mock_add_hook.called)
@mock.patch('addons.github.api.GitHubClient.repo')
def test_link_repo_non_existent(self, mock_repo):
mock_repo.return_value = None
url = self.project.api_url + 'github/settings/'
res = self.app.post_json(
url,
{
'github_user': 'queen',
'github_repo': 'night at the opera',
},
auth=self.auth,
expect_errors=True
).maybe_follow()
assert_equal(res.status_code, 400)
@mock.patch('addons.github.api.GitHubClient.branches')
def test_link_repo_registration(self, mock_branches):
mock_branches.return_value = [
Branch.from_json(dumps({
'name': 'master',
'commit': {
'sha': '6dcb09b5b57875f334f61aebed695e2e4193db5e',
'url': 'https://api.github.com/repos/octocat/Hello-World/commits/c5b97d5ae6c19d5c5df71a34c7fbeeda2479ccbc',
}
})),
Branch.from_json(dumps({
'name': 'develop',
'commit': {
'sha': '6dcb09b5b57875asdasedawedawedwedaewdwdass',
'url': 'https://api.github.com/repos/octocat/Hello-World/commits/cdcb09b5b57875asdasedawedawedwedaewdwdass',
}
}))
]
registration = self.project.register_node(
schema=get_default_metaschema(),
auth=self.consolidated_auth,
data=''
)
url = registration.api_url + 'github/settings/'
res = self.app.post_json(
url,
{
'github_user': 'queen',
'github_repo': 'night at the opera',
},
auth=self.auth,
expect_errors=True
).maybe_follow()
assert_equal(res.status_code, 400)
@mock.patch('addons.github.models.NodeSettings.delete_hook')
def test_deauthorize(self, mock_delete_hook):
url = self.project.api_url + 'github/user_auth/'
self.app.delete(url, auth=self.auth).maybe_follow()
self.project.reload()
self.node_settings.reload()
assert_equal(self.node_settings.user, None)
assert_equal(self.node_settings.repo, None)
assert_equal(self.node_settings.user_settings, None)
assert_equal(self.project.logs.latest().action, 'github_node_deauthorized')
if __name__ == '__main__':
unittest.main()
|
|
from __future__ import absolute_import, division, print_function
import pytest
pymysql = pytest.importorskip('pymysql')
from datashape import var, DataShape, Record
import itertools
from odo.backends.csv import CSV
from odo import resource, odo
import sqlalchemy
import sqlalchemy as sa
import os
import sys
import csv as csv_module
import getpass
from odo import drop, discover
from odo.utils import tmpfile
pytestmark = pytest.mark.skipif(sys.platform == 'win32',
reason='not well tested on win32 mysql')
username = getpass.getuser()
url = 'mysql+pymysql://{0}@localhost:3306/test'.format(username)
def create_csv(data, file_name):
with open(file_name, 'w') as f:
csv_writer = csv_module.writer(f)
for row in data:
csv_writer.writerow(row)
data = [(1, 2), (10, 20), (100, 200)]
data_floats = [(1.02, 2.02), (102.02, 202.02), (1002.02, 2002.02)]
@pytest.yield_fixture
def csv():
with tmpfile('.csv') as fn:
create_csv(data, fn)
yield CSV(fn)
@pytest.yield_fixture
def fcsv():
with tmpfile('.csv') as fn:
create_csv(data_floats, fn)
yield CSV(fn, columns=list('ab'))
names = ('tbl%d' % i for i in itertools.count())
@pytest.fixture
def name():
return next(names)
@pytest.fixture(scope='module')
def engine():
return sqlalchemy.create_engine(url)
@pytest.yield_fixture
def sql(engine, csv, name):
dshape = discover(csv)
dshape = DataShape(var,
Record([(n, typ)
for n, typ in zip('ab', dshape.measure.types)]))
try:
t = resource('%s::%s' % (url, name), dshape=dshape)
except sqlalchemy.exc.OperationalError as e:
pytest.skip(str(e))
else:
try:
yield t
finally:
drop(t)
@pytest.yield_fixture
def fsql(engine, fcsv, name):
dshape = discover(fcsv)
dshape = DataShape(var,
Record([(n, typ)
for n, typ in zip('ab', dshape.measure.types)]))
try:
t = resource('%s::%s' % (url, name), dshape=dshape)
except sqlalchemy.exc.OperationalError as e:
pytest.skip(str(e))
else:
yield t
drop(t)
@pytest.fixture
def dcsv():
this_dir = os.path.dirname(__file__)
file_name = os.path.join(this_dir, 'dummydata.csv')
dshape = """var * {
Name: string,
RegistrationDate: date,
ZipCode: int64,
Consts: float64
}"""
return CSV(file_name, dshape=dshape)
@pytest.yield_fixture
def dsql(engine, dcsv, name):
t = resource('%s::%s' % (url, name), dshape=discover(dcsv))
yield t
drop(t)
def test_csv_mysql_load(sql, csv):
engine = sql.bind
conn = engine.raw_connection()
cursor = conn.cursor()
full_path = os.path.abspath(csv.path)
load = '''LOAD DATA INFILE '{0}' INTO TABLE {1} FIELDS TERMINATED BY ','
lines terminated by '\n'
'''.format(full_path, sql.name)
cursor.execute(load)
conn.commit()
def test_simple_into(sql, csv):
odo(csv, sql, if_exists="replace")
assert odo(sql, list) == [(1, 2), (10, 20), (100, 200)]
def test_append(sql, csv):
odo(csv, sql, if_exists="replace")
assert odo(sql, list) == [(1, 2), (10, 20), (100, 200)]
odo(csv, sql, if_exists="append")
assert odo(sql, list) == [(1, 2), (10, 20), (100, 200),
(1, 2), (10, 20), (100, 200)]
def test_simple_float_into(fsql, fcsv):
odo(fcsv, fsql, if_exists="replace")
assert odo(fsql, list) == [(1.02, 2.02),
(102.02, 202.02),
(1002.02, 2002.02)]
def test_tryexcept_into(sql, csv):
# uses multi-byte character and fails over to using sql.extend()
odo(csv, sql, if_exists="replace", QUOTE="alpha", FORMAT="csv")
assert odo(sql, list) == [(1, 2), (10, 20), (100, 200)]
@pytest.mark.xfail(raises=KeyError)
def test_failing_argument(sql, csv):
odo(csv, sql, if_exists="replace", skipinitialspace="alpha")
def test_no_header_no_columns(sql, csv):
odo(csv, sql, if_exists="replace")
assert odo(sql, list) == [(1, 2), (10, 20), (100, 200)]
@pytest.mark.xfail
def test_complex_into(dsql, dcsv):
# data from: http://dummydata.me/generate
odo(dcsv, dsql, if_exists="replace")
assert odo(dsql, list) == odo(dcsv, list)
def test_sql_to_csv(sql, csv):
sql = odo(csv, sql)
with tmpfile('.csv') as fn:
csv = odo(sql, fn)
assert odo(csv, list) == data
# explicitly test that we do NOT preserve the header here
assert discover(csv).measure.names != discover(sql).measure.names
def test_sql_select_to_csv(sql, csv):
sql = odo(csv, sql)
query = sa.select([sql.c.a])
with tmpfile('.csv') as fn:
csv = odo(query, fn)
assert odo(csv, list) == [(x,) for x, _ in data]
def test_csv_output_does_not_preserve_header(sql, csv):
sql = odo(csv, sql)
expected = "1,2\n10,20\n100,200\n"
with tmpfile('.csv') as fn:
csv = odo(sql, fn)
with open(csv.path, 'rt') as f:
result = f.read()
assert result == expected
@pytest.mark.xfail(raises=AssertionError,
reason="Remove when all databases are being tested at once")
def test_different_encoding(name):
encoding = 'latin1'
try:
sql = odo(os.path.join(os.path.dirname(__file__), 'encoding.csv'),
url + '::%s' % name,
encoding=encoding)
except sa.exc.OperationalError as e:
pytest.skip(str(e))
else:
try:
result = odo(sql, list)
expected = [(u'1958.001.500131-1A', 1, None, u'', 899),
(u'1958.001.500156-6', 1, None, u'', 899),
(u'1958.001.500162-1', 1, None, u'', 899),
(u'1958.001.500204-2', 1, None, u'', 899),
(u'1958.001.500204-2A', 1, None, u'', 899),
(u'1958.001.500204-2B', 1, None, u'', 899),
(u'1958.001.500223-6', 1, None, u'', 9610),
(u'1958.001.500233-9', 1, None, u'', 4703),
(u'1909.017.000018-3', 1, 30.0, u'sumaria', 899)]
assert result == expected
finally:
drop(sql)
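# For reference, tables in this module are addressed with odo's
# "<database uri>::<table name>" convention; a hedged sketch (not an extra
# test) of creating and filling one by hand would look like:
#
#     t = resource(url + '::some_table', dshape='var * {a: int64, b: int64}')
#     odo([(1, 2), (10, 20)], t)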
|
|
"""
This pipeline is used to report the results for the ALL modality.
"""
import os
import numpy as np
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import seaborn as sns
#sns.set(style='ticks', palette='Set2')
current_palette = sns.color_palette()
from scipy import interp
from sklearn.externals import joblib
from sklearn.preprocessing import label_binarize
from sklearn.metrics import roc_curve
from sklearn.metrics import auc
from sklearn.metrics import roc_auc_score
from protoclass.data_management import GTModality
from protoclass.validation import labels_to_sensitivity_specificity
# Define the path where all the patients are
path_patients = '/data/prostate/experiments'
# Define the path of the ground-truth for the prostate
path_gt = ['GT_inv/prostate', 'GT_inv/pz', 'GT_inv/cg', 'GT_inv/cap']
# Define the label of the ground-truth which will be provided
label_gt = ['prostate', 'pz', 'cg', 'cap']
# Generate the different path to be later treated
path_patients_list_gt = []
# Create the generator
id_patient_list = [name for name in os.listdir(path_patients)
if os.path.isdir(os.path.join(path_patients, name))]
# Sort the list of patients
id_patient_list = sorted(id_patient_list)
for id_patient in id_patient_list:
    # Append the GT data - note that we need a list of gt paths
path_patients_list_gt.append([os.path.join(path_patients, id_patient, gt)
for gt in path_gt])
# Load all the data once. Splitting into training and testing will be done at
# the cross-validation time
label = []
for idx_pat in range(len(id_patient_list)):
print 'Read patient {}'.format(id_patient_list[idx_pat])
# Create the corresponding ground-truth
gt_mod = GTModality()
gt_mod.read_data_from_path(label_gt,
path_patients_list_gt[idx_pat])
print 'Read the GT data for the current patient ...'
# Extract the corresponding ground-truth for the testing data
# Get the index corresponding to the ground-truth
roi_prostate = gt_mod.extract_gt_data('prostate', output_type='index')
# Get the label of the gt only for the prostate ROI
gt_cap = gt_mod.extract_gt_data('cap', output_type='data')
label.append(gt_cap[roi_prostate])
print 'Data and label extracted for the current patient ...'
testing_label_cv = []
# Go for LOPO cross-validation
for idx_lopo_cv in range(len(id_patient_list)):
# Display some information about the LOPO-CV
print 'Round #{} of the LOPO-CV'.format(idx_lopo_cv + 1)
testing_label = np.ravel(label_binarize(label[idx_lopo_cv], [0, 255]))
testing_label_cv.append(testing_label)
fresults = '/data/prostate/results/mp-mri-prostate/exp-1/aggregation/results.pkl'
results = joblib.load(fresults)
# # Initialise a list for the sensitivity and specificity
# Initialise the mean ROC
mean_tpr = []
mean_fpr = np.linspace(0, 1, 30)
auc_pat = []
# Go for each cross-validation iteration
for idx_cv in range(len(testing_label_cv)):
# Print the information about the iteration in the cross-validation
print 'Iteration #{} of the cross-validation'.format(idx_cv+1)
# Get the prediction
pred_score = results[idx_cv][0]
classes = results[idx_cv][1]
pos_class_arg = np.ravel(np.argwhere(classes == 1))[0]
# Compute the fpr and tpr
fpr, tpr, thresh = roc_curve(testing_label_cv[idx_cv],
pred_score[:, pos_class_arg])
# Compute the mean ROC
mean_tpr.append(interp(mean_fpr,
fpr,
tpr))
mean_tpr[idx_cv][0] = 0.0
auc_pat.append(auc(mean_fpr, mean_tpr[-1]))
avg_tpr = np.mean(mean_tpr, axis=0)
std_tpr = np.std(mean_tpr, axis=0)
avg_tpr[-1] = 1.0
# Create a handle for the figure
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(mean_fpr, avg_tpr,
label=r'Feature aggregation - AUC $= {:1.3f} \pm {:1.3f}$'.format(
auc(mean_fpr, avg_tpr), np.std(auc_pat)),
lw=2)
ax.fill_between(mean_fpr,
avg_tpr + std_tpr,
avg_tpr - std_tpr,
facecolor=current_palette[0], alpha=0.2)
fresults = '/data/prostate/results/mp-mri-prostate/exp-1/stacking-adaboost/results.pkl'
results = joblib.load(fresults)
# # Initialise a list for the sensitivity and specificity
# Initialise the mean ROC
mean_tpr = []
mean_fpr = np.linspace(0, 1, 30)
auc_pat = []
# Go for each cross-validation iteration
for idx_cv in range(len(testing_label_cv)):
# Print the information about the iteration in the cross-validation
print 'Iteration #{} of the cross-validation'.format(idx_cv+1)
# Get the prediction
pred_score = results[idx_cv][0]
classes = results[idx_cv][1]
pos_class_arg = np.ravel(np.argwhere(classes == 1))[0]
# Compute the fpr and tpr
fpr, tpr, thresh = roc_curve(testing_label_cv[idx_cv],
pred_score[:, pos_class_arg])
# Compute the mean ROC
mean_tpr.append(interp(mean_fpr,
fpr,
tpr))
mean_tpr[idx_cv][0] = 0.0
auc_pat.append(auc(mean_fpr, mean_tpr[-1]))
avg_tpr = np.mean(mean_tpr, axis=0)
std_tpr = np.std(mean_tpr, axis=0)
avg_tpr[-1] = 1.0
ax.plot(mean_fpr, avg_tpr,
label=r'Stacking AdaBoost - AUC $= {:1.3f} \pm {:1.3f}$'.format(
auc(mean_fpr, avg_tpr), np.std(auc_pat)),
lw=2)
ax.fill_between(mean_fpr,
avg_tpr + std_tpr,
avg_tpr - std_tpr,
facecolor=current_palette[1], alpha=0.2)
fresults = '/data/prostate/results/mp-mri-prostate/exp-1/stacking-gradient-boosting/results.pkl'
results = joblib.load(fresults)
# # Initialise a list for the sensitivity and specificity
# Initialise the mean ROC
mean_tpr = []
mean_fpr = np.linspace(0, 1, 30)
auc_pat = []
# Go for each cross-validation iteration
for idx_cv in range(len(testing_label_cv)):
# Print the information about the iteration in the cross-validation
print 'Iteration #{} of the cross-validation'.format(idx_cv+1)
# Get the prediction
pred_score = results[idx_cv][0]
classes = results[idx_cv][1]
pos_class_arg = np.ravel(np.argwhere(classes == 1))[0]
# Compute the fpr and tpr
fpr, tpr, thresh = roc_curve(testing_label_cv[idx_cv],
pred_score[:, pos_class_arg])
# Compute the mean ROC
mean_tpr.append(interp(mean_fpr,
fpr,
tpr))
mean_tpr[idx_cv][0] = 0.0
auc_pat.append(auc(mean_fpr, mean_tpr[-1]))
avg_tpr = np.mean(mean_tpr, axis=0)
std_tpr = np.std(mean_tpr, axis=0)
avg_tpr[-1] = 1.0
ax.plot(mean_fpr, avg_tpr,
label=r'Stacking Gradient Boosting - AUC $= {:1.3f} \pm {:1.3f}$'.format(
auc(mean_fpr, avg_tpr), np.std(auc_pat)),
lw=2)
ax.fill_between(mean_fpr,
avg_tpr + std_tpr,
avg_tpr - std_tpr,
facecolor=current_palette[2], alpha=0.2)
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.0])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
#plt.title(r'ROC curve for sparsity level $\lambda={}$'.format(n_comp[sp]))
handles, labels = ax.get_legend_handles_labels()
lgd = ax.legend(handles, labels, loc='lower right')#,
#bbox_to_anchor=(1.4, 0.1))
# Save the plot
plt.savefig('results/exp-2/coarse_combination.pdf',
bbox_extra_artists=(lgd,),
bbox_inches='tight')
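# The three plotting blocks above repeat the same per-fold mean-ROC computation.
# A helper along the lines below (a sketch, not used by the original pipeline)
# would factor it out; it relies only on the imports already made in this script.
def compute_mean_roc(results, testing_label_cv, n_points=30):
    """Return (mean_fpr, avg_tpr, std_tpr, auc_per_fold) for one experiment."""
    mean_fpr = np.linspace(0, 1, n_points)
    mean_tpr = []
    auc_pat = []
    for idx_cv in range(len(testing_label_cv)):
        pred_score = results[idx_cv][0]
        classes = results[idx_cv][1]
        pos_class_arg = np.ravel(np.argwhere(classes == 1))[0]
        fpr, tpr, _ = roc_curve(testing_label_cv[idx_cv],
                                pred_score[:, pos_class_arg])
        mean_tpr.append(interp(mean_fpr, fpr, tpr))
        mean_tpr[-1][0] = 0.0
        auc_pat.append(auc(mean_fpr, mean_tpr[-1]))
    avg_tpr = np.mean(mean_tpr, axis=0)
    std_tpr = np.std(mean_tpr, axis=0)
    avg_tpr[-1] = 1.0
    return mean_fpr, avg_tpr, std_tpr, auc_pat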
|
|
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from textwrap import dedent
import pytest
from pants.backend.codegen.protobuf.python import additional_fields
from pants.backend.codegen.protobuf.python.python_protobuf_subsystem import PythonProtobufMypyPlugin
from pants.backend.codegen.protobuf.python.python_protobuf_subsystem import (
rules as protobuf_subsystem_rules,
)
from pants.backend.codegen.protobuf.python.rules import GeneratePythonFromProtobufRequest
from pants.backend.codegen.protobuf.python.rules import rules as protobuf_rules
from pants.backend.codegen.protobuf.target_types import (
ProtobufSourceField,
ProtobufSourcesGeneratorTarget,
)
from pants.backend.codegen.protobuf.target_types import rules as target_types_rules
from pants.core.util_rules import stripped_source_files
from pants.engine.addresses import Address
from pants.engine.target import GeneratedSources, HydratedSources, HydrateSourcesRequest
from pants.source.source_root import NoSourceRootError
from pants.testutil.python_interpreter_selection import all_major_minor_python_versions
from pants.testutil.rule_runner import QueryRule, RuleRunner, engine_error
GRPC_PROTO_STANZA = """
syntax = "proto3";
package dir1;
// The greeter service definition.
service Greeter {
// Sends a greeting
rpc SayHello (HelloRequest) returns (HelloReply) {}
}
// The request message containing the user's name.
message HelloRequest {
string name = 1;
}
// The response message containing the greetings
message HelloReply {
string message = 1;
}
"""
@pytest.fixture
def rule_runner() -> RuleRunner:
return RuleRunner(
rules=[
*protobuf_rules(),
*protobuf_subsystem_rules(),
*additional_fields.rules(),
*stripped_source_files.rules(),
*target_types_rules(),
QueryRule(HydratedSources, [HydrateSourcesRequest]),
QueryRule(GeneratedSources, [GeneratePythonFromProtobufRequest]),
],
target_types=[ProtobufSourcesGeneratorTarget],
)
def assert_files_generated(
rule_runner: RuleRunner,
address: Address,
*,
expected_files: list[str],
source_roots: list[str],
mypy: bool = False,
extra_args: list[str] | None = None,
) -> None:
args = [f"--source-root-patterns={repr(source_roots)}", *(extra_args or ())]
if mypy:
args.append("--python-protobuf-mypy-plugin")
rule_runner.set_options(args, env_inherit={"PATH", "PYENV_ROOT", "HOME"})
tgt = rule_runner.get_target(address)
protocol_sources = rule_runner.request(
HydratedSources, [HydrateSourcesRequest(tgt[ProtobufSourceField])]
)
generated_sources = rule_runner.request(
GeneratedSources,
[GeneratePythonFromProtobufRequest(protocol_sources.snapshot, tgt)],
)
assert set(generated_sources.snapshot.files) == set(expected_files)
def test_generates_python(rule_runner: RuleRunner) -> None:
# This tests a few things:
# * We generate the correct file names.
# * Protobuf files can import other protobuf files, and those can import others
# (transitive dependencies). We'll only generate the requested target, though.
# * We can handle multiple source roots, which need to be preserved in the final output.
rule_runner.write_files(
{
"src/protobuf/dir1/f.proto": dedent(
"""\
syntax = "proto3";
package dir1;
message Person {
string name = 1;
int32 id = 2;
string email = 3;
}
"""
),
"src/protobuf/dir1/f2.proto": dedent(
"""\
syntax = "proto3";
package dir1;
"""
),
"src/protobuf/dir1/BUILD": "protobuf_sources()",
"src/protobuf/dir2/f.proto": dedent(
"""\
syntax = "proto3";
package dir2;
import "dir1/f.proto";
"""
),
"src/protobuf/dir2/BUILD": (
"protobuf_sources(dependencies=['src/protobuf/dir1'], "
"python_source_root='src/python')"
),
# Test another source root.
"tests/protobuf/test_protos/f.proto": dedent(
"""\
syntax = "proto3";
package test_protos;
import "dir2/f.proto";
"""
),
"tests/protobuf/test_protos/BUILD": (
"protobuf_sources(dependencies=['src/protobuf/dir2'])"
),
}
)
def assert_gen(addr: Address, expected: str) -> None:
assert_files_generated(
rule_runner,
addr,
source_roots=["src/python", "/src/protobuf", "/tests/protobuf"],
expected_files=[expected],
)
assert_gen(
Address("src/protobuf/dir1", relative_file_path="f.proto"), "src/protobuf/dir1/f_pb2.py"
)
assert_gen(
Address("src/protobuf/dir1", relative_file_path="f2.proto"), "src/protobuf/dir1/f2_pb2.py"
)
assert_gen(
Address("src/protobuf/dir2", relative_file_path="f.proto"), "src/python/dir2/f_pb2.py"
)
assert_gen(
Address("tests/protobuf/test_protos", relative_file_path="f.proto"),
"tests/protobuf/test_protos/f_pb2.py",
)
def test_top_level_proto_root(rule_runner: RuleRunner) -> None:
rule_runner.write_files(
{
"protos/f.proto": dedent(
"""\
syntax = "proto3";
package protos;
"""
),
"protos/BUILD": "protobuf_sources()",
}
)
assert_files_generated(
rule_runner,
Address("protos", relative_file_path="f.proto"),
source_roots=["/"],
expected_files=["protos/f_pb2.py"],
)
def test_top_level_python_source_root(rule_runner: RuleRunner) -> None:
rule_runner.write_files(
{
"src/proto/protos/f.proto": dedent(
"""\
syntax = "proto3";
package protos;
"""
),
"src/proto/protos/BUILD": "protobuf_sources(python_source_root='.')",
}
)
assert_files_generated(
rule_runner,
Address("src/proto/protos", relative_file_path="f.proto"),
source_roots=["/", "src/proto"],
expected_files=["protos/f_pb2.py"],
)
def test_bad_python_source_root(rule_runner: RuleRunner) -> None:
rule_runner.write_files(
{
"src/protobuf/dir1/f.proto": dedent(
"""\
syntax = "proto3";
package dir1;
"""
),
"src/protobuf/dir1/BUILD": "protobuf_sources(python_source_root='notasourceroot')",
}
)
with engine_error(NoSourceRootError):
assert_files_generated(
rule_runner,
Address("src/protobuf/dir1", relative_file_path="f.proto"),
source_roots=["src/protobuf"],
expected_files=[],
)
@pytest.mark.platform_specific_behavior
@pytest.mark.parametrize(
"major_minor_interpreter",
all_major_minor_python_versions(PythonProtobufMypyPlugin.default_interpreter_constraints),
)
def test_mypy_plugin(rule_runner: RuleRunner, major_minor_interpreter: str) -> None:
rule_runner.write_files(
{
"src/protobuf/dir1/f.proto": dedent(
"""\
syntax = "proto3";
package dir1;
message Person {
string name = 1;
int32 id = 2;
string email = 3;
}
"""
),
"src/protobuf/dir1/BUILD": "protobuf_sources()",
}
)
assert_files_generated(
rule_runner,
Address("src/protobuf/dir1", relative_file_path="f.proto"),
source_roots=["src/protobuf"],
extra_args=[
"--python-protobuf-mypy-plugin",
f"--mypy-protobuf-interpreter-constraints=['=={major_minor_interpreter}.*']",
],
expected_files=["src/protobuf/dir1/f_pb2.py", "src/protobuf/dir1/f_pb2.pyi"],
)
def test_grpc(rule_runner: RuleRunner) -> None:
rule_runner.write_files(
{
"src/protobuf/dir1/f.proto": dedent(GRPC_PROTO_STANZA),
"src/protobuf/dir1/BUILD": "protobuf_sources(grpc=True)",
}
)
assert_files_generated(
rule_runner,
Address("src/protobuf/dir1", relative_file_path="f.proto"),
source_roots=["src/protobuf"],
expected_files=["src/protobuf/dir1/f_pb2.py", "src/protobuf/dir1/f_pb2_grpc.py"],
)
def test_grpc_mypy_plugin(rule_runner: RuleRunner) -> None:
rule_runner.write_files(
{
"src/protobuf/dir1/f.proto": dedent(GRPC_PROTO_STANZA),
"src/protobuf/dir1/BUILD": "protobuf_sources(grpc=True)",
}
)
assert_files_generated(
rule_runner,
Address("src/protobuf/dir1", relative_file_path="f.proto"),
source_roots=["src/protobuf"],
mypy=True,
expected_files=[
"src/protobuf/dir1/f_pb2.py",
"src/protobuf/dir1/f_pb2.pyi",
"src/protobuf/dir1/f_pb2_grpc.py",
"src/protobuf/dir1/f_pb2_grpc.pyi",
],
)
def test_grpc_pre_v2_mypy_plugin(rule_runner: RuleRunner) -> None:
rule_runner.write_files(
{
"src/protobuf/dir1/f.proto": dedent(GRPC_PROTO_STANZA),
"src/protobuf/dir1/BUILD": "protobuf_sources(grpc=True)",
}
)
assert_files_generated(
rule_runner,
Address("src/protobuf/dir1", relative_file_path="f.proto"),
source_roots=["src/protobuf"],
extra_args=[
"--python-protobuf-mypy-plugin",
"--mypy-protobuf-version=mypy-protobuf==1.24",
"--mypy-protobuf-extra-requirements=six==1.16.0",
"--mypy-protobuf-lockfile=<none>",
],
expected_files=[
"src/protobuf/dir1/f_pb2.py",
"src/protobuf/dir1/f_pb2.pyi",
"src/protobuf/dir1/f_pb2_grpc.py",
],
)
|
|
"""The tests for the MQTT JSON light platform.
Configuration with RGB, brightness, color temp, effect, white value and XY:
light:
platform: mqtt_json
name: mqtt_json_light_1
state_topic: "home/rgb1"
command_topic: "home/rgb1/set"
brightness: true
color_temp: true
effect: true
rgb: true
white_value: true
xy: true
Configuration with RGB, brightness, color temp, effect, white value:
light:
platform: mqtt_json
name: mqtt_json_light_1
state_topic: "home/rgb1"
command_topic: "home/rgb1/set"
brightness: true
color_temp: true
effect: true
rgb: true
white_value: true
Configuration with RGB, brightness, color temp and effect:
light:
platform: mqtt_json
name: mqtt_json_light_1
state_topic: "home/rgb1"
command_topic: "home/rgb1/set"
brightness: true
color_temp: true
effect: true
rgb: true
Configuration with RGB, brightness and color temp:
light:
platform: mqtt_json
name: mqtt_json_light_1
state_topic: "home/rgb1"
command_topic: "home/rgb1/set"
brightness: true
rgb: true
color_temp: true
Configuration with RGB, brightness:
light:
platform: mqtt_json
name: mqtt_json_light_1
state_topic: "home/rgb1"
command_topic: "home/rgb1/set"
brightness: true
rgb: true
Config without RGB:
light:
platform: mqtt_json
name: mqtt_json_light_1
state_topic: "home/rgb1"
command_topic: "home/rgb1/set"
brightness: true
Config without RGB and brightness:
light:
platform: mqtt_json
name: mqtt_json_light_1
state_topic: "home/rgb1"
command_topic: "home/rgb1/set"
Config with brightness and scale:
light:
platform: mqtt_json
name: test
state_topic: "mqtt_json_light_1"
command_topic: "mqtt_json_light_1/set"
brightness: true
brightness_scale: 99
"""
import json
import unittest
from unittest.mock import patch
from homeassistant.setup import setup_component
from homeassistant.const import (
STATE_ON, STATE_OFF, STATE_UNAVAILABLE, ATTR_ASSUMED_STATE,
ATTR_SUPPORTED_FEATURES)
import homeassistant.components.light as light
import homeassistant.core as ha
from tests.common import (
get_test_home_assistant, mock_mqtt_component, fire_mqtt_message,
assert_setup_component, mock_coro)
class TestLightMQTTJSON(unittest.TestCase):
"""Test the MQTT JSON light."""
def setUp(self): # pylint: disable=invalid-name
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.mock_publish = mock_mqtt_component(self.hass)
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
def test_fail_setup_if_no_command_topic(self):
"""Test if setup fails with no command topic."""
with assert_setup_component(0, light.DOMAIN):
assert setup_component(self.hass, light.DOMAIN, {
light.DOMAIN: {
'platform': 'mqtt_json',
'name': 'test',
}
})
self.assertIsNone(self.hass.states.get('light.test'))
def test_no_color_brightness_color_temp_white_val_if_no_topics(self):
"""Test for no RGB, brightness, color temp, effect, white val or XY."""
assert setup_component(self.hass, light.DOMAIN, {
light.DOMAIN: {
'platform': 'mqtt_json',
'name': 'test',
'state_topic': 'test_light_rgb',
'command_topic': 'test_light_rgb/set',
}
})
state = self.hass.states.get('light.test')
self.assertEqual(STATE_OFF, state.state)
self.assertEqual(40, state.attributes.get(ATTR_SUPPORTED_FEATURES))
self.assertIsNone(state.attributes.get('rgb_color'))
self.assertIsNone(state.attributes.get('brightness'))
self.assertIsNone(state.attributes.get('color_temp'))
self.assertIsNone(state.attributes.get('effect'))
self.assertIsNone(state.attributes.get('white_value'))
self.assertIsNone(state.attributes.get('xy_color'))
self.assertIsNone(state.attributes.get('hs_color'))
fire_mqtt_message(self.hass, 'test_light_rgb', '{"state":"ON"}')
self.hass.block_till_done()
state = self.hass.states.get('light.test')
self.assertEqual(STATE_ON, state.state)
self.assertIsNone(state.attributes.get('rgb_color'))
self.assertIsNone(state.attributes.get('brightness'))
self.assertIsNone(state.attributes.get('color_temp'))
self.assertIsNone(state.attributes.get('effect'))
self.assertIsNone(state.attributes.get('white_value'))
self.assertIsNone(state.attributes.get('xy_color'))
self.assertIsNone(state.attributes.get('hs_color'))
def test_controlling_state_via_topic(self):
"""Test the controlling of the state via topic."""
assert setup_component(self.hass, light.DOMAIN, {
light.DOMAIN: {
'platform': 'mqtt_json',
'name': 'test',
'state_topic': 'test_light_rgb',
'command_topic': 'test_light_rgb/set',
'brightness': True,
'color_temp': True,
'effect': True,
'rgb': True,
'white_value': True,
'xy': True,
'hs': True,
'qos': '0'
}
})
state = self.hass.states.get('light.test')
self.assertEqual(STATE_OFF, state.state)
self.assertEqual(191, state.attributes.get(ATTR_SUPPORTED_FEATURES))
self.assertIsNone(state.attributes.get('rgb_color'))
self.assertIsNone(state.attributes.get('brightness'))
self.assertIsNone(state.attributes.get('color_temp'))
self.assertIsNone(state.attributes.get('effect'))
self.assertIsNone(state.attributes.get('white_value'))
self.assertIsNone(state.attributes.get('xy_color'))
self.assertIsNone(state.attributes.get('hs_color'))
self.assertFalse(state.attributes.get(ATTR_ASSUMED_STATE))
# Turn on the light, full white
fire_mqtt_message(self.hass, 'test_light_rgb',
'{"state":"ON",'
'"color":{"r":255,"g":255,"b":255},'
'"brightness":255,'
'"color_temp":155,'
'"effect":"colorloop",'
'"white_value":150}')
self.hass.block_till_done()
state = self.hass.states.get('light.test')
self.assertEqual(STATE_ON, state.state)
self.assertEqual((255, 255, 255), state.attributes.get('rgb_color'))
self.assertEqual(255, state.attributes.get('brightness'))
self.assertEqual(155, state.attributes.get('color_temp'))
self.assertEqual('colorloop', state.attributes.get('effect'))
self.assertEqual(150, state.attributes.get('white_value'))
self.assertEqual((0.323, 0.329), state.attributes.get('xy_color'))
self.assertEqual((0.0, 0.0), state.attributes.get('hs_color'))
# Turn the light off
fire_mqtt_message(self.hass, 'test_light_rgb', '{"state":"OFF"}')
self.hass.block_till_done()
state = self.hass.states.get('light.test')
self.assertEqual(STATE_OFF, state.state)
fire_mqtt_message(self.hass, 'test_light_rgb',
'{"state":"ON",'
'"brightness":100}')
self.hass.block_till_done()
light_state = self.hass.states.get('light.test')
self.hass.block_till_done()
self.assertEqual(100,
light_state.attributes['brightness'])
fire_mqtt_message(self.hass, 'test_light_rgb',
'{"state":"ON",'
'"color":{"r":125,"g":125,"b":125}}')
self.hass.block_till_done()
light_state = self.hass.states.get('light.test')
self.assertEqual((255, 255, 255),
light_state.attributes.get('rgb_color'))
fire_mqtt_message(self.hass, 'test_light_rgb',
'{"state":"ON",'
'"color":{"x":0.135,"y":0.135}}')
self.hass.block_till_done()
light_state = self.hass.states.get('light.test')
self.assertEqual((0.141, 0.14),
light_state.attributes.get('xy_color'))
fire_mqtt_message(self.hass, 'test_light_rgb',
'{"state":"ON",'
'"color":{"h":180,"s":50}}')
self.hass.block_till_done()
light_state = self.hass.states.get('light.test')
self.assertEqual((180.0, 50.0),
light_state.attributes.get('hs_color'))
fire_mqtt_message(self.hass, 'test_light_rgb',
'{"state":"ON",'
'"color_temp":155}')
self.hass.block_till_done()
light_state = self.hass.states.get('light.test')
self.assertEqual(155, light_state.attributes.get('color_temp'))
fire_mqtt_message(self.hass, 'test_light_rgb',
'{"state":"ON",'
'"effect":"colorloop"}')
self.hass.block_till_done()
light_state = self.hass.states.get('light.test')
self.assertEqual('colorloop', light_state.attributes.get('effect'))
fire_mqtt_message(self.hass, 'test_light_rgb',
'{"state":"ON",'
'"white_value":155}')
self.hass.block_till_done()
light_state = self.hass.states.get('light.test')
self.assertEqual(155, light_state.attributes.get('white_value'))
def test_sending_mqtt_commands_and_optimistic(self):
"""Test the sending of command in optimistic mode."""
fake_state = ha.State('light.test', 'on', {'brightness': 95,
'hs_color': [100, 100],
'effect': 'random',
'color_temp': 100,
'white_value': 50})
with patch('homeassistant.components.light.mqtt_json'
'.async_get_last_state',
return_value=mock_coro(fake_state)):
assert setup_component(self.hass, light.DOMAIN, {
light.DOMAIN: {
'platform': 'mqtt_json',
'name': 'test',
'command_topic': 'test_light_rgb/set',
'brightness': True,
'color_temp': True,
'effect': True,
'rgb': True,
'white_value': True,
'qos': 2
}
})
state = self.hass.states.get('light.test')
self.assertEqual(STATE_ON, state.state)
self.assertEqual(95, state.attributes.get('brightness'))
self.assertEqual((100, 100), state.attributes.get('hs_color'))
self.assertEqual('random', state.attributes.get('effect'))
self.assertEqual(100, state.attributes.get('color_temp'))
self.assertEqual(50, state.attributes.get('white_value'))
self.assertEqual(191, state.attributes.get(ATTR_SUPPORTED_FEATURES))
self.assertTrue(state.attributes.get(ATTR_ASSUMED_STATE))
light.turn_on(self.hass, 'light.test')
self.hass.block_till_done()
self.mock_publish.async_publish.assert_called_once_with(
'test_light_rgb/set', '{"state": "ON"}', 2, False)
self.mock_publish.async_publish.reset_mock()
state = self.hass.states.get('light.test')
self.assertEqual(STATE_ON, state.state)
light.turn_off(self.hass, 'light.test')
self.hass.block_till_done()
self.mock_publish.async_publish.assert_called_once_with(
'test_light_rgb/set', '{"state": "OFF"}', 2, False)
self.mock_publish.async_publish.reset_mock()
state = self.hass.states.get('light.test')
self.assertEqual(STATE_OFF, state.state)
light.turn_on(self.hass, 'light.test',
brightness=50, color_temp=155, effect='colorloop',
white_value=170)
self.hass.block_till_done()
self.assertEqual('test_light_rgb/set',
self.mock_publish.async_publish.mock_calls[0][1][0])
self.assertEqual(2,
self.mock_publish.async_publish.mock_calls[0][1][2])
self.assertEqual(False,
self.mock_publish.async_publish.mock_calls[0][1][3])
# Get the sent message
message_json = json.loads(
self.mock_publish.async_publish.mock_calls[0][1][1])
self.assertEqual(50, message_json["brightness"])
self.assertEqual(155, message_json["color_temp"])
self.assertEqual('colorloop', message_json["effect"])
self.assertEqual(170, message_json["white_value"])
self.assertEqual("ON", message_json["state"])
state = self.hass.states.get('light.test')
self.assertEqual(STATE_ON, state.state)
self.assertEqual(50, state.attributes['brightness'])
self.assertEqual(155, state.attributes['color_temp'])
self.assertEqual('colorloop', state.attributes['effect'])
self.assertEqual(170, state.attributes['white_value'])
# Test a color command
light.turn_on(self.hass, 'light.test',
brightness=50, hs_color=(125, 100))
self.hass.block_till_done()
self.assertEqual('test_light_rgb/set',
self.mock_publish.async_publish.mock_calls[0][1][0])
self.assertEqual(2,
self.mock_publish.async_publish.mock_calls[0][1][2])
self.assertEqual(False,
self.mock_publish.async_publish.mock_calls[0][1][3])
# Get the sent message
message_json = json.loads(
self.mock_publish.async_publish.mock_calls[1][1][1])
self.assertEqual(50, message_json["brightness"])
self.assertEqual({
'r': 0,
'g': 255,
'b': 21,
}, message_json["color"])
self.assertEqual("ON", message_json["state"])
state = self.hass.states.get('light.test')
self.assertEqual(STATE_ON, state.state)
self.assertEqual(50, state.attributes['brightness'])
self.assertEqual((125, 100), state.attributes['hs_color'])
def test_sending_hs_color(self):
"""Test light.turn_on with hs color sends hs color parameters."""
assert setup_component(self.hass, light.DOMAIN, {
light.DOMAIN: {
'platform': 'mqtt_json',
'name': 'test',
'command_topic': 'test_light_rgb/set',
'hs': True,
}
})
light.turn_on(self.hass, 'light.test', hs_color=(180.0, 50.0))
self.hass.block_till_done()
message_json = json.loads(
self.mock_publish.async_publish.mock_calls[0][1][1])
self.assertEqual("ON", message_json["state"])
self.assertEqual({
'h': 180.0,
's': 50.0,
}, message_json["color"])
def test_flash_short_and_long(self):
"""Test for flash length being sent when included."""
assert setup_component(self.hass, light.DOMAIN, {
light.DOMAIN: {
'platform': 'mqtt_json',
'name': 'test',
'state_topic': 'test_light_rgb',
'command_topic': 'test_light_rgb/set',
'flash_time_short': 5,
'flash_time_long': 15,
'qos': 0
}
})
state = self.hass.states.get('light.test')
self.assertEqual(STATE_OFF, state.state)
self.assertEqual(40, state.attributes.get(ATTR_SUPPORTED_FEATURES))
light.turn_on(self.hass, 'light.test', flash="short")
self.hass.block_till_done()
self.assertEqual('test_light_rgb/set',
self.mock_publish.async_publish.mock_calls[0][1][0])
self.assertEqual(0,
self.mock_publish.async_publish.mock_calls[0][1][2])
self.assertEqual(False,
self.mock_publish.async_publish.mock_calls[0][1][3])
# Get the sent message
message_json = json.loads(
self.mock_publish.async_publish.mock_calls[0][1][1])
self.assertEqual(5, message_json["flash"])
self.assertEqual("ON", message_json["state"])
self.mock_publish.async_publish.reset_mock()
light.turn_on(self.hass, 'light.test', flash="long")
self.hass.block_till_done()
self.assertEqual('test_light_rgb/set',
self.mock_publish.async_publish.mock_calls[0][1][0])
self.assertEqual(0,
self.mock_publish.async_publish.mock_calls[0][1][2])
self.assertEqual(False,
self.mock_publish.async_publish.mock_calls[0][1][3])
# Get the sent message
message_json = json.loads(
self.mock_publish.async_publish.mock_calls[0][1][1])
self.assertEqual(15, message_json["flash"])
self.assertEqual("ON", message_json["state"])
def test_transition(self):
"""Test for transition time being sent when included."""
assert setup_component(self.hass, light.DOMAIN, {
light.DOMAIN: {
'platform': 'mqtt_json',
'name': 'test',
'state_topic': 'test_light_rgb',
'command_topic': 'test_light_rgb/set',
'qos': 0
}
})
state = self.hass.states.get('light.test')
self.assertEqual(STATE_OFF, state.state)
self.assertEqual(40, state.attributes.get(ATTR_SUPPORTED_FEATURES))
light.turn_on(self.hass, 'light.test', transition=10)
self.hass.block_till_done()
self.assertEqual('test_light_rgb/set',
self.mock_publish.async_publish.mock_calls[0][1][0])
self.assertEqual(0,
self.mock_publish.async_publish.mock_calls[0][1][2])
self.assertEqual(False,
self.mock_publish.async_publish.mock_calls[0][1][3])
# Get the sent message
message_json = json.loads(
self.mock_publish.async_publish.mock_calls[0][1][1])
self.assertEqual(10, message_json["transition"])
self.assertEqual("ON", message_json["state"])
# Transition back off
light.turn_off(self.hass, 'light.test', transition=10)
self.hass.block_till_done()
self.assertEqual('test_light_rgb/set',
self.mock_publish.async_publish.mock_calls[1][1][0])
self.assertEqual(0,
self.mock_publish.async_publish.mock_calls[1][1][2])
self.assertEqual(False,
self.mock_publish.async_publish.mock_calls[1][1][3])
# Get the sent message
message_json = json.loads(
self.mock_publish.async_publish.mock_calls[1][1][1])
self.assertEqual(10, message_json["transition"])
self.assertEqual("OFF", message_json["state"])
def test_brightness_scale(self):
"""Test for brightness scaling."""
assert setup_component(self.hass, light.DOMAIN, {
light.DOMAIN: {
'platform': 'mqtt_json',
'name': 'test',
'state_topic': 'test_light_bright_scale',
'command_topic': 'test_light_bright_scale/set',
'brightness': True,
'brightness_scale': 99
}
})
state = self.hass.states.get('light.test')
self.assertEqual(STATE_OFF, state.state)
self.assertIsNone(state.attributes.get('brightness'))
self.assertFalse(state.attributes.get(ATTR_ASSUMED_STATE))
# Turn on the light
fire_mqtt_message(self.hass, 'test_light_bright_scale',
'{"state":"ON"}')
self.hass.block_till_done()
state = self.hass.states.get('light.test')
self.assertEqual(STATE_ON, state.state)
self.assertEqual(255, state.attributes.get('brightness'))
# Turn on the light with brightness
fire_mqtt_message(self.hass, 'test_light_bright_scale',
'{"state":"ON",'
'"brightness": 99}')
self.hass.block_till_done()
state = self.hass.states.get('light.test')
self.assertEqual(STATE_ON, state.state)
self.assertEqual(255, state.attributes.get('brightness'))
def test_invalid_color_brightness_and_white_values(self):
"""Test that invalid color/brightness/white values are ignored."""
assert setup_component(self.hass, light.DOMAIN, {
light.DOMAIN: {
'platform': 'mqtt_json',
'name': 'test',
'state_topic': 'test_light_rgb',
'command_topic': 'test_light_rgb/set',
'brightness': True,
'rgb': True,
'white_value': True,
'qos': '0'
}
})
state = self.hass.states.get('light.test')
self.assertEqual(STATE_OFF, state.state)
self.assertEqual(185, state.attributes.get(ATTR_SUPPORTED_FEATURES))
self.assertIsNone(state.attributes.get('rgb_color'))
self.assertIsNone(state.attributes.get('brightness'))
self.assertIsNone(state.attributes.get('white_value'))
self.assertFalse(state.attributes.get(ATTR_ASSUMED_STATE))
# Turn on the light
fire_mqtt_message(self.hass, 'test_light_rgb',
'{"state":"ON",'
'"color":{"r":255,"g":255,"b":255},'
'"brightness": 255,'
'"white_value": 255}')
self.hass.block_till_done()
state = self.hass.states.get('light.test')
self.assertEqual(STATE_ON, state.state)
self.assertEqual((255, 255, 255), state.attributes.get('rgb_color'))
self.assertEqual(255, state.attributes.get('brightness'))
self.assertEqual(255, state.attributes.get('white_value'))
# Bad color values
fire_mqtt_message(self.hass, 'test_light_rgb',
'{"state":"ON",'
'"color":{"r":"bad","g":"val","b":"test"}}')
self.hass.block_till_done()
# Color should not have changed
state = self.hass.states.get('light.test')
self.assertEqual(STATE_ON, state.state)
self.assertEqual((255, 255, 255), state.attributes.get('rgb_color'))
# Bad brightness values
fire_mqtt_message(self.hass, 'test_light_rgb',
'{"state":"ON",'
'"brightness": "badValue"}')
self.hass.block_till_done()
# Brightness should not have changed
state = self.hass.states.get('light.test')
self.assertEqual(STATE_ON, state.state)
self.assertEqual(255, state.attributes.get('brightness'))
# Bad white value
fire_mqtt_message(self.hass, 'test_light_rgb',
'{"state":"ON",'
'"white_value": "badValue"}')
self.hass.block_till_done()
# White value should not have changed
state = self.hass.states.get('light.test')
self.assertEqual(STATE_ON, state.state)
self.assertEqual(255, state.attributes.get('white_value'))
def test_default_availability_payload(self):
"""Test availability by default payload with defined topic."""
self.assertTrue(setup_component(self.hass, light.DOMAIN, {
light.DOMAIN: {
'platform': 'mqtt_json',
'name': 'test',
'state_topic': 'test_light_rgb',
'command_topic': 'test_light_rgb/set',
'availability_topic': 'availability-topic'
}
}))
state = self.hass.states.get('light.test')
self.assertEqual(STATE_UNAVAILABLE, state.state)
fire_mqtt_message(self.hass, 'availability-topic', 'online')
self.hass.block_till_done()
state = self.hass.states.get('light.test')
self.assertNotEqual(STATE_UNAVAILABLE, state.state)
fire_mqtt_message(self.hass, 'availability-topic', 'offline')
self.hass.block_till_done()
state = self.hass.states.get('light.test')
self.assertEqual(STATE_UNAVAILABLE, state.state)
def test_custom_availability_payload(self):
"""Test availability by custom payload with defined topic."""
self.assertTrue(setup_component(self.hass, light.DOMAIN, {
light.DOMAIN: {
'platform': 'mqtt_json',
'name': 'test',
'state_topic': 'test_light_rgb',
'command_topic': 'test_light_rgb/set',
'availability_topic': 'availability-topic',
'payload_available': 'good',
'payload_not_available': 'nogood'
}
}))
state = self.hass.states.get('light.test')
self.assertEqual(STATE_UNAVAILABLE, state.state)
fire_mqtt_message(self.hass, 'availability-topic', 'good')
self.hass.block_till_done()
state = self.hass.states.get('light.test')
self.assertNotEqual(STATE_UNAVAILABLE, state.state)
fire_mqtt_message(self.hass, 'availability-topic', 'nogood')
self.hass.block_till_done()
state = self.hass.states.get('light.test')
self.assertEqual(STATE_UNAVAILABLE, state.state)
|
|
import os
import re
import sys
import time
import logging
import telegram
import yaml
import threading
from urllib.error import URLError
from lytebot import config, config_dir
from lytebot.errors import CommandError
class LyteBot:
paths = {
'ignored': os.path.join(config_dir, 'ignored.yml'),
'disabled': os.path.join(config_dir, 'disabled.yml'),
'blacklisted': os.path.join(config_dir, 'blacklisted.yml'),
}
def __init__(self):
'''Initialize bot'''
self.prefix = '/'
self.ignored = {}
self.commands = {}
self.disabled = []
self.previous = {}
self.blacklisted = []
self._bot = telegram.Bot(token=config['telegram']['token'])
# Disable Telegram API's logger to prevent spam
self._bot.logger.disabled = True
for n, f in self.paths.items():
try:
with open(f, 'r') as f:
                    setattr(self, n, yaml.safe_load(f))
except FileNotFoundError:
pass
except Exception as e:
logging.warning('Couldn\'t load {} data: {}'.format(n, e))
else:
logging.info('Loaded {} data'.format(n))
def _set_previous(self, func, args):
'''
Save previous command per chat
:param func: Command function
:param args: Arguments given to command
'''
self.previous[args.chat_id] = {'func': func, 'args': args}
def _handle_msg(self, update):
'''
Handles all messages sent in all chats (that the bot can see)
:param update: Object with chat info
'''
# Ignore stickers, pictures and other non-text messages
if not update['message'] or not update.message['text']:
return
# Is the user who sent the message ignored?
if update.message.chat_id in self.ignored and \
update.message.from_user.username in self.ignored[update.message.chat_id]:
return
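        # Strip the prefix character: '/command args' -> prefix '/', message 'command args'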
message = update.message.text[1::]
prefix = update.message.text[0]
command = self.get_command(message)
user = update.message.from_user.username or update.message.from_user.first_name
if command and prefix == self.prefix and not self._is_disabled(command):
# Check if the user is an owner if he calls an admin command
if command['admin'] and update.message.from_user.username not in config['telegram']['owners']:
text = '@{} You can\'t do that!'.format(user)
else:
text = command['func'](update.message, user)
t = threading.Thread(target=self._bot.sendMessage, kwargs={
'chat_id': update.message.chat_id,
'text': text
})
t.start()
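            # Advance the polling offset so getUpdates does not return this update again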
self._last_id = update.update_id + 1
if not message.startswith('!!'):
self._set_previous(command['func'], update.message)
def is_command(self, command):
'''
Check if a command exists
:param command: Name to check
:rtype: bool
'''
return command['func'].__name__ in self.commands
def blacklist(self, sub):
'''
Blacklist a sub from the /r command
:param sub: Subreddit to blacklist
'''
if sub not in self.blacklisted:
self.blacklisted.append(sub.lower())
self.save_data(self.paths['blacklisted'], self.blacklisted)
def whitelist(self, sub):
'''
Whitelist a sub from the /r command
:param sub: Subreddit to whitelist
'''
try:
self.blacklisted.remove(sub.lower())
except ValueError:
return
self.save_data(self.paths['blacklisted'], self.blacklisted)
def disable(self, command):
'''
Disables a command in _all_ chats
        :param command: Command to disable
:raises: CommandError
'''
if self._is_disabled(command):
raise CommandError('Command {} already disabled'.format(command['func'].__name__))
if not self.is_command(command):
raise CommandError('Command {} doesn\'t exist'.format(command['func'].__name__))
self.disabled.append(command['func'].__name__)
self.save_data(self.paths['disabled'], self.disabled)
def _is_disabled(self, command):
'''
Check if command is disabled
:param command: Command to check
:rtype: bool
'''
return command['func'].__name__ in self.disabled
def enable(self, command):
'''
Enables a command in _all_ chats
:param command: Command to enable
:raises: CommandError
'''
if self._is_enabled(command):
raise CommandError('Command {} isn\'t disabled'.format(command['func'].__name__))
if not self.is_command(command):
raise CommandError('Command {} doesn\'t exist'.format(command['func'].__name__))
self.disabled.remove(command['func'].__name__)
self.save_data(self.paths['disabled'], self.disabled)
def _is_enabled(self, command):
'''
Check if command is enabled
:param command: Command to check
:rtype: bool
'''
return not self._is_disabled(command)
def save_data(self, file, data):
'''
        Saves data to a file so it persists across restarts
:param file: File to write to
:param data: Data to write to file
'''
try:
with open(file, 'w') as f:
f.write(yaml.dump(data))
except Exception as e:
logging.warning('Failed to save data: {}'.format(e))
def ignore(self, chat_id, user):
'''
Ignores a user in a chat
:param chat_id: Chat ID
:param user: Username to ignore
'''
if chat_id not in self.ignored:
self.ignored[chat_id] = []
        # allow /ignore @username for convenience
        user = user.replace('@', '')
        if user not in self.ignored[chat_id]:
            self.ignored[chat_id].append(user)
self.save_data(self.paths['ignored'], self.ignored)
def unignore(self, chat_id, user):
'''
Unignores a user in a chat
:param chat_id: Chat ID
:param user: Username to unignore
'''
user = user.replace('@', '')
self.ignored[chat_id].remove(user)
self.save_data(self.paths['ignored'], self.ignored)
def command(self, handle, admin=False):
'''
Create a new command entry, saved in self.commands
:param handle: The name for the command
:param admin: Is the command meant for owners only?
:rtype: function
'''
def arguments(function):
self.commands[handle] = {'admin': admin, 'func': function}
logging.info('Found command -> {}'.format(function.__name__))
return self.commands
return arguments
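    # A minimal usage sketch for the decorator above (hypothetical handler name;
    # the real commands are registered elsewhere in the code base):
    #
    #     @lytebot.command('ping')
    #     def ping(message, user):
    #         return '@{} pong'.format(user)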
def get_command(self, message):
'''
Gets command from message sent, if it contains a command
:param message: Message that could contain a command
:rtype: function or None
'''
try:
bot_name = self._bot.getMe()['username']
except Exception as e:
logging.warning(e)
return None
for command in self.commands:
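            # Match '<command>', '<command>@<bot_name>' or '<command> <args>' at the start of the message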
pattern = r'^({0}@{1}$|{0}$|{0}(@{1}|)\ \w*)'.format(command, bot_name)
if re.match(pattern, message):
return self.commands[command]
return None
def run(self):
'''Start listening for commands'''
logging.info('Started bot')
try:
self._last_id = self._bot.getUpdates()[0].update_id
except (IndexError, KeyError):
self._last_id = None
while True:
try:
updates = self._bot.getUpdates(offset=self._last_id, timeout=10)
except telegram.error.TelegramError as e:
if e.message in ("Bad Gateway", "Timed out"):
time.sleep(1)
continue
elif e.message == "Unauthorized":
                    self._last_id += 1
else:
logging.critical('Failed to start bot: {} (is your Telegram token correct?)'.format(e))
sys.exit(1)
except URLError as e:
time.sleep(1)
continue
for update in updates:
self._handle_msg(update)
lytebot = LyteBot()
|
|
from dateutil import parser
from django.test import TransactionTestCase
from django.contrib.auth.models import Group
from django.core.files.uploadedfile import UploadedFile
from django.core.exceptions import ValidationError
from django.http import HttpRequest
from hs_core.hydroshare import resource
from hs_core import hydroshare
from hs_core.testing import MockIRODSTestCaseMixin
from hs_file_types.models import geofeature
from hs_geographic_feature_resource.models import GeographicFeatureResource, OriginalCoverage, \
GeometryInformation, FieldInformation
from hs_geographic_feature_resource.receivers import metadata_element_pre_create_handler,\
metadata_element_pre_update_handler
class TestGeoFeature(MockIRODSTestCaseMixin, TransactionTestCase):
def setUp(self):
super(TestGeoFeature, self).setUp()
self.group, _ = Group.objects.get_or_create(name='Hydroshare Author')
self.user = hydroshare.create_account(
'[email protected]',
username='drew',
first_name='Zhiyu',
last_name='Li',
superuser=False,
groups=[self.group]
)
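        # tolerance for floating-point comparisons of extracted coordinate values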
self.allowance = 0.00001
self.resGeoFeature = hydroshare.create_resource(
resource_type='GeographicFeatureResource',
owner=self.user,
title='Test Geographic Feature (shapefiles)',
keywords=['kw1', 'kw2']
)
def test_geo_feature_basic_metadata(self):
# add another creator with all sub_elements
cr_name = 'Creator A'
cr_des = 'http://hydroshare.org/user/001'
cr_org = "BYU"
cr_email = '[email protected]'
cr_address = "Provo, UT, USA"
cr_phone = '123-456-7890'
cr_homepage = 'http://home.byu.edu/'
resource.create_metadata_element(self.resGeoFeature.short_id,
'creator',
name=cr_name,
description=cr_des,
organization=cr_org,
email=cr_email,
address=cr_address,
phone=cr_phone,
homepage=cr_homepage,
identifiers={'ORCID': 'https://orcid.org/john',
'ResearchGateID':
'https://www.researchgate.net/john'})
# add another creator with only the name
resource.create_metadata_element(self.resGeoFeature.short_id, 'creator', name='Creator B')
# test adding a contributor with all sub_elements
con_name = 'Contributor A'
con_des = 'http://hydroshare.org/user/002'
con_org = "BYU"
con_email = '[email protected]'
con_address = "Provo, UT, USA"
con_phone = '123-456-7890'
con_homepage = 'http://usu.edu/homepage/009'
resource.create_metadata_element(self.resGeoFeature.short_id,
'contributor',
name=con_name,
description=con_des,
organization=con_org,
email=con_email,
address=con_address,
phone=con_phone,
homepage=con_homepage,
identifiers={'ORCID': 'https://orcid.org/john',
'ResearchGateID':
'https://www.researchgate.net/john'})
# add another creator with only the name
resource.create_metadata_element(self.resGeoFeature.short_id,
'contributor', name='Contributor B')
# add a period type coverage
value_dict = {'name': 'Name for period coverage', 'start': '1/1/2015', 'end': '12/31/2015'}
resource.create_metadata_element(self.resGeoFeature.short_id,
'coverage',
type='period',
value=value_dict)
        # add a box type coverage
value_dict = {'name': 'Name for box coverage',
'northlimit': '80', 'eastlimit': '130',
'southlimit': '70', 'westlimit': '120'}
value_dict["projection"] = "WGS 84 EPSG:4326"
value_dict["units"] = "Decimal degrees"
resource.create_metadata_element(self.resGeoFeature.short_id,
'coverage', type='box', value=value_dict)
# add date of type 'valid'
resource.create_metadata_element(self.resGeoFeature.short_id,
'date',
type='valid',
start_date=parser.parse('1/1/2012'),
end_date=parser.parse('12/31/2012'))
# add a format element
format = 'shp'
resource.create_metadata_element(self.resGeoFeature.short_id, 'format', value=format)
# add a relation element of uri type
resource.create_metadata_element(self.resGeoFeature.short_id, 'relation', type='isPartOf',
value='http://hydroshare.org/resource/001')
# add another relation element of non-uri type
resource.create_metadata_element(self.resGeoFeature.short_id, 'relation', type='isReferencedBy',
value='This resource is for another resource')
# add a subject element
resource.create_metadata_element(self.resGeoFeature.short_id, 'subject', value='sub-1')
# add another subject element
resource.create_metadata_element(self.resGeoFeature.short_id, 'subject', value='sub-2')
self.resGeoFeature.delete()
def test_geo_feature_res_specific_metadata(self):
# originalcoverage
# no OriginalCoverage obj
self.assertEqual(OriginalCoverage.objects.all().count(), 0)
        # create OriginalCoverage obj without a required parameter: westlimit
with self.assertRaises(Exception):
resource.create_metadata_element(self.resGeoFeature.short_id, 'originalcoverage',
northlimit='1', eastlimit='2', southlimit='3')
# no OriginalCoverage obj
self.assertEqual(OriginalCoverage.objects.all().count(), 0)
# create 1 OriginalCoverage obj with required para
resource.create_metadata_element(self.resGeoFeature.short_id, 'originalcoverage',
northlimit='1', eastlimit='2',
southlimit='3', westlimit='4')
self.assertEqual(OriginalCoverage.objects.all().count(), 1)
# may not create any more OriginalCoverage
with self.assertRaises(Exception):
resource.create_metadata_element(self.resGeoFeature.short_id, 'originalcoverage',
northlimit='1', eastlimit='2',
southlimit='3', westlimit='4')
self.assertEqual(OriginalCoverage.objects.all().count(), 1)
# update existing meta
resource.update_metadata_element(self.resGeoFeature.short_id, 'originalcoverage',
element_id=OriginalCoverage.objects.first().id,
northlimit='11', eastlimit='22',
southlimit='33', westlimit='44',
projection_string='projection_string1',
projection_name='projection_name1',
datum='datum1', unit='unit1')
self.assertEqual(OriginalCoverage.objects.first().unit, 'unit1')
# delete OriginalCoverage obj
resource.delete_metadata_element(self.resGeoFeature.short_id, 'originalcoverage',
element_id=OriginalCoverage.objects.first().id)
self.assertEqual(OriginalCoverage.objects.all().count(), 0)
# GeometryInformation
# no GeometryInformation obj
self.assertEqual(GeometryInformation.objects.all().count(), 0)
# create 1 GeometryInformation obj with required para
resource.create_metadata_element(self.resGeoFeature.short_id, 'GeometryInformation',
featureCount='1', geometryType='Polygon_test')
self.assertEqual(GeometryInformation.objects.all().count(), 1)
# may not create any more GeometryInformation
with self.assertRaises(Exception):
resource.create_metadata_element(self.resGeoFeature.short_id, 'GeometryInformation',
featureCount='1', geometryType='Polygon_test')
# update existing meta
resource.update_metadata_element(self.resGeoFeature.short_id, 'GeometryInformation',
element_id=GeometryInformation.objects.first().id,
featureCount='2', geometryType='Point_test')
self.assertEqual(GeometryInformation.objects.first().geometryType, 'Point_test')
self.assertEqual(GeometryInformation.objects.first().featureCount, 2)
# delete GeometryInformation obj
resource.delete_metadata_element(self.resGeoFeature.short_id, 'GeometryInformation',
element_id=GeometryInformation.objects.first().id)
self.assertEqual(GeometryInformation.objects.all().count(), 0)
# FieldInformation
# no FieldInformation obj
self.assertEqual(FieldInformation.objects.all().count(), 0)
# create 1 FieldInformation obj with required para
resource.create_metadata_element(self.resGeoFeature.short_id, 'FieldInformation',
fieldName='fieldName1', fieldType='fieldType1')
self.assertEqual(FieldInformation.objects.all().count(), 1)
resource.create_metadata_element(self.resGeoFeature.short_id, 'FieldInformation',
fieldName='fieldName2', fieldType='fieldType2')
self.assertEqual(FieldInformation.objects.all().count(), 2)
# update existing meta
field_info_obj_list = FieldInformation.objects.filter(fieldName='fieldName1')
self.assertEqual(field_info_obj_list.count(), 1)
field_1_ele_id_old = field_info_obj_list[0].id
resource.update_metadata_element(self.resGeoFeature.short_id, 'FieldInformation',
element_id=field_1_ele_id_old,
fieldName='fieldName1_new',
fieldType='fieldType1_new')
field_info_obj_list = FieldInformation.objects.filter(fieldName='fieldName1_new')
self.assertEqual(field_info_obj_list.count(), 1)
field_1_ele_id_new = field_info_obj_list[0].id
# ele_id should not change
self.assertEqual(field_1_ele_id_new, field_1_ele_id_old)
# old value is gone
field_info_obj_list = FieldInformation.objects.filter(fieldName='fieldName1')
self.assertEqual(field_info_obj_list.count(), 0)
field_info_obj_list = FieldInformation.objects.filter(fieldName='fieldName2')
self.assertEqual(field_info_obj_list.count(), 1)
field_2_ele_id_old = field_info_obj_list[0].id
self.assertEqual(FieldInformation.objects.all().count(), 2)
# delete FieldInformation obj
resource.delete_metadata_element(self.resGeoFeature.short_id, 'FieldInformation',
element_id=field_1_ele_id_old)
self.assertEqual(FieldInformation.objects.all().count(), 1)
field_info_obj_list = FieldInformation.objects.filter(fieldName='fieldName1_new')
self.assertEqual(field_info_obj_list.count(), 0)
field_info_obj_list = FieldInformation.objects.filter(fieldName='fieldName2')
self.assertEqual(field_info_obj_list.count(), 1)
resource.delete_metadata_element(self.resGeoFeature.short_id, 'FieldInformation',
element_id=field_2_ele_id_old)
self.assertEqual(FieldInformation.objects.all().count(), 0)
self.resGeoFeature.delete()
def test_create_resource_with_zip_file(self):
# test that file upload will be successful and metadata gets extracted
# if the zip file has the 3 required files
# this zip file has only the required 3 files (.shp, .shx and .dbf)
files = []
target = 'hs_geographic_feature_resource/tests/states_required_files.zip'
files.append(UploadedFile(file=open(target, 'rb'), name='states_required_files.zip'))
self.resGeoFeature = hydroshare.create_resource(
resource_type='GeographicFeatureResource',
owner=self.user,
title='Test Geographic Feature (shapefiles)',
keywords=['kw1', 'kw2'],
files=files
)
# uploaded file validation and metadata extraction happens in post resource
# creation handler
hydroshare.utils.resource_post_create_actions(resource=self.resGeoFeature, user=self.user,
metadata=[])
# check that the resource has 3 files
self.assertEqual(self.resGeoFeature.files.count(), 3)
# test extracted metadata
# there should not be any resource level coverage
self.assertEqual(self.resGeoFeature.metadata.coverages.count(), 0)
self.assertNotEqual(self.resGeoFeature.metadata.geometryinformation, None)
self.assertEqual(self.resGeoFeature.metadata.geometryinformation.featureCount, 51)
self.assertEqual(self.resGeoFeature.metadata.geometryinformation.geometryType,
"MULTIPOLYGON")
self.assertNotEqual(self.resGeoFeature.metadata.originalcoverage, None)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.datum,
'unknown')
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.projection_name,
'unknown')
self.assertGreater(len(self.resGeoFeature.metadata.originalcoverage.projection_string), 0)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.unit, 'unknown')
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.eastlimit, -66.9692712587578)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.northlimit, 71.406235393967)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.southlimit, 18.921786345087)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.westlimit,
-178.217598362366)
self.resGeoFeature.delete()
def test_create_resource_with_invalid_zip_file(self):
# test that file upload will fail when an invalid zip file is used to create a resource
files = []
target = 'hs_geographic_feature_resource/tests/states_invalid.zip'
files.append(UploadedFile(file=open(target, 'rb'), name='states_invalid.zip'))
self.resGeoFeature = hydroshare.create_resource(
resource_type='GeographicFeatureResource',
owner=self.user,
title='Test Geographic Feature (shapefiles)',
keywords=['kw1', 'kw2'],
files=files
)
# uploaded file validation and metadata extraction happens in post resource
        # creation handler - should fail - no files get uploaded
with self.assertRaises(ValidationError):
hydroshare.utils.resource_post_create_actions(resource=self.resGeoFeature,
user=self.user, metadata=[])
# check that the resource has no files
self.assertEqual(self.resGeoFeature.files.count(), 0)
self.resGeoFeature.delete()
def test_add_zip_file_to_resource(self):
# here we are using a zip file that has all the 15 (3 required + 12 optional) files
# check that the resource has no files
self.assertEqual(self.resGeoFeature.files.count(), 0)
files = []
target = 'hs_geographic_feature_resource/tests/gis.osm_adminareas_v06_all_files.zip'
files.append(UploadedFile(file=open(target, 'rb'),
name='gis.osm_adminareas_v06_all_files.zip'))
hydroshare.utils.resource_file_add_process(self.resGeoFeature, files, self.user)
# check that the resource has 15 files
self.assertEqual(self.resGeoFeature.files.count(), 15)
# test extracted metadata
self.assertEqual(self.resGeoFeature.metadata.fieldinformations.all().count(), 7)
self.assertEqual(self.resGeoFeature.metadata.geometryinformation.featureCount, 87)
self.assertEqual(self.resGeoFeature.metadata.geometryinformation.geometryType, "POLYGON")
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.datum, 'WGS_1984')
self.assertTrue(abs(self.resGeoFeature.metadata.originalcoverage.eastlimit -
3.4520493) < self.allowance)
self.assertTrue(abs(self.resGeoFeature.metadata.originalcoverage.northlimit -
45.0466382) < self.allowance)
self.assertTrue(abs(self.resGeoFeature.metadata.originalcoverage.southlimit -
42.5732416) < self.allowance)
self.assertTrue(abs(self.resGeoFeature.metadata.originalcoverage.westlimit -
(-0.3263017)) < self.allowance)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.unit, 'degree')
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.projection_name,
'WGS 84')
self.resGeoFeature.delete()
def test_delete_shp_shx_dbf_file(self):
        # test that deleting any of the required files (.shp, .shx or .dbf) deletes all files
self._test_delete_file(file_extension='.shp')
self._test_delete_file(file_extension='.shx')
self._test_delete_file(file_extension='.dbf')
self.resGeoFeature.delete()
def test_delete_optional_files(self):
# test that deleting any of the optional files deletes only that file
for ext in ('.prj', '.sbx', '.sbn', '.cpg', '.xml', '.fbn', '.ain', '.aih', '.atx', '.ixs',
'.mxs', '.fbx'):
self._test_delete_optional_file(file_extension=ext)
def test_delete_prj_file(self):
# deleting .prj file should set attributes (datum, unit, and projection_name) of
        # the originalcoverage element to 'unknown' and delete the spatial coverage at the resource
# level
self.assertEqual(self.resGeoFeature.files.count(), 0)
# add files first
files = []
target = 'hs_geographic_feature_resource/tests/gis.osm_adminareas_v06_all_files.zip'
files.append(UploadedFile(file=open(target, 'rb'),
name='gis.osm_adminareas_v06_all_files.zip'))
hydroshare.utils.resource_file_add_process(self.resGeoFeature, files, self.user, )
# check that the resource has 15 files
self.assertEqual(self.resGeoFeature.files.count(), 15)
self.assertTrue(self.resGeoFeature.metadata.coverages.filter(type='box').exists())
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.datum, 'WGS_1984')
self.assertTrue(abs(self.resGeoFeature.metadata.originalcoverage.eastlimit -
3.4520493) < self.allowance)
self.assertTrue(abs(self.resGeoFeature.metadata.originalcoverage.northlimit -
45.0466382) < self.allowance)
self.assertTrue(abs(self.resGeoFeature.metadata.originalcoverage.southlimit -
42.5732416) < self.allowance)
self.assertTrue(abs(self.resGeoFeature.metadata.originalcoverage.westlimit -
(-0.3263017)) < self.allowance)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.unit, 'degree')
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.projection_name,
'WGS 84')
self.assertGreater(len(self.resGeoFeature.metadata.originalcoverage.projection_string), 0)
# find the .prj file and delete it
for f in self.resGeoFeature.files.all():
if f.extension == '.prj':
hydroshare.delete_resource_file(self.resGeoFeature.short_id, f.id, self.user)
break
# resource should have 14 files
self.assertEqual(self.resGeoFeature.files.count(), 14)
# resource level spatial coverage should have been deleted
self.assertFalse(self.resGeoFeature.metadata.coverages.filter(type='box').exists())
# test original coverage
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.datum, 'unknown')
self.assertTrue(abs(self.resGeoFeature.metadata.originalcoverage.eastlimit -
3.4520493) < self.allowance)
self.assertTrue(abs(self.resGeoFeature.metadata.originalcoverage.northlimit -
45.0466382) < self.allowance)
self.assertTrue(abs(self.resGeoFeature.metadata.originalcoverage.southlimit -
42.5732416) < self.allowance)
self.assertTrue(abs(self.resGeoFeature.metadata.originalcoverage.westlimit -
(-0.3263017)) < self.allowance)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.unit, 'unknown')
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.projection_name, 'unknown')
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.projection_string, 'unknown')
self.resGeoFeature.delete()
def test_add_prj_file(self):
# test that if a prj file gets added then the attributes (datum, unit and projection_name)
# of originalcoverage element gets populated and resource level spatial coverage element
# gets created
# this zip file has only the required 3 files (.shp, .shx and .dbf)
files = []
target = 'hs_geographic_feature_resource/tests/states_required_files.zip'
files.append(UploadedFile(file=open(target, 'rb'), name='states_required_files.zip'))
self.resGeoFeature = hydroshare.create_resource(
resource_type='GeographicFeatureResource',
owner=self.user,
title='Test Geographic Feature (shapefiles)',
keywords=['kw1', 'kw2'],
files=files
)
# uploaded file validation and metadata extraction happens in post resource
# creation handler
hydroshare.utils.resource_post_create_actions(resource=self.resGeoFeature, user=self.user,
metadata=[])
# check that the resource has 3 files
self.assertEqual(self.resGeoFeature.files.count(), 3)
# test extracted metadata
# there should not be any resource level coverage
self.assertEqual(self.resGeoFeature.metadata.coverages.count(), 0)
self.assertNotEqual(self.resGeoFeature.metadata.geometryinformation, None)
self.assertEqual(self.resGeoFeature.metadata.geometryinformation.featureCount, 51)
self.assertEqual(self.resGeoFeature.metadata.geometryinformation.geometryType,
"MULTIPOLYGON")
self.assertNotEqual(self.resGeoFeature.metadata.originalcoverage, None)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.datum,
'unknown')
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.projection_name,
'unknown')
self.assertGreater(len(self.resGeoFeature.metadata.originalcoverage.projection_string), 0)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.unit, 'unknown')
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.eastlimit, -66.9692712587578)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.northlimit, 71.406235393967)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.southlimit, 18.921786345087)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.westlimit,
-178.217598362366)
# now add the .prj file
files = []
target = 'hs_geographic_feature_resource/tests/states.prj'
files.append(UploadedFile(file=open(target, 'rb'),
name='states.prj'))
hydroshare.utils.resource_file_add_process(self.resGeoFeature, files, self.user)
        # there should now be a spatial coverage at the resource level
self.assertTrue(self.resGeoFeature.metadata.coverages.filter(type='box').exists())
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.datum,
'North_American_Datum_1983')
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.projection_name,
'NAD83')
self.assertGreater(len(self.resGeoFeature.metadata.originalcoverage.projection_string), 0)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.unit, 'degree')
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.eastlimit, -66.9692712587578)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.northlimit, 71.406235393967)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.southlimit, 18.921786345087)
self.assertEqual(self.resGeoFeature.metadata.originalcoverage.westlimit,
-178.217598362366)
self.resGeoFeature.delete()
def test_add_xml_file_one(self):
# test that if a .xml file gets added then the resource abstract and keywords get
        # updated. Abstract gets updated only if there is no abstract already
# this zip file has only the required 3 files (.shp, .shx and .dbf)
files = []
target = 'hs_geographic_feature_resource/tests/states_required_files.zip'
files.append(UploadedFile(file=open(target, 'rb'), name='states_required_files.zip'))
self.resGeoFeature = hydroshare.create_resource(
resource_type='GeographicFeatureResource',
owner=self.user,
title='Test Geographic Feature (shapefiles)',
keywords=['kw1', 'kw2'],
files=files
)
# uploaded file validation and metadata extraction happens in post resource
# creation handler
hydroshare.utils.resource_post_create_actions(resource=self.resGeoFeature, user=self.user,
metadata=[])
# check that the resource has 3 files
self.assertEqual(self.resGeoFeature.files.count(), 3)
# there should not be any abstract
self.assertEqual(self.resGeoFeature.metadata.description, None)
# there should be 2 keywords
self.assertEqual(self.resGeoFeature.metadata.subjects.count(), 2)
# now add the .shp.xml file
files = []
target = 'hs_geographic_feature_resource/tests/states.shp.xml'
files.append(UploadedFile(file=open(target, 'rb'),
name='states.shp.xml'))
hydroshare.utils.resource_file_add_process(self.resGeoFeature, files, self.user)
# check that the resource has 4 files
self.assertEqual(self.resGeoFeature.files.count(), 4)
# there should be abstract now (abstract came from the xml file)
self.assertNotEqual(self.resGeoFeature.metadata.description, None)
# there should be 4 (2 keywords came from the xml file) keywords
self.assertEqual(self.resGeoFeature.metadata.subjects.count(), 4)
self.resGeoFeature.delete()
def test_add_xml_file_two(self):
        # test that if a .xml file gets added then the resource title and abstract get
        # updated. Abstract gets updated only if there is no abstract already. Title
# gets updated only if the resource has the default title (untitled resource)
# this zip file has only the required 3 files (.shp, .shx and .dbf)
files = []
target = 'hs_geographic_feature_resource/tests/states_required_files.zip'
files.append(UploadedFile(file=open(target, 'rb'), name='states_required_files.zip'))
self.resGeoFeature = hydroshare.create_resource(
resource_type='GeographicFeatureResource',
owner=self.user,
title='Untitled resource',
files=files
)
# uploaded file validation and metadata extraction happens in post resource
# creation handler
hydroshare.utils.resource_post_create_actions(resource=self.resGeoFeature, user=self.user,
metadata=[])
# check that the resource has 3 files
self.assertEqual(self.resGeoFeature.files.count(), 3)
        # the title should be the default 'Untitled resource'
self.assertEqual(self.resGeoFeature.metadata.title.value, 'Untitled resource')
# there should not be any abstract
self.assertEqual(self.resGeoFeature.metadata.description, None)
# there should be no keywords
self.assertEqual(self.resGeoFeature.metadata.subjects.count(), 0)
# now add the .shp.xml file
files = []
target = 'hs_geographic_feature_resource/tests/states.shp.xml'
files.append(UploadedFile(file=open(target, 'rb'),
name='states.shp.xml'))
hydroshare.utils.resource_file_add_process(self.resGeoFeature, files, self.user)
# check that the resource has 4 files
self.assertEqual(self.resGeoFeature.files.count(), 4)
        # the title should no longer be 'Untitled resource'
self.assertNotEqual(self.resGeoFeature.metadata.title.value, 'Untitled resource')
# there should be abstract now (abstract came from the xml file)
self.assertNotEqual(self.resGeoFeature.metadata.description, None)
# there should be 2 (2 keywords came from the xml file) keywords
self.assertEqual(self.resGeoFeature.metadata.subjects.count(), 2)
self.resGeoFeature.delete()
def test_metadata_element_pre_create_and_update(self):
request = HttpRequest()
# originalcoverage
request.POST = {"northlimit": 123, "eastlimit": 234,
"southlimit": 345, "westlimit": 456,
"projection_string": "proj str",
"projection_name": "prj name1",
"datum": "dam1", "unit": "u1"}
data = metadata_element_pre_create_handler(sender=GeographicFeatureResource,
element_name="originalcoverage",
request=request)
self.assertTrue(data["is_valid"])
data = metadata_element_pre_update_handler(sender=GeographicFeatureResource,
element_name="originalcoverage",
request=request)
self.assertTrue(data["is_valid"])
# fieldinformation
request.POST = {"fieldName": "fieldName 1",
"fieldType": "fieldType 1",
"fieldTypeCode": "fieldTypeCode 1",
"fieldWidth": 5,
"fieldPrecision": 1}
data = metadata_element_pre_create_handler(sender=GeographicFeatureResource,
element_name="fieldinformation",
request=request)
self.assertTrue(data["is_valid"])
data = metadata_element_pre_update_handler(sender=GeographicFeatureResource,
element_name="fieldinformation",
request=request)
self.assertTrue(data["is_valid"])
# geometryinformation
request.POST = {"featureCount": 12, "geometryType": "geometryType 1"}
data = metadata_element_pre_create_handler(sender=GeographicFeatureResource,
element_name="geometryinformation",
request=request)
self.assertTrue(data["is_valid"])
data = metadata_element_pre_update_handler(sender=GeographicFeatureResource,
element_name="geometryinformation",
request=request)
self.assertTrue(data["is_valid"])
def test_single_point_shp(self):
shp_full_path = "hs_geographic_feature_resource/tests/single_point_shp/logan_Outletmv.shp"
meta_dict = geofeature.extract_metadata(shp_full_path)
coverage_dict = meta_dict.get("coverage", None)
self.assertNotEqual(coverage_dict, None)
self.assertEqual(coverage_dict["Coverage"]["type"].lower(), "point")
self.assertTrue(abs(coverage_dict["Coverage"]
["value"]["east"] + 111.790377929) < self.allowance)
self.assertTrue(abs(coverage_dict["Coverage"]
["value"]["north"] - 41.7422180799) < self.allowance)
def test_read_shp_xml(self):
# test parsing shapefile xml metadata
shp_xml_full_path = 'hs_geographic_feature_resource/tests/beaver_ponds_1940.shp.xml'
metadata = geofeature.parse_shp_xml(shp_xml_full_path)
resGeoFeature2 = hydroshare.create_resource(
resource_type='GeographicFeatureResource',
owner=self.user,
title="",
metadata=metadata
)
# test abstract
self.assertIn("white aerial photographs taken in July 1940 by the U.S. "
"Department of Agriculture",
resGeoFeature2.metadata.description.abstract)
# test title
self.assertIn("beaver_ponds_1940",
resGeoFeature2.metadata.title.value)
# test keywords
self.assertEqual(resGeoFeature2.metadata.subjects.all().count(), 3)
subject_list = [s.value for s in resGeoFeature2.metadata.subjects.all()]
self.assertIn("beaver ponds", subject_list)
self.assertIn("beaver meadows", subject_list)
self.assertIn("Voyageurs National Park", subject_list)
resGeoFeature2.delete()
def _test_delete_file(self, file_extension):
# test that deleting the file with the specified extension *file_extension*
# deletes all files
# check that the resource has no files
self.assertEqual(self.resGeoFeature.files.count(), 0)
# add files first
files = []
target = 'hs_geographic_feature_resource/tests/gis.osm_adminareas_v06_all_files.zip'
files.append(UploadedFile(file=open(target, 'rb'),
name='gis.osm_adminareas_v06_all_files.zip'))
hydroshare.utils.resource_file_add_process(self.resGeoFeature, files, self.user, )
# check that the resource has 15 files
self.assertEqual(self.resGeoFeature.files.count(), 15)
        # find the file with the given extension and delete it
for f in self.resGeoFeature.files.all():
if f.extension == file_extension:
hydroshare.delete_resource_file(self.resGeoFeature.short_id, f.id, self.user)
break
# resource should have no files
self.assertEqual(self.resGeoFeature.files.count(), 0)
def _test_delete_optional_file(self, file_extension):
# test that deleting the optional file with the specified extension *file_extension*
# deletes only that file
self.resGeoFeature = hydroshare.create_resource(
resource_type='GeographicFeatureResource',
owner=self.user,
title='Test Geographic Feature (shapefiles)'
)
# check that the resource has no files
self.assertEqual(self.resGeoFeature.files.count(), 0)
# add files first
files = []
target = 'hs_geographic_feature_resource/tests/gis.osm_adminareas_v06_all_files.zip'
files.append(UploadedFile(file=open(target, 'rb'),
name='gis.osm_adminareas_v06_all_files.zip'))
hydroshare.utils.resource_file_add_process(self.resGeoFeature, files, self.user, )
# check that the resource has 15 files
self.assertEqual(self.resGeoFeature.files.count(), 15)
        # find the file with the given extension and delete it
for f in self.resGeoFeature.files.all():
if f.extension == file_extension:
hydroshare.delete_resource_file(self.resGeoFeature.short_id, f.id, self.user)
break
# resource should have 14 files
self.assertEqual(self.resGeoFeature.files.count(), 14)
self.resGeoFeature.delete()
|
|
import asyncio
from importlib import import_module
from unittest.mock import patch
import pytest
from asgiref.sync import sync_to_async
from channels.testing import WebsocketCommunicator
from django.conf import settings
from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY
from openslides.asgi import application
from openslides.core.config import config
from openslides.utils.autoupdate import (
Element,
inform_changed_elements,
inform_deleted_data,
)
from openslides.utils.cache import element_cache
from ...unit.utils.cache_provider import Collection1, Collection2, get_cachable_provider
from ..helpers import TConfig, TProjector, TUser
@pytest.fixture(autouse=True)
async def prepare_element_cache(settings):
"""
Resets the element cache.
Uses a cacheable_provider for tests with example data.
"""
await element_cache.cache_provider.clear_cache()
orig_cachable_provider = element_cache.cachable_provider
element_cache.cachable_provider = get_cachable_provider(
[Collection1(), Collection2(), TConfig(), TUser(), TProjector()]
)
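    # Drop the memoized cachables so the new provider is used when the cache is rebuilt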
element_cache._cachables = None
await sync_to_async(element_cache.ensure_cache)()
yield
# Reset the cachable_provider
element_cache.cachable_provider = orig_cachable_provider
element_cache._cachables = None
await element_cache.cache_provider.clear_cache()
@pytest.fixture
async def get_communicator():
communicator: WebsocketCommunicator = None
def get_communicator(query_string=""):
nonlocal communicator # use the outer communicator variable
if query_string:
query_string = f"?{query_string}"
communicator = WebsocketCommunicator(application, f"/ws/{query_string}")
return communicator
yield get_communicator
if communicator:
await communicator.disconnect()
@pytest.fixture
async def communicator(get_communicator):
yield get_communicator()
@pytest.fixture
async def set_config():
"""
Set a config variable in the element_cache without hitting the database.
"""
async def _set_config(key, value):
with patch("openslides.utils.autoupdate.save_history"):
collection_string = config.get_collection_string()
config_id = config.key_to_id[key] # type: ignore
full_data = {"id": config_id, "key": key, "value": value}
await sync_to_async(inform_changed_elements)(
[
Element(
id=config_id,
collection_string=collection_string,
full_data=full_data,
information="",
user_id=None,
disable_history=True,
)
]
)
return _set_config
@pytest.mark.asyncio
async def test_normal_connection(get_communicator, set_config):
await set_config("general_system_enable_anonymous", True)
connected, __ = await get_communicator().connect()
assert connected
@pytest.mark.asyncio
async def test_connection_with_change_id(get_communicator, set_config):
await set_config("general_system_enable_anonymous", True)
communicator = get_communicator("change_id=0")
await communicator.connect()
response = await communicator.receive_json_from()
type = response.get("type")
content = response.get("content")
assert type == "autoupdate"
assert "changed" in content
assert "deleted" in content
assert "from_change_id" in content
assert "to_change_id" in content
assert Collection1().get_collection_string() in content["changed"]
assert Collection2().get_collection_string() in content["changed"]
assert TConfig().get_collection_string() in content["changed"]
assert TUser().get_collection_string() in content["changed"]
@pytest.mark.asyncio
async def test_connection_with_change_id_get_restricted_data_with_restricted_data_cache(
get_communicator, set_config
):
"""
    Test that the returned data is the restricted_data when restricted_data_cache is activated
"""
try:
# Save the value of use_restricted_data_cache
original_use_restricted_data = element_cache.use_restricted_data_cache
element_cache.use_restricted_data_cache = True
await set_config("general_system_enable_anonymous", True)
communicator = get_communicator("change_id=0")
await communicator.connect()
response = await communicator.receive_json_from()
content = response.get("content")
assert content["changed"]["app/collection1"][0]["value"] == "restricted_value1"
finally:
# reset the value of use_restricted_data_cache
element_cache.use_restricted_data_cache = original_use_restricted_data
@pytest.mark.asyncio
async def test_connection_with_invalid_change_id(get_communicator, set_config):
await set_config("general_system_enable_anonymous", True)
communicator = get_communicator("change_id=invalid")
connected, __ = await communicator.connect()
assert connected is False
@pytest.mark.asyncio
async def test_connection_with_to_big_change_id(get_communicator, set_config):
await set_config("general_system_enable_anonymous", True)
communicator = get_communicator("change_id=100")
connected, __ = await communicator.connect()
assert connected is True
assert await communicator.receive_nothing()
@pytest.mark.asyncio
async def test_changed_data_autoupdate_off(communicator, set_config):
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
# Change a config value
await set_config("general_event_name", "Test Event")
assert await communicator.receive_nothing()
@pytest.mark.asyncio
async def test_changed_data_autoupdate_on(get_communicator, set_config):
await set_config("general_system_enable_anonymous", True)
communicator = get_communicator("autoupdate=on")
await communicator.connect()
# Change a config value
await set_config("general_event_name", "Test Event")
response = await communicator.receive_json_from()
id = config.get_key_to_id()["general_event_name"]
type = response.get("type")
content = response.get("content")
assert type == "autoupdate"
assert content["changed"] == {
"core/config": [{"id": id, "key": "general_event_name", "value": "Test Event"}]
}
@pytest.mark.asyncio
async def test_anonymous_disabled(communicator):
connected, __ = await communicator.connect()
assert not connected
@pytest.mark.asyncio
async def test_with_user():
# login user with id 1
engine = import_module(settings.SESSION_ENGINE)
session = engine.SessionStore() # type: ignore
session[SESSION_KEY] = "1"
session[
HASH_SESSION_KEY
] = "362d4f2de1463293cb3aaba7727c967c35de43ee" # see helpers.TUser
session[BACKEND_SESSION_KEY] = "django.contrib.auth.backends.ModelBackend"
session.save()
scn = settings.SESSION_COOKIE_NAME
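    # Pass the session cookie as a raw ASGI header so the consumer authenticates as user 1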
cookies = (b"cookie", f"{scn}={session.session_key}".encode())
communicator = WebsocketCommunicator(application, "/ws/", headers=[cookies])
connected, __ = await communicator.connect()
assert connected
await communicator.disconnect()
@pytest.mark.asyncio
async def test_receive_deleted_data(get_communicator, set_config):
await set_config("general_system_enable_anonymous", True)
communicator = get_communicator("autoupdate=on")
await communicator.connect()
# Delete test element
with patch("openslides.utils.autoupdate.save_history"):
await sync_to_async(inform_deleted_data)(
[(Collection1().get_collection_string(), 1)]
)
response = await communicator.receive_json_from()
type = response.get("type")
content = response.get("content")
assert type == "autoupdate"
assert content["deleted"] == {Collection1().get_collection_string(): [1]}
@pytest.mark.asyncio
async def test_send_notify(communicator, set_config):
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
await communicator.send_json_to(
{
"type": "notify",
"content": {"content": "foobar, what else.", "name": "message_name"},
"id": "test",
}
)
response = await communicator.receive_json_from()
content = response["content"]
assert isinstance(content, dict)
assert content["content"] == "foobar, what else."
assert content["name"] == "message_name"
assert "senderChannelName" in content
assert content["senderUserId"] == 0
@pytest.mark.asyncio
async def test_invalid_websocket_message_type(communicator, set_config):
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
await communicator.send_json_to([])
response = await communicator.receive_json_from()
assert response["type"] == "error"
@pytest.mark.asyncio
async def test_invalid_websocket_message_no_id(communicator, set_config):
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
await communicator.send_json_to({"type": "test", "content": "foobar"})
response = await communicator.receive_json_from()
assert response["type"] == "error"
@pytest.mark.asyncio
async def test_send_unknown_type(communicator, set_config):
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
await communicator.send_json_to(
{
"type": "if_you_add_this_type_to_openslides_I_will_be_sad",
"content": True,
"id": "test_id",
}
)
response = await communicator.receive_json_from()
assert response["type"] == "error"
assert response["in_response"] == "test_id"
@pytest.mark.asyncio
async def test_request_constants(communicator, settings, set_config):
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
await communicator.send_json_to(
{"type": "constants", "content": "", "id": "test_id"}
)
response = await communicator.receive_json_from()
assert response["type"] == "constants"
# See conftest.py for the content of 'content'
assert response["content"] == {"constant1": "value1", "constant2": "value2"}
@pytest.mark.asyncio
async def test_send_get_elements(communicator, set_config):
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
await communicator.send_json_to(
{"type": "getElements", "content": {}, "id": "test_id"}
)
response = await communicator.receive_json_from()
type = response.get("type")
content = response.get("content")
assert type == "autoupdate"
assert "changed" in content
assert "deleted" in content
assert "from_change_id" in content
assert "to_change_id" in content
assert Collection1().get_collection_string() in content["changed"]
assert Collection2().get_collection_string() in content["changed"]
assert TConfig().get_collection_string() in content["changed"]
assert TUser().get_collection_string() in content["changed"]
@pytest.mark.asyncio
async def test_send_get_elements_to_big_change_id(communicator, set_config):
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
await communicator.send_json_to(
{"type": "getElements", "content": {"change_id": 100}, "id": "test_id"}
)
response = await communicator.receive_json_from()
type = response.get("type")
assert type == "error"
assert response.get("in_response") == "test_id"
@pytest.mark.asyncio
async def test_send_get_elements_to_small_change_id(communicator, set_config):
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
await communicator.send_json_to(
{"type": "getElements", "content": {"change_id": 1}, "id": "test_id"}
)
response = await communicator.receive_json_from()
type = response.get("type")
assert type == "autoupdate"
assert response.get("in_response") == "test_id"
assert response.get("content")["all_data"]
@pytest.mark.asyncio
async def test_send_connect_twice_with_clear_change_id_cache(communicator, set_config):
"""
    Test that a second request with change_id+1 from the first request returns
    an error.
"""
await set_config("general_system_enable_anonymous", True)
element_cache.cache_provider.change_id_data = {} # type: ignore
await communicator.connect()
await communicator.send_json_to(
{"type": "getElements", "content": {"change_id": 0}, "id": "test_id"}
)
response1 = await communicator.receive_json_from()
first_change_id = response1.get("content")["to_change_id"]
await communicator.send_json_to(
{
"type": "getElements",
"content": {"change_id": first_change_id + 1},
"id": "test_id",
}
)
response2 = await communicator.receive_json_from()
assert response2["type"] == "error"
assert (
response2.get("content")
== "Requested change_id is higher this highest change_id."
)
@pytest.mark.asyncio
async def test_send_connect_twice_with_clear_change_id_cache_same_change_id_then_first_request(
communicator, set_config
):
"""
    Test that a second request with the change_id from the first request returns
    all data.
    A client should not do this, but should request change_id+1 instead.
"""
await set_config("general_system_enable_anonymous", True)
await element_cache.cache_provider.clear_cache()
await communicator.connect()
await communicator.send_json_to(
{"type": "getElements", "content": {"change_id": 0}, "id": "test_id"}
)
response1 = await communicator.receive_json_from()
first_change_id = response1.get("content")["to_change_id"]
await communicator.send_json_to(
{
"type": "getElements",
"content": {"change_id": first_change_id},
"id": "test_id",
}
)
response2 = await communicator.receive_json_from()
assert response2["type"] == "autoupdate"
assert response2.get("content")["all_data"]
@pytest.mark.asyncio
async def test_request_changed_elements_no_douple_elements(communicator, set_config):
"""
    Test that when an element is changed twice, it is only returned
    once when asking for a range of change ids.
    Tests the case where all_data is false.
"""
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
# Change element twice
await set_config("general_event_name", "Test Event")
await set_config("general_event_name", "Other value")
# Ask for all elements
await communicator.send_json_to(
{"type": "getElements", "content": {"change_id": 2}, "id": "test_id"}
)
response = await communicator.receive_json_from()
type = response.get("type")
content = response.get("content")
assert type == "autoupdate"
assert not response.get("content")["all_data"]
config_ids = [e["id"] for e in content["changed"]["core/config"]]
# test that config_ids are unique
assert len(config_ids) == len(set(config_ids))
@pytest.mark.asyncio
async def test_send_invalid_get_elements(communicator, set_config):
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
await communicator.send_json_to(
{"type": "getElements", "content": {"change_id": "some value"}, "id": "test_id"}
)
response = await communicator.receive_json_from()
type = response.get("type")
assert type == "error"
assert response.get("in_response") == "test_id"
@pytest.mark.asyncio
async def test_turn_on_autoupdate(communicator, set_config):
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
await communicator.send_json_to(
{"type": "autoupdate", "content": "on", "id": "test_id"}
)
await asyncio.sleep(0.01)
# Change a config value
await set_config("general_event_name", "Test Event")
response = await communicator.receive_json_from()
id = config.get_key_to_id()["general_event_name"]
type = response.get("type")
content = response.get("content")
assert type == "autoupdate"
assert content["changed"] == {
"core/config": [{"id": id, "key": "general_event_name", "value": "Test Event"}]
}
@pytest.mark.asyncio
async def test_turn_off_autoupdate(get_communicator, set_config):
await set_config("general_system_enable_anonymous", True)
communicator = get_communicator("autoupdate=on")
await communicator.connect()
await communicator.send_json_to(
{"type": "autoupdate", "content": False, "id": "test_id"}
)
await asyncio.sleep(0.01)
# Change a config value
await set_config("general_event_name", "Test Event")
assert await communicator.receive_nothing()
@pytest.mark.asyncio
async def test_listen_to_projector(communicator, set_config):
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
await communicator.send_json_to(
{
"type": "listenToProjectors",
"content": {"projector_ids": [1]},
"id": "test_id",
}
)
response = await communicator.receive_json_from()
type = response.get("type")
content = response.get("content")
assert type == "projector"
assert content == {
"1": [
{
"data": {"name": "slide1", "event_name": "OpenSlides"},
"element": {"id": 1, "name": "test/slide1"},
}
]
}
@pytest.mark.asyncio
async def test_update_projector(communicator, set_config):
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
await communicator.send_json_to(
{
"type": "listenToProjectors",
"content": {"projector_ids": [1]},
"id": "test_id",
}
)
await communicator.receive_json_from()
# Change a config value
await set_config("general_event_name", "Test Event")
response = await communicator.receive_json_from()
type = response.get("type")
content = response.get("content")
assert type == "projector"
assert content == {
"1": [
{
"data": {"name": "slide1", "event_name": "Test Event"},
"element": {"id": 1, "name": "test/slide1"},
}
]
}
@pytest.mark.asyncio
async def test_update_projector_to_current_value(communicator, set_config):
"""
When a value does not change, the projector should not be updated.
"""
await set_config("general_system_enable_anonymous", True)
await communicator.connect()
await communicator.send_json_to(
{
"type": "listenToProjectors",
"content": {"projector_ids": [1]},
"id": "test_id",
}
)
await communicator.receive_json_from()
# Change a config value to current_value
await set_config("general_event_name", "OpenSlides")
assert await communicator.receive_nothing()
|
|
# Copyright 2013 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import OrderedDict
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tabs
from openstack_dashboard import api
from openstack_dashboard import policy
from openstack_dashboard.dashboards.project.volumes.backups \
import tables as backups_tables
from openstack_dashboard.dashboards.project.volumes.cgroups \
import tables as vol_cgroup_tables
from openstack_dashboard.dashboards.project.volumes.snapshots \
import tables as vol_snapshot_tables
from openstack_dashboard.dashboards.project.volumes.volumes \
import tables as volume_tables
class VolumeTableMixIn(object):
_has_more_data = False
_has_prev_data = False
def _get_volumes(self, search_opts=None):
try:
marker, sort_dir = self._get_marker()
volumes, self._has_more_data, self._has_prev_data = \
api.cinder.volume_list_paged(self.request, marker=marker,
search_opts=search_opts,
sort_dir=sort_dir, paginate=True)
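            # A backwards page comes back in ascending order; reverse it to restore the descending display order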
if sort_dir == "asc":
volumes.reverse()
return volumes
except Exception:
exceptions.handle(self.request,
_('Unable to retrieve volume list.'))
return []
def _get_instances(self, search_opts=None, instance_ids=None):
if not instance_ids:
return []
try:
# TODO(tsufiev): we should pass attached_instance_ids to
# nova.server_list as soon as Nova API allows for this
instances, has_more = api.nova.server_list(self.request,
search_opts=search_opts)
return instances
except Exception:
exceptions.handle(self.request,
_("Unable to retrieve volume/instance "
"attachment information"))
return []
def _get_volumes_ids_with_snapshots(self, search_opts=None):
try:
volume_ids = []
snapshots = api.cinder.volume_snapshot_list(
self.request, search_opts=search_opts)
if snapshots:
# extract out the volume ids
                volume_ids = set(s.volume_id for s in snapshots)
except Exception:
exceptions.handle(self.request,
_("Unable to retrieve snapshot list."))
return volume_ids
def _get_attached_instance_ids(self, volumes):
attached_instance_ids = []
for volume in volumes:
for att in volume.attachments:
server_id = att.get('server_id', None)
if server_id is not None:
attached_instance_ids.append(server_id)
return attached_instance_ids
    # set the attachment info on each volume and flag volumes that have snapshots
def _set_volume_attributes(self,
volumes,
instances,
volume_ids_with_snapshots):
instances = OrderedDict([(inst.id, inst) for inst in instances])
for volume in volumes:
if volume_ids_with_snapshots:
if volume.id in volume_ids_with_snapshots:
setattr(volume, 'has_snapshot', True)
if instances:
for att in volume.attachments:
server_id = att.get('server_id', None)
att['instance'] = instances.get(server_id, None)
class PagedTableMixin(object):
def __init__(self, *args, **kwargs):
super(PagedTableMixin, self).__init__(*args, **kwargs)
self._has_prev_data = False
self._has_more_data = False
def has_prev_data(self, table):
return self._has_prev_data
def has_more_data(self, table):
return self._has_more_data
def _get_marker(self):
meta = self.table_classes[0]._meta
prev_marker = self.request.GET.get(meta.prev_pagination_param, None)
if prev_marker:
return prev_marker, "asc"
else:
marker = self.request.GET.get(meta.pagination_param, None)
if marker:
return marker, "desc"
return None, "desc"
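    # A sketch of the behaviour above (the parameter names depend on the first
    # table class's ``_meta`` and are not spelled out here): when the request
    # carries the prev-pagination parameter we page backwards and ask for
    # ascending sort order; otherwise the regular pagination marker, if any,
    # pages forwards in descending order.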
class VolumeTab(PagedTableMixin, tabs.TableTab, VolumeTableMixIn):
table_classes = (volume_tables.VolumesTable,)
name = _("Volumes")
slug = "volumes_tab"
template_name = ("horizon/common/_detail_table.html")
preload = False
def get_volumes_data(self):
volumes = self._get_volumes()
attached_instance_ids = self._get_attached_instance_ids(volumes)
instances = self._get_instances(instance_ids=attached_instance_ids)
volume_ids_with_snapshots = self._get_volumes_ids_with_snapshots()
self._set_volume_attributes(
volumes, instances, volume_ids_with_snapshots)
return volumes
class SnapshotTab(PagedTableMixin, tabs.TableTab):
table_classes = (vol_snapshot_tables.VolumeSnapshotsTable,)
name = _("Volume Snapshots")
slug = "snapshots_tab"
template_name = ("horizon/common/_detail_table.html")
preload = False
def get_volume_snapshots_data(self):
snapshots = []
volumes = {}
if api.base.is_service_enabled(self.request, 'volumev2'):
try:
marker, sort_dir = self._get_marker()
snapshots, self._has_more_data, self._has_prev_data = \
api.cinder.volume_snapshot_list_paged(
self.request, paginate=True, marker=marker,
sort_dir=sort_dir)
volumes = api.cinder.volume_list(self.request)
volumes = dict((v.id, v) for v in volumes)
except Exception:
exceptions.handle(self.request, _("Unable to retrieve "
"volume snapshots."))
for snapshot in snapshots:
volume = volumes.get(snapshot.volume_id)
setattr(snapshot, '_volume', volume)
return snapshots
class BackupsTab(PagedTableMixin, tabs.TableTab, VolumeTableMixIn):
table_classes = (backups_tables.BackupsTable,)
name = _("Volume Backups")
slug = "backups_tab"
template_name = ("horizon/common/_detail_table.html")
preload = False
def allowed(self, request):
return api.cinder.volume_backup_supported(self.request)
def get_volume_backups_data(self):
try:
marker, sort_dir = self._get_marker()
backups, self._has_more_data, self._has_prev_data = \
api.cinder.volume_backup_list_paged(
self.request, marker=marker, sort_dir=sort_dir,
paginate=True)
volumes = api.cinder.volume_list(self.request)
volumes = dict((v.id, v) for v in volumes)
for backup in backups:
backup.volume = volumes.get(backup.volume_id)
except Exception:
backups = []
exceptions.handle(self.request, _("Unable to retrieve "
"volume backups."))
return backups
class CGroupsTab(tabs.TableTab, VolumeTableMixIn):
table_classes = (vol_cgroup_tables.VolumeCGroupsTable,)
name = _("Volume Consistency Groups")
slug = "cgroups_tab"
template_name = ("horizon/common/_detail_table.html")
preload = False
def allowed(self, request):
return policy.check(
(("volume", "consistencygroup:get_all"),),
request
)
def get_volume_cgroups_data(self):
try:
cgroups = api.cinder.volume_cgroup_list_with_vol_type_names(
self.request)
for cgroup in cgroups:
setattr(cgroup, '_volume_tab', self.tab_group.tabs[0])
except Exception:
cgroups = []
exceptions.handle(self.request, _("Unable to retrieve "
"volume consistency groups."))
return cgroups
class VolumeAndSnapshotTabs(tabs.TabGroup):
slug = "volumes_and_snapshots"
tabs = (VolumeTab, SnapshotTab, BackupsTab, CGroupsTab)
sticky = True
|
|
# -*- coding: utf-8 -*-
from datetime import datetime
import hashlib
from werkzeug.security import generate_password_hash, check_password_hash
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from markdown import markdown
import bleach
from flask import current_app, request, url_for
from flask_login import UserMixin, AnonymousUserMixin
from app.exceptions import ValidationError
from . import db, login_manager
class Permission:
FOLLOW = 0x01
COMMENT = 0x02
WRITE_ARTICLES = 0x04
MODERATE_COMMENTS = 0x08
ADMINISTER = 0x80
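# For example, the default 'User' role created by Role.insert_roles() below
# combines FOLLOW | COMMENT | WRITE_ARTICLES, i.e. 0x01 | 0x02 | 0x04 == 0x07;
# User.can() then checks individual bits with a bitwise AND.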
class Role(db.Model):
__tablename__ = 'roles'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
default = db.Column(db.Boolean, default=False, index=True)
permissions = db.Column(db.Integer)
users = db.relationship('User', backref='role', lazy='dynamic')
@staticmethod
def insert_roles():
roles = {
'User': (Permission.FOLLOW |
Permission.COMMENT |
Permission.WRITE_ARTICLES, True),
'Moderator': (Permission.FOLLOW |
Permission.COMMENT |
Permission.WRITE_ARTICLES |
Permission.MODERATE_COMMENTS, False),
'Administrator': (0xff, False)
}
for r in roles:
role = Role.query.filter_by(name=r).first()
if role is None:
role = Role(name=r)
role.permissions = roles[r][0]
role.default = roles[r][1]
db.session.add(role)
db.session.commit()
def __repr__(self):
return '<Role %r>' % self.name
class Follow(db.Model):
__tablename__ = 'follows'
follower_id = db.Column(db.Integer, db.ForeignKey('users.id'),
primary_key=True)
followed_id = db.Column(db.Integer, db.ForeignKey('users.id'),
primary_key=True)
timestamp = db.Column(db.DateTime, default=datetime.utcnow)
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(64), unique=True, index=True)
username = db.Column(db.String(64), unique=True, index=True)
role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))
password_hash = db.Column(db.String(128))
confirmed = db.Column(db.Boolean, default=False)
name = db.Column(db.String(64))
location = db.Column(db.String(64))
about_me = db.Column(db.Text())
member_since = db.Column(db.DateTime(), default=datetime.utcnow)
last_seen = db.Column(db.DateTime(), default=datetime.utcnow)
avatar_hash = db.Column(db.String(32))
posts = db.relationship('Post', backref='author', lazy='dynamic')
followed = db.relationship('Follow',
foreign_keys=[Follow.follower_id],
backref=db.backref('follower', lazy='joined'),
lazy='dynamic',
cascade='all, delete-orphan')
followers = db.relationship('Follow',
foreign_keys=[Follow.followed_id],
backref=db.backref('followed', lazy='joined'),
lazy='dynamic',
cascade='all, delete-orphan')
comments = db.relationship('Comment', backref='author', lazy='dynamic')
@staticmethod
def generate_fake(count=100):
from sqlalchemy.exc import IntegrityError
from random import seed
import forgery_py
seed()
for i in range(count):
u = User(email=forgery_py.internet.email_address(),
username=forgery_py.internet.user_name(True),
password=forgery_py.lorem_ipsum.word(),
confirmed=True,
name=forgery_py.name.full_name(),
location=forgery_py.address.city(),
about_me=forgery_py.lorem_ipsum.sentence(),
member_since=forgery_py.date.date(True))
db.session.add(u)
try:
db.session.commit()
except IntegrityError:
db.session.rollback()
@staticmethod
def add_self_follows():
for user in User.query.all():
if not user.is_following(user):
user.follow(user)
db.session.add(user)
db.session.commit()
def __init__(self, **kwargs):
super(User, self).__init__(**kwargs)
if self.role is None:
if self.email == current_app.config['FLASKY_ADMIN']:
self.role = Role.query.filter_by(permissions=0xff).first()
if self.role is None:
self.role = Role.query.filter_by(default=True).first()
if self.email is not None and self.avatar_hash is None:
self.avatar_hash = hashlib.md5(
self.email.encode('utf-8')).hexdigest()
self.followed.append(Follow(followed=self))
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
def generate_confirmation_token(self, expiration=3600):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'confirm': self.id})
def confirm(self, token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return False
if data.get('confirm') != self.id:
return False
self.confirmed = True
db.session.add(self)
return True
def generate_reset_token(self, expiration=3600):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'reset': self.id})
def reset_password(self, token, new_password):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return False
if data.get('reset') != self.id:
return False
self.password = new_password
db.session.add(self)
return True
def generate_email_change_token(self, new_email, expiration=3600):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'change_email': self.id, 'new_email': new_email})
def change_email(self, token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return False
if data.get('change_email') != self.id:
return False
new_email = data.get('new_email')
if new_email is None:
return False
if self.query.filter_by(email=new_email).first() is not None:
return False
self.email = new_email
self.avatar_hash = hashlib.md5(
self.email.encode('utf-8')).hexdigest()
db.session.add(self)
return True
def can(self, permissions):
return self.role is not None and \
(self.role.permissions & permissions) == permissions
def is_administrator(self):
return self.can(Permission.ADMINISTER)
def ping(self):
self.last_seen = datetime.utcnow()
db.session.add(self)
def gravatar(self, size=100, default='identicon', rating='g'):
if request.is_secure:
url = 'https://secure.gravatar.com/avatar'
else:
url = 'http://www.gravatar.com/avatar'
hash = self.avatar_hash or hashlib.md5(
self.email.encode('utf-8')).hexdigest()
return '{url}/{hash}?s={size}&d={default}&r={rating}'.format(
url=url, hash=hash, size=size, default=default, rating=rating)
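    # gravatar() above builds URLs of the form (illustrative values):
    #   https://secure.gravatar.com/avatar/<md5 of email>?s=100&d=identicon&r=g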
def follow(self, user):
if not self.is_following(user):
f = Follow(follower=self, followed=user)
db.session.add(f)
def unfollow(self, user):
f = self.followed.filter_by(followed_id=user.id).first()
if f:
db.session.delete(f)
def is_following(self, user):
return self.followed.filter_by(
followed_id=user.id).first() is not None
def is_followed_by(self, user):
return self.followers.filter_by(
follower_id=user.id).first() is not None
@property
def followed_posts(self):
return Post.query.join(Follow, Follow.followed_id == Post.author_id)\
.filter(Follow.follower_id == self.id)
def to_json(self):
json_user = {
'url': url_for('api.get_user', id=self.id, _external=True),
'username': self.username,
'member_since': self.member_since,
'last_seen': self.last_seen,
'posts': url_for('api.get_user_posts', id=self.id, _external=True),
'followed_posts': url_for('api.get_user_followed_posts',
id=self.id, _external=True),
'post_count': self.posts.count()
}
return json_user
def generate_auth_token(self, expiration):
s = Serializer(current_app.config['SECRET_KEY'],
expires_in=expiration)
return s.dumps({'id': self.id}).decode('ascii')
@staticmethod
def verify_auth_token(token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return None
return User.query.get(data['id'])
def __repr__(self):
return '<User %r>' % self.username
class AnonymousUser(AnonymousUserMixin):
def can(self, permissions):
return False
def is_administrator(self):
return False
login_manager.anonymous_user = AnonymousUser
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
'''
Each row of this table stores one item: img_url, href_url, content, title,
price, rate and a timestamp.
'''
class HostPageTable(db.Model):
__tablename__ = 'hostpagetable'
id = db.Column(db.Integer, primary_key=True)
img_url = db.Column(db.String(255))
href_url = db.Column(db.String(255))
content = db.Column(db.String(255))
title = db.Column(db.String(255))
price = db.Column(db.Float)
rate = db.Column(db.Float)
    timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
def __repr__(self):
return '<HostPageTable %s>' % self.title
'''
Each row of this table stores one picture: img_url, name and a timestamp.
'''
class Pict(db.Model):
__tablename__ = 'pict'
id = db.Column(db.Integer, primary_key=True)
img_url = db.Column(db.String(255))
name = db.Column(db.String(255))
    timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
def __repr__(self):
return '<Pict %s>' % self.name
'''
ATTENTION: important table.
The goods table records all of the information about goods.
'''
class Goods(db.Model):
__tablename__ = 'goods'
    id = db.Column(db.Integer, primary_key=True)
    code = db.Column(db.String(255), index=True, unique=True)
    name = db.Column(db.String(255))
    img = db.Column(db.String(255))
    link = db.Column(db.String(255))
    shop = db.Column(db.String(255))
    price = db.Column(db.Float)
    soldpermonth = db.Column(db.Integer)
    rate = db.Column(db.Float)
    earn = db.Column(db.Float)
    seller = db.Column(db.String(255))
    idlinkshort = db.Column(db.String(255))
    idlink = db.Column(db.String(255))
    idcmd = db.Column(db.String(255))
    couponsnum = db.Column(db.Integer)
    leftcoupon = db.Column(db.Integer)
    couponvalue = db.Column(db.String(255))
    couponstart = db.Column(db.String(255))
    couponend = db.Column(db.String(255))
    couponlink = db.Column(db.String(255))
    couponcmd = db.Column(db.String(255))
    couponshortlink = db.Column(db.String(255))
    bsellsplan = db.Column(db.Boolean)
def __repr__(self):
return '<Goods %s>' % self.name
class Coupons(db.Model):
__tablename__ = 'coupons'
    category = db.Column(db.String(255))
    coupon_click_url = db.Column(db.String(255))
    coupon_info = db.Column(db.String(255))
    item_url = db.Column(db.String(255))
    zk_final_price = db.Column(db.String(255))
    commission_rate = db.Column(db.String(255))
    user_type = db.Column(db.String(255))
    samll_images = db.Column(db.String(255))
    title = db.Column(db.String(255))
    num_iid = db.Column(db.String(255), primary_key=True, index=True,
                        unique=True)
    seller_id = db.Column(db.String(255))
    coupon_total_count = db.Column(db.String(255))
    volumn = db.Column(db.String(255))
    nick = db.Column(db.String(255))
    pict_url = db.Column(db.String(255))
    coupon_remain_count = db.Column(db.String(255))
    coupon_start_time = db.Column(db.String(255))
    shop_title = db.Column(db.String(255))
    item_description = db.Column(db.String(255))
    coupon_end_time = db.Column(db.String(255))
def __repr__(self):
return '<Coupons %s>' % self.title
# set character_set_client = utf8;
# set character_set_server = utf8;
# set character_set_connection = utf8;
# set character_set_database = utf8;
# set character_set_results = utf8;
# set collation_connection = utf8_general_ci;
# set collation_database = utf8_general_ci;
# set collation_server = utf8_general_ci;
'''
Each row of this table stores one news item: title, content, link and time.
'''
class News(db.Model):
__tablename__ = 'news'
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(255))
content = db.Column(db.String(255))
link = db.Column(db.String(255))
    time = db.Column(db.DateTime, index=True, default=datetime.utcnow)
def __repr__(self):
return '<news %s>' % self.title
class Post(db.Model):
__tablename__ = 'posts'
id = db.Column(db.Integer, primary_key=True)
body = db.Column(db.Text)
body_html = db.Column(db.Text)
timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
author_id = db.Column(db.Integer, db.ForeignKey('users.id'))
comments = db.relationship('Comment', backref='post', lazy='dynamic')
@staticmethod
def generate_fake(count=100):
from random import seed, randint
import forgery_py
seed()
user_count = User.query.count()
for i in range(count):
u = User.query.offset(randint(0, user_count - 1)).first()
p = Post(body=forgery_py.lorem_ipsum.sentences(randint(1, 5)),
timestamp=forgery_py.date.date(True),
author=u)
db.session.add(p)
db.session.commit()
@staticmethod
def on_changed_body(target, value, oldvalue, initiator):
allowed_tags = ['a', 'abbr', 'acronym', 'b', 'blockquote', 'code',
'em', 'i', 'li', 'ol', 'pre', 'strong', 'ul',
'h1', 'h2', 'h3', 'p']
target.body_html = bleach.linkify(bleach.clean(
markdown(value, output_format='html'),
tags=allowed_tags, strip=True))
def to_json(self):
json_post = {
'url': url_for('api.get_post', id=self.id, _external=True),
'body': self.body,
'body_html': self.body_html,
'timestamp': self.timestamp,
'author': url_for('api.get_user', id=self.author_id,
_external=True),
'comments': url_for('api.get_post_comments', id=self.id,
_external=True),
'comment_count': self.comments.count()
}
return json_post
@staticmethod
def from_json(json_post):
body = json_post.get('body')
if body is None or body == '':
raise ValidationError('post does not have a body')
return Post(body=body)
db.event.listen(Post.body, 'set', Post.on_changed_body)
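# With this listener, assigning Markdown to ``body`` renders and sanitizes
# ``body_html`` immediately; e.g. setting body to '**hi**' should leave
# body_html as '<p><strong>hi</strong></p>' (illustrative, given the allowed
# tags above).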
class Comment(db.Model):
__tablename__ = 'comments'
id = db.Column(db.Integer, primary_key=True)
body = db.Column(db.Text)
body_html = db.Column(db.Text)
timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
disabled = db.Column(db.Boolean)
author_id = db.Column(db.Integer, db.ForeignKey('users.id'))
post_id = db.Column(db.Integer, db.ForeignKey('posts.id'))
@staticmethod
def on_changed_body(target, value, oldvalue, initiator):
allowed_tags = ['a', 'abbr', 'acronym', 'b', 'code', 'em', 'i',
'strong']
target.body_html = bleach.linkify(bleach.clean(
markdown(value, output_format='html'),
tags=allowed_tags, strip=True))
def to_json(self):
json_comment = {
'url': url_for('api.get_comment', id=self.id, _external=True),
'post': url_for('api.get_post', id=self.post_id, _external=True),
'body': self.body,
'body_html': self.body_html,
'timestamp': self.timestamp,
'author': url_for('api.get_user', id=self.author_id,
_external=True),
}
return json_comment
@staticmethod
def from_json(json_comment):
body = json_comment.get('body')
if body is None or body == '':
raise ValidationError('comment does not have a body')
return Comment(body=body)
db.event.listen(Comment.body, 'set', Comment.on_changed_body)
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import os
import re
import uuid
from lxml import etree
from nova import context
from nova import flags
from nova.openstack.common import importutils
from nova.openstack.common import jsonutils
from nova.openstack.common.log import logging
from nova import test
from nova.tests import fake_network
from nova.tests.image import fake
from nova.tests.integrated import integrated_helpers
FLAGS = flags.FLAGS
LOG = logging.getLogger(__name__)
class NoMatch(test.TestingException):
pass
class ApiSampleTestBase(integrated_helpers._IntegratedTestBase):
ctype = 'json'
all_extensions = False
extension_name = None
def setUp(self):
self.flags(use_ipv6=False,
osapi_compute_link_prefix=self._get_host(),
osapi_glance_link_prefix=self._get_glance_host())
if not self.all_extensions:
ext = [self.extension_name] if self.extension_name else []
self.flags(osapi_compute_extension=ext)
super(ApiSampleTestBase, self).setUp()
fake_network.stub_compute_with_ips(self.stubs)
self.generate_samples = os.getenv('GENERATE_SAMPLES') is not None
def _pretty_data(self, data):
if self.ctype == 'json':
data = jsonutils.dumps(jsonutils.loads(data), sort_keys=True,
indent=4)
else:
xml = etree.XML(data)
data = etree.tostring(xml, encoding="UTF-8",
xml_declaration=True, pretty_print=True)
return '\n'.join(line.rstrip() for line in data.split('\n')).strip()
def _objectify(self, data):
if not data:
return {}
if self.ctype == 'json':
return jsonutils.loads(data)
else:
def to_dict(node):
ret = {}
if node.items():
ret.update(dict(node.items()))
if node.text:
ret['__content__'] = node.text
if node.tag:
ret['__tag__'] = node.tag
if node.nsmap:
ret['__nsmap__'] = node.nsmap
for element in node:
ret.setdefault(node.tag, [])
ret[node.tag].append(to_dict(element))
return ret
return to_dict(etree.fromstring(data))
@classmethod
def _get_sample_path(cls, name, dirname, suffix=''):
parts = [dirname]
parts.append('api_samples')
if cls.all_extensions:
parts.append('all_extensions')
if cls.extension_name:
alias = importutils.import_class(cls.extension_name).alias
parts.append(alias)
parts.append(name + "." + cls.ctype + suffix)
return os.path.join(*parts)
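    # Example of the paths built above (illustrative): with ctype 'json' and
    # all_extensions set, _get_template('server-post-req') resolves to
    # <this test dir>/api_samples/all_extensions/server-post-req.json.tpl,
    # while _get_sample() builds the same relative path under ../../../doc.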
@classmethod
def _get_sample(cls, name):
dirname = os.path.dirname(os.path.abspath(__file__))
dirname = os.path.join(dirname, "../../../doc")
return cls._get_sample_path(name, dirname)
@classmethod
def _get_template(cls, name):
dirname = os.path.dirname(os.path.abspath(__file__))
return cls._get_sample_path(name, dirname, suffix='.tpl')
def _read_template(self, name):
template = self._get_template(name)
if self.generate_samples and not os.path.exists(template):
with open(template, 'w') as outf:
pass
with open(template) as inf:
return inf.read().strip()
def _write_sample(self, name, data):
with open(self._get_sample(name), 'w') as outf:
outf.write(data)
def _compare_result(self, subs, expected, result):
matched_value = None
if isinstance(expected, dict):
if not isinstance(result, dict):
raise NoMatch(
_('Result: %(result)s is not a dict.') % locals())
ex_keys = sorted(expected.keys())
res_keys = sorted(result.keys())
if ex_keys != res_keys:
raise NoMatch(_('Key mismatch:\n'
'%(ex_keys)s\n%(res_keys)s') % locals())
for key in ex_keys:
res = self._compare_result(subs, expected[key], result[key])
matched_value = res or matched_value
elif isinstance(expected, list):
if not isinstance(result, list):
raise NoMatch(
_('Result: %(result)s is not a list.') % locals())
            # NOTE(maurosr): sort the list of dicts by their __tag__ element
            # when using xml. This avoids failures in the keypairs api samples,
            # which are ordered differently depending on whether the private
            # key itself or its regular expression is used, and it does not
            # interfere with other tests.
            # Should we define a criterion for ordering json? It doesn't seem
            # necessary so far.
for ex_obj, res_obj in zip(sorted(expected, key=lambda k:
k.get('__tag__', k)),
sorted(result, key=lambda k:
k.get('__tag__', k))):
res = self._compare_result(subs, ex_obj, res_obj)
matched_value = res or matched_value
elif isinstance(expected, basestring) and '%' in expected:
try:
# NOTE(vish): escape stuff for regex
for char in ['[', ']', '<', '>', '?']:
expected = expected.replace(char, '\%s' % char)
expected = expected % subs
match = re.match(expected, result)
except Exception as exc:
raise NoMatch(_('Values do not match:\n'
'%(expected)s\n%(result)s') % locals())
if not match:
raise NoMatch(_('Values do not match:\n'
'%(expected)s\n%(result)s') % locals())
if match.groups():
matched_value = match.groups()[0]
else:
if isinstance(expected, basestring):
# NOTE(danms): Ignore whitespace in this comparison
expected = expected.strip()
result = result.strip()
if expected != result:
raise NoMatch(_('Values do not match:\n'
'%(expected)s\n%(result)s') % locals())
return matched_value
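    # For instance, a template value such as '%(id)s' is expanded with the
    # 'id' regex from _get_regexes() (which has a capture group) and matched
    # against the response; the first capture group, if any, becomes
    # matched_value, which is how tests like ServersSampleJsonTest recover the
    # uuid of a newly created server.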
def _verify_response(self, name, subs, response):
expected = self._read_template(name)
expected = self._objectify(expected)
result = self._pretty_data(response.read())
if self.generate_samples:
self._write_sample(name, result)
result = self._objectify(result)
return self._compare_result(subs, expected, result)
def _get_host(self):
return 'http://openstack.example.com'
def _get_glance_host(self):
return 'http://glance.openstack.example.com'
def _get_regexes(self):
if self.ctype == 'json':
text = r'(\\"|[^"])*'
else:
text = r'[^<]*'
return {
'timestamp': '[0-9]{4}-[0,1][0-9]-[0-3][0-9]T'
'[0-9]{2}:[0-9]{2}:[0-9]{2}'
'(Z|(\+|-)[0-9]{2}:[0-9]{2})',
'password': '[0-9a-zA-Z]{1,12}',
            'ip': '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}',
'ip6': '([0-9a-zA-Z]{1,4}:){1,7}:?[0-9a-zA-Z]',
'id': '([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}'
'-[0-9a-f]{4}-[0-9a-f]{12})',
'uuid': '[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}'
'-[0-9a-f]{4}-[0-9a-f]{12}',
'private_key': '-----BEGIN RSA PRIVATE KEY-----'
'[a-zA-Z0-9\n/+=]*'
'-----END RSA PRIVATE KEY-----',
'public_key': 'ssh-rsa[ a-zA-Z0-9/+=]*'
'Generated by Nova',
'fingerprint': '([0-9a-f]{2}:){15}[0-9a-f]{2}',
# '[0-9a-f]{2}:[0-9a-f]{2}:[0-9a-f]{2}:[0-9a-f]{2}:'
# '[0-9a-f]{2}:[0-9a-f]{2}:[0-9a-f]{2}:[0-9a-f]{2}:'
# '[0-9a-f]{2}:[0-9a-f]{2}:[0-9a-f]{2}:[0-9a-f]{2}:'
# '[0-9a-f]{2}:[0-9a-f]{2}:[0-9a-f]{2}:[0-9a-f]{2}',
'host': self._get_host(),
'glance_host': self._get_glance_host(),
'compute_host': self.compute.host,
'text': text,
}
def _get_response(self, url, method, body=None, strip_version=False):
headers = {}
headers['Content-Type'] = 'application/' + self.ctype
headers['Accept'] = 'application/' + self.ctype
return self.api.api_request(url, body=body, method=method,
headers=headers, strip_version=strip_version)
def _do_get(self, url, strip_version=False):
return self._get_response(url, 'GET', strip_version=strip_version)
def _do_post(self, url, name, subs, method='POST'):
body = self._read_template(name) % subs
if self.generate_samples:
self._write_sample(name, body)
return self._get_response(url, method, body)
def _do_put(self, url, name, subs):
return self._do_post(url, name, subs, method='PUT')
def _do_delete(self, url):
return self._get_response(url, 'DELETE')
class VersionsSampleJsonTest(ApiSampleTestBase):
def test_versions_get(self):
response = self._do_get('', strip_version=True)
subs = self._get_regexes()
return self._verify_response('versions-get-resp', subs, response)
class VersionsSampleXmlTest(VersionsSampleJsonTest):
ctype = 'xml'
class ServersSampleBase(ApiSampleTestBase):
def _post_server(self):
subs = {
'image_id': fake.get_valid_image_id(),
'host': self._get_host(),
}
response = self._do_post('servers', 'server-post-req', subs)
self.assertEqual(response.status, 202)
subs = self._get_regexes()
return self._verify_response('server-post-resp', subs, response)
class ServersSampleJsonTest(ServersSampleBase):
def test_servers_post(self):
return self._post_server()
def test_servers_get(self):
uuid = self.test_servers_post()
response = self._do_get('servers/%s' % uuid)
subs = self._get_regexes()
subs['hostid'] = '[a-f0-9]+'
subs['id'] = uuid
return self._verify_response('server-get-resp', subs, response)
def test_servers_list(self):
uuid = self._post_server()
response = self._do_get('servers')
self.assertEqual(response.status, 200)
subs = self._get_regexes()
subs['id'] = uuid
return self._verify_response('servers-list-resp', subs, response)
def test_servers_details(self):
uuid = self._post_server()
response = self._do_get('servers/detail')
self.assertEqual(response.status, 200)
subs = self._get_regexes()
subs['hostid'] = '[a-f0-9]+'
subs['id'] = uuid
return self._verify_response('servers-details-resp', subs, response)
class ServersSampleXmlTest(ServersSampleJsonTest):
ctype = 'xml'
class ServersSampleAllExtensionJsonTest(ServersSampleJsonTest):
all_extensions = True
class ServersSampleAllExtensionXmlTest(ServersSampleXmlTest):
all_extensions = True
class ServersMetadataJsonTest(ServersSampleBase):
def _create_and_set(self, subs):
uuid = self._post_server()
response = self._do_put('servers/%s/metadata' % uuid,
'server-metadata-all-req',
subs)
self.assertEqual(response.status, 200)
self._verify_response('server-metadata-all-resp', subs, response)
return uuid
def test_metadata_put_all(self):
"""Test setting all metadata for a server"""
subs = {'value': 'Foo Value'}
return self._create_and_set(subs)
def test_metadata_post_all(self):
"""Test updating all metadata for a server"""
subs = {'value': 'Foo Value'}
uuid = self._create_and_set(subs)
subs['value'] = 'Bar Value'
response = self._do_post('servers/%s/metadata' % uuid,
'server-metadata-all-req',
subs)
self.assertEqual(response.status, 200)
self._verify_response('server-metadata-all-resp', subs, response)
def test_metadata_get_all(self):
"""Test getting all metadata for a server"""
subs = {'value': 'Foo Value'}
uuid = self._create_and_set(subs)
response = self._do_get('servers/%s/metadata' % uuid)
self.assertEqual(response.status, 200)
self._verify_response('server-metadata-all-resp', subs, response)
def test_metadata_put(self):
"""Test putting an individual metadata item for a server"""
subs = {'value': 'Foo Value'}
uuid = self._create_and_set(subs)
subs['value'] = 'Bar Value'
response = self._do_put('servers/%s/metadata/foo' % uuid,
'server-metadata-req',
subs)
self.assertEqual(response.status, 200)
return self._verify_response('server-metadata-resp', subs, response)
def test_metadata_get(self):
"""Test getting an individual metadata item for a server"""
subs = {'value': 'Foo Value'}
uuid = self._create_and_set(subs)
response = self._do_get('servers/%s/metadata/foo' % uuid)
self.assertEqual(response.status, 200)
return self._verify_response('server-metadata-resp', subs, response)
def test_metadata_delete(self):
"""Test deleting an individual metadata item for a server"""
subs = {'value': 'Foo Value'}
uuid = self._create_and_set(subs)
response = self._do_delete('servers/%s/metadata/foo' % uuid)
self.assertEqual(response.status, 204)
self.assertEqual(response.read(), '')
class ServersMetadataXmlTest(ServersMetadataJsonTest):
ctype = 'xml'
class ExtensionsSampleJsonTest(ApiSampleTestBase):
all_extensions = True
def test_extensions_get(self):
response = self._do_get('extensions')
subs = self._get_regexes()
return self._verify_response('extensions-get-resp', subs, response)
class ExtensionsSampleXmlTest(ExtensionsSampleJsonTest):
ctype = 'xml'
class FlavorsSampleJsonTest(ApiSampleTestBase):
def test_flavors_get(self):
response = self._do_get('flavors/1')
subs = self._get_regexes()
return self._verify_response('flavor-get-resp', subs, response)
def test_flavors_list(self):
response = self._do_get('flavors')
subs = self._get_regexes()
return self._verify_response('flavors-list-resp', subs, response)
class FlavorsSampleXmlTest(FlavorsSampleJsonTest):
ctype = 'xml'
class FlavorsSampleAllExtensionJsonTest(FlavorsSampleJsonTest):
all_extensions = True
class FlavorsSampleAllExtensionXmlTest(FlavorsSampleXmlTest):
all_extensions = True
class ImagesSampleJsonTest(ApiSampleTestBase):
def test_images_list(self):
"""Get api sample of images get list request"""
response = self._do_get('images')
subs = self._get_regexes()
return self._verify_response('images-list-get-resp', subs, response)
def test_image_get(self):
"""Get api sample of one single image details request"""
image_id = fake.get_valid_image_id()
response = self._do_get('images/%s' % image_id)
self.assertEqual(response.status, 200)
subs = self._get_regexes()
subs['image_id'] = image_id
return self._verify_response('image-get-resp', subs, response)
def test_images_details(self):
"""Get api sample of all images details request"""
response = self._do_get('images/detail')
subs = self._get_regexes()
return self._verify_response('images-details-get-resp', subs, response)
def test_image_metadata_get(self):
"""Get api sample of a image metadata request"""
image_id = fake.get_valid_image_id()
response = self._do_get('images/%s/metadata' % image_id)
subs = self._get_regexes()
subs['image_id'] = image_id
return self._verify_response('image-metadata-get-resp', subs, response)
def test_image_metadata_post(self):
"""Get api sample to update metadata of an image metadata request"""
image_id = fake.get_valid_image_id()
response = self._do_post(
'images/%s/metadata' % image_id,
'image-metadata-post-req', {})
self.assertEqual(response.status, 200)
subs = self._get_regexes()
return self._verify_response('image-metadata-post-resp',
subs, response)
def test_image_metadata_put(self):
"""Get api sample of image metadata put request"""
image_id = fake.get_valid_image_id()
response = self._do_put('images/%s/metadata' % image_id,
'image-metadata-put-req', {})
self.assertEqual(response.status, 200)
subs = self._get_regexes()
return self._verify_response('image-metadata-put-resp',
subs, response)
def test_image_meta_key_get(self):
"""Get api sample of a image metadata key request"""
image_id = fake.get_valid_image_id()
key = "kernel_id"
response = self._do_get('images/%s/metadata/%s' % (image_id, key))
subs = self._get_regexes()
return self._verify_response('image-meta-key-get', subs, response)
def test_image_meta_key_put(self):
"""Get api sample of image metadata key put request"""
image_id = fake.get_valid_image_id()
key = "auto_disk_config"
response = self._do_put('images/%s/metadata/%s' % (image_id, key),
'image-meta-key-put-req', {})
self.assertEqual(response.status, 200)
subs = self._get_regexes()
return self._verify_response('image-meta-key-put-resp',
subs,
response)
class ImagesSampleXmlTest(ImagesSampleJsonTest):
ctype = 'xml'
class LimitsSampleJsonTest(ApiSampleTestBase):
def test_limits_get(self):
response = self._do_get('limits')
subs = self._get_regexes()
return self._verify_response('limit-get-resp', subs, response)
class LimitsSampleXmlTest(LimitsSampleJsonTest):
ctype = 'xml'
class ServersActionsJsonTest(ServersSampleBase):
def setUp(self):
super(ServersActionsJsonTest, self).setUp()
def _test_server_action(self, uuid, action,
subs={}, resp_tpl=None, code=202):
subs.update({'action': action})
response = self._do_post('servers/%s/action' % uuid,
'server-action-%s' % action.lower(),
subs)
self.assertEqual(response.status, code)
if resp_tpl:
subs.update(self._get_regexes())
return self._verify_response(resp_tpl, subs, response)
else:
self.assertEqual(response.read(), "")
def test_server_password(self):
uuid = self._post_server()
self._test_server_action(uuid, "changePassword",
{"password": "foo"})
def test_server_reboot(self):
uuid = self._post_server()
self._test_server_action(uuid, "reboot",
{"type": "HARD"})
self._test_server_action(uuid, "reboot",
{"type": "SOFT"})
def test_server_rebuild(self):
uuid = self._post_server()
image = self.api.get_images()[0]['id']
subs = {'host': self._get_host(),
'uuid': image,
'name': 'foobar',
'pass': 'seekr3t',
'ip': '1.2.3.4',
'ip6': 'fe80::100',
'hostid': '[a-f0-9]+',
}
self._test_server_action(uuid, 'rebuild', subs,
'server-action-rebuild-resp')
def test_server_resize(self):
FLAGS.allow_resize_to_same_host = True
uuid = self._post_server()
self._test_server_action(uuid, "resize",
{"id": 2,
"host": self._get_host()})
return uuid
def test_server_revert_resize(self):
uuid = self.test_server_resize()
self._test_server_action(uuid, "revertResize")
def test_server_confirm_resize(self):
uuid = self.test_server_resize()
self._test_server_action(uuid, "confirmResize", code=204)
def test_server_create_image(self):
uuid = self._post_server()
self._test_server_action(uuid, 'createImage',
{'name': 'foo-image',
'meta_var': 'myvar',
'meta_val': 'foobar'})
class ServersActionsXmlTest(ServersActionsJsonTest):
ctype = 'xml'
class ServerStartStopJsonTest(ServersSampleBase):
extension_name = "nova.api.openstack.compute.contrib" + \
".server_start_stop.Server_start_stop"
def _test_server_action(self, uuid, action):
response = self._do_post('servers/%s/action' % uuid,
'server_start_stop',
{'action': action})
self.assertEqual(response.status, 202)
self.assertEqual(response.read(), "")
def test_server_start(self):
uuid = self._post_server()
self._test_server_action(uuid, 'os-stop')
self._test_server_action(uuid, 'os-start')
def test_server_stop(self):
uuid = self._post_server()
self._test_server_action(uuid, 'os-stop')
class ServerStartStopXmlTest(ServerStartStopJsonTest):
ctype = 'xml'
class UserDataJsonTest(ApiSampleTestBase):
extension_name = "nova.api.openstack.compute.contrib.user_data.User_data"
def test_user_data_post(self):
user_data_contents = '#!/bin/bash\n/bin/su\necho "I am in you!"\n'
user_data = base64.b64encode(user_data_contents)
subs = {
'image_id': fake.get_valid_image_id(),
'host': self._get_host(),
'user_data': user_data
}
response = self._do_post('servers', 'userdata-post-req', subs)
self.assertEqual(response.status, 202)
subs.update(self._get_regexes())
return self._verify_response('userdata-post-resp', subs, response)
class UserDataXmlTest(UserDataJsonTest):
ctype = 'xml'
class FlavorsExtraDataJsonTest(ApiSampleTestBase):
extension_name = ('nova.api.openstack.compute.contrib.flavorextradata.'
'Flavorextradata')
def _get_flags(self):
f = super(FlavorsExtraDataJsonTest, self)._get_flags()
f['osapi_compute_extension'] = FLAGS.osapi_compute_extension[:]
# Flavorextradata extension also needs Flavormanage to be loaded.
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.flavormanage.Flavormanage')
return f
def test_flavors_extra_data_get(self):
response = self._do_get('flavors/1')
subs = self._get_regexes()
return self._verify_response('flavors-extra-data-get-resp', subs,
response)
def test_flavors_extra_data_list(self):
response = self._do_get('flavors/detail')
subs = self._get_regexes()
return self._verify_response('flavors-extra-data-list-resp', subs,
response)
def test_flavors_extra_data_post(self):
response = self._do_post('flavors',
'flavors-extra-data-post-req',
{})
self.assertEqual(response.status, 200)
subs = self._get_regexes()
return self._verify_response('flavors-extra-data-post-resp',
subs, response)
class FlavorsExtraDataXmlTest(FlavorsExtraDataJsonTest):
ctype = 'xml'
class SecurityGroupsSampleJsonTest(ServersSampleBase):
extension_name = "nova.api.openstack.compute.contrib" + \
".security_groups.Security_groups"
def test_security_group_create(self):
name = self.ctype + '-test'
subs = {
'group_name': name,
"description": "description",
}
response = self._do_post('os-security-groups',
'security-group-post-req', subs)
self.assertEqual(response.status, 200)
self._verify_response('security-groups-create-resp', subs, response)
def test_security_groups_list(self):
"""Get api sample of security groups get list request"""
response = self._do_get('os-security-groups')
subs = self._get_regexes()
return self._verify_response('security-groups-list-get-resp',
subs, response)
def test_security_groups_get(self):
"""Get api sample of security groups get request"""
security_group_id = '1'
response = self._do_get('os-security-groups/%s' % security_group_id)
subs = self._get_regexes()
return self._verify_response('security-groups-get-resp',
subs, response)
def test_security_groups_list_server(self):
"""Get api sample of security groups for a specific server."""
uuid = self._post_server()
response = self._do_get('servers/%s/os-security-groups' % uuid)
subs = self._get_regexes()
return self._verify_response('server-security-groups-list-resp',
subs, response)
class SecurityGroupsSampleXmlTest(SecurityGroupsSampleJsonTest):
ctype = 'xml'
class SchedulerHintsJsonTest(ApiSampleTestBase):
extension_name = ("nova.api.openstack.compute.contrib.scheduler_hints."
"Scheduler_hints")
def test_scheduler_hints_post(self):
"""Get api sample of scheduler hint post request"""
hints = {'image_id': fake.get_valid_image_id(),
'image_near': str(uuid.uuid4())
}
response = self._do_post('servers', 'scheduler-hints-post-req',
hints)
self.assertEqual(response.status, 202)
subs = self._get_regexes()
return self._verify_response('scheduler-hints-post-resp', subs,
response)
class SchedulerHintsXmlTest(SchedulerHintsJsonTest):
ctype = 'xml'
class ConsoleOutputSampleJsonTest(ServersSampleBase):
extension_name = "nova.api.openstack.compute.contrib" + \
".console_output.Console_output"
def test_get_console_output(self):
uuid = self._post_server()
response = self._do_post('servers/%s/action' % uuid,
'console-output-post-req',
{'action': 'os-getConsoleOutput'})
self.assertEqual(response.status, 200)
subs = self._get_regexes()
return self._verify_response('console-output-post-resp',
subs, response)
class ConsoleOutputSampleXmlTest(ConsoleOutputSampleJsonTest):
ctype = 'xml'
class ExtendedServerAttributesJsonTest(ServersSampleBase):
extension_name = "nova.api.openstack.compute.contrib" + \
".extended_server_attributes" + \
".Extended_server_attributes"
def test_extended_server_attrs_get(self):
uuid = self._post_server()
response = self._do_get('servers/%s' % uuid)
subs = self._get_regexes()
subs['hostid'] = '[a-f0-9]+'
subs['id'] = uuid
subs['instance_name'] = 'instance-\d{8}'
return self._verify_response('extended-server-attrs-get',
subs, response)
def test_extended_server_attrs_list(self):
uuid = self._post_server()
response = self._do_get('servers/detail')
subs = self._get_regexes()
subs['hostid'] = '[a-f0-9]+'
subs['id'] = uuid
subs['instance_name'] = 'instance-\d{8}'
return self._verify_response('extended-server-attrs-list',
subs, response)
class ExtendedServerAttributesXmlTest(ExtendedServerAttributesJsonTest):
ctype = 'xml'
class FloatingIpsJsonTest(ApiSampleTestBase):
extension_name = "nova.api.openstack.compute.contrib." \
"floating_ips.Floating_ips"
def setUp(self):
super(FloatingIpsJsonTest, self).setUp()
pool = FLAGS.default_floating_pool
interface = FLAGS.public_interface
self.ip_pool = [
{
'address': "10.10.10.1",
'pool': pool,
'interface': interface
},
{
'address': "10.10.10.2",
'pool': pool,
'interface': interface
},
{
'address': "10.10.10.3",
'pool': pool,
'interface': interface
},
]
self.compute.db.floating_ip_bulk_create(
context.get_admin_context(), self.ip_pool)
def tearDown(self):
self.compute.db.floating_ip_bulk_destroy(
context.get_admin_context(), self.ip_pool)
super(FloatingIpsJsonTest, self).tearDown()
def test_floating_ips_list_empty(self):
response = self._do_get('os-floating-ips')
self.assertEqual(response.status, 200)
subs = self._get_regexes()
return self._verify_response('floating-ips-list-empty-resp',
subs, response)
def test_floating_ips_list(self):
self._do_post('os-floating-ips',
'floating-ips-create-nopool-req',
{})
self._do_post('os-floating-ips',
'floating-ips-create-nopool-req',
{})
response = self._do_get('os-floating-ips')
self.assertEqual(response.status, 200)
subs = self._get_regexes()
return self._verify_response('floating-ips-list-resp',
subs, response)
def test_floating_ips_create_nopool(self):
response = self._do_post('os-floating-ips',
'floating-ips-create-nopool-req',
{})
self.assertEqual(response.status, 200)
subs = self._get_regexes()
self._verify_response('floating-ips-create-resp',
subs, response)
def test_floating_ips_create(self):
response = self._do_post('os-floating-ips',
'floating-ips-create-req',
{"pool": FLAGS.default_floating_pool})
self.assertEqual(response.status, 200)
subs = self._get_regexes()
self._verify_response('floating-ips-create-resp',
subs, response)
def test_floating_ips_get(self):
self.test_floating_ips_create()
# NOTE(sdague): the first floating ip will always have 1 as an id,
# but it would be better if we could get this from the create
response = self._do_get('os-floating-ips/%d' % 1)
self.assertEqual(response.status, 200)
subs = self._get_regexes()
self._verify_response('floating-ips-create-resp',
subs, response)
def test_floating_ips_delete(self):
self.test_floating_ips_create()
response = self._do_delete('os-floating-ips/%d' % 1)
self.assertEqual(response.status, 202)
class FloatingIpsXmlTest(FloatingIpsJsonTest):
ctype = 'xml'
class KeyPairsSampleJsonTest(ApiSampleTestBase):
extension_name = "nova.api.openstack.compute.contrib.keypairs.Keypairs"
def test_keypairs_post(self, public_key=None):
"""Get api sample of key pairs post request"""
key_name = 'keypair-' + str(uuid.uuid4())
response = self._do_post('os-keypairs', 'keypairs-post-req',
{'keypair_name': key_name})
subs = self._get_regexes()
subs['keypair_name'] = '(%s)' % key_name
self.assertEqual(response.status, 200)
self._verify_response('keypairs-post-resp', subs, response)
        # NOTE(maurosr): returning the key_name is necessary because the
        # verification returns the label of the last compared piece of
        # information in the response, not necessarily the key name.
return key_name
def test_keypairs_import_key_post(self):
"""Get api sample of key pairs post to import user's key"""
key_name = 'keypair-' + str(uuid.uuid4())
subs = {
'keypair_name': key_name,
'public_key': "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQDx8nkQv/zgGg"
"B4rMYmIf+6A4l6Rr+o/6lHBQdW5aYd44bd8JttDCE/F/pNRr0l"
"RE+PiqSPO8nDPHw0010JeMH9gYgnnFlyY3/OcJ02RhIPyyxYpv"
"9FhY+2YiUkpwFOcLImyrxEsYXpD/0d3ac30bNH6Sw9JD9UZHYc"
"pSxsIbECHw== Generated by Nova"
}
response = self._do_post('os-keypairs', 'keypairs-import-post-req',
subs)
subs = self._get_regexes()
subs['keypair_name'] = '(%s)' % key_name
self.assertEqual(response.status, 200)
self._verify_response('keypairs-import-post-resp', subs, response)
def test_keypairs_get(self):
"""Get api sample of key pairs get request"""
key_name = self.test_keypairs_post()
response = self._do_get('os-keypairs')
subs = self._get_regexes()
subs['keypair_name'] = '(%s)' % key_name
return self._verify_response('keypairs-get-resp', subs, response)
class KeyPairsSampleXmlTest(KeyPairsSampleJsonTest):
ctype = 'xml'
class RescueJsonTest(ServersSampleBase):
extension_name = ("nova.api.openstack.compute.contrib"
".rescue.Rescue")
def _rescue(self, uuid):
req_subs = {
'password': 'MySecretPass'
}
response = self._do_post('servers/%s/action' % uuid,
'server-rescue-req', req_subs)
self._verify_response('server-rescue', req_subs, response)
def _unrescue(self, uuid):
response = self._do_post('servers/%s/action' % uuid,
'server-unrescue-req', {})
self.assertEqual(response.status, 202)
def test_server_rescue(self):
uuid = self._post_server()
self._rescue(uuid)
# Do a server get to make sure that the 'RESCUE' state is set
response = self._do_get('servers/%s' % uuid)
subs = self._get_regexes()
subs['hostid'] = '[a-f0-9]+'
subs['id'] = uuid
subs['status'] = 'RESCUE'
self._verify_response('server-get-resp-rescue', subs, response)
def test_server_unrescue(self):
uuid = self._post_server()
self._rescue(uuid)
self._unrescue(uuid)
# Do a server get to make sure that the 'ACTIVE' state is back
response = self._do_get('servers/%s' % uuid)
subs = self._get_regexes()
subs['hostid'] = '[a-f0-9]+'
subs['id'] = uuid
subs['status'] = 'ACTIVE'
self._verify_response('server-get-resp-unrescue', subs, response)
class RescueXmlTest(RescueJsonTest):
ctype = 'xml'
class VirtualInterfacesJsonTest(ServersSampleBase):
extension_name = ("nova.api.openstack.compute.contrib"
".virtual_interfaces.Virtual_interfaces")
def test_vifs_list(self):
uuid = self._post_server()
response = self._do_get('servers/%s/os-virtual-interfaces' % uuid)
self.assertEqual(response.status, 200)
subs = self._get_regexes()
subs['mac_addr'] = '(?:[a-f0-9]{2}:){5}[a-f0-9]{2}'
self._verify_response('vifs-list-resp', subs, response)
class VirtualInterfacesXmlTest(VirtualInterfacesJsonTest):
ctype = 'xml'
|
|
# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import logging
import os
import platform
import shlex
from subprocess import Popen, PIPE
from docutils.core import publish_string
from docutils.writers import manpage
from botocore.docs.bcdoc import docevents
from botocore.docs.bcdoc.restdoc import ReSTDocument
from botocore.docs.bcdoc.textwriter import TextWriter
from awscli.clidocs import ProviderDocumentEventHandler
from awscli.clidocs import ServiceDocumentEventHandler
from awscli.clidocs import OperationDocumentEventHandler
from awscli.clidocs import TopicListerDocumentEventHandler
from awscli.clidocs import TopicDocumentEventHandler
from awscli.argprocess import ParamShorthand
from awscli.argparser import ArgTableArgParser
from awscli.topictags import TopicTagDB
from awscli.utils import ignore_ctrl_c
LOG = logging.getLogger('awscli.help')
class ExecutableNotFoundError(Exception):
def __init__(self, executable_name):
super(ExecutableNotFoundError, self).__init__(
'Could not find executable named "%s"' % executable_name)
def get_renderer():
"""
Return the appropriate HelpRenderer implementation for the
current platform.
"""
if platform.system() == 'Windows':
return WindowsHelpRenderer()
else:
return PosixHelpRenderer()
class PagingHelpRenderer(object):
"""
Interface for a help renderer.
The renderer is responsible for displaying the help content on
a particular platform.
"""
PAGER = None
def get_pager_cmdline(self):
pager = self.PAGER
if 'MANPAGER' in os.environ:
pager = os.environ['MANPAGER']
elif 'PAGER' in os.environ:
pager = os.environ['PAGER']
return shlex.split(pager)
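    # Resolution order implemented above: MANPAGER beats PAGER, which beats
    # the class default; e.g. MANPAGER='less -X' yields ['less', '-X'] after
    # shlex.split.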
def render(self, contents):
"""
Each implementation of HelpRenderer must implement this
render method.
"""
converted_content = self._convert_doc_content(contents)
self._send_output_to_pager(converted_content)
def _send_output_to_pager(self, output):
cmdline = self.get_pager_cmdline()
LOG.debug("Running command: %s", cmdline)
p = self._popen(cmdline, stdin=PIPE)
p.communicate(input=output)
def _popen(self, *args, **kwargs):
return Popen(*args, **kwargs)
def _convert_doc_content(self, contents):
return contents
class PosixHelpRenderer(PagingHelpRenderer):
"""
Render help content on a Posix-like system. This includes
Linux and MacOS X.
"""
PAGER = 'less -R'
def _convert_doc_content(self, contents):
man_contents = publish_string(contents, writer=manpage.Writer())
if not self._exists_on_path('groff'):
raise ExecutableNotFoundError('groff')
cmdline = ['groff', '-man', '-T', 'ascii']
LOG.debug("Running command: %s", cmdline)
p3 = self._popen(cmdline, stdin=PIPE, stdout=PIPE, stderr=PIPE)
groff_output = p3.communicate(input=man_contents)[0]
return groff_output
def _send_output_to_pager(self, output):
cmdline = self.get_pager_cmdline()
LOG.debug("Running command: %s", cmdline)
with ignore_ctrl_c():
# We can't rely on the KeyboardInterrupt from
# the CLIDriver being caught because when we
# send the output to a pager it will use various
# control characters that need to be cleaned
# up gracefully. Otherwise if we simply catch
# the Ctrl-C and exit, it will likely leave the
            # user's terminal in a bad state and they'll need
# to manually run ``reset`` to fix this issue.
# Ignoring Ctrl-C solves this issue. It's also
# the default behavior of less (you can't ctrl-c
# out of a manpage).
p = self._popen(cmdline, stdin=PIPE)
p.communicate(input=output)
def _exists_on_path(self, name):
# Since we're only dealing with POSIX systems, we can
# ignore things like PATHEXT.
return any([os.path.exists(os.path.join(p, name))
for p in os.environ.get('PATH', '').split(os.pathsep)])
class WindowsHelpRenderer(PagingHelpRenderer):
"""Render help content on a Windows platform."""
PAGER = 'more'
def _convert_doc_content(self, contents):
text_output = publish_string(contents,
writer=TextWriter())
return text_output
def _popen(self, *args, **kwargs):
# Also set the shell value to True. To get any of the
# piping to a pager to work, we need to use shell=True.
kwargs['shell'] = True
return Popen(*args, **kwargs)
class HelpCommand(object):
"""
HelpCommand Interface
---------------------
A HelpCommand object acts as the interface between objects in the
CLI (e.g. Providers, Services, Operations, etc.) and the documentation
system (bcdoc).
A HelpCommand object wraps the object from the CLI space and provides
a consistent interface to critical information needed by the
documentation pipeline such as the object's name, description, etc.
The HelpCommand object is passed to the component of the
documentation pipeline that fires documentation events. It is
then passed on to each document event handler that has registered
for the events.
All HelpCommand objects contain the following attributes:
+ ``session`` - A ``botocore`` ``Session`` object.
+ ``obj`` - The object that is being documented.
+ ``command_table`` - A dict mapping command names to
callable objects.
+ ``arg_table`` - A dict mapping argument names to callable objects.
+ ``doc`` - A ``Document`` object that is used to collect the
generated documentation.
In addition, please note the `properties` defined below which are
required to allow the object to be used in the document pipeline.
Implementations of HelpCommand are provided here for Provider,
Service and Operation objects. Other implementations for other
types of objects might be needed for customization in plugins.
As long as the implementations conform to this basic interface
it should be possible to pass them to the documentation system
and generate interactive and static help files.
"""
EventHandlerClass = None
"""
Each subclass should define this class variable to point to the
EventHandler class used by this HelpCommand.
"""
def __init__(self, session, obj, command_table, arg_table):
self.session = session
self.obj = obj
if command_table is None:
command_table = {}
self.command_table = command_table
if arg_table is None:
arg_table = {}
self.arg_table = arg_table
self._subcommand_table = {}
self._related_items = []
self.renderer = get_renderer()
self.doc = ReSTDocument(target='man')
@property
def event_class(self):
"""
Return the ``event_class`` for this object.
The ``event_class`` is used by the documentation pipeline
when generating documentation events. For the event below::
doc-title.<event_class>.<name>
The document pipeline would use this property to determine
the ``event_class`` value.
"""
pass
@property
def name(self):
"""
Return the name of the wrapped object.
This would be called by the document pipeline to determine
the ``name`` to be inserted into the event, as shown above.
"""
pass
@property
def subcommand_table(self):
"""These are the commands that may follow after the help command"""
return self._subcommand_table
@property
def related_items(self):
"""This is list of items that are related to the help command"""
return self._related_items
def __call__(self, args, parsed_globals):
if args:
subcommand_parser = ArgTableArgParser({}, self.subcommand_table)
parsed, remaining = subcommand_parser.parse_known_args(args)
if getattr(parsed, 'subcommand', None) is not None:
return self.subcommand_table[parsed.subcommand](remaining,
parsed_globals)
# Create an event handler for a Provider Document
instance = self.EventHandlerClass(self)
# Now generate all of the events for a Provider document.
# We pass ourselves along so that we can, in turn, get passed
# to all event handlers.
docevents.generate_events(self.session, self)
self.renderer.render(self.doc.getvalue())
instance.unregister()
class ProviderHelpCommand(HelpCommand):
"""Implements top level help command.
This is what is called when ``aws help`` is run.
"""
EventHandlerClass = ProviderDocumentEventHandler
def __init__(self, session, command_table, arg_table,
description, synopsis, usage):
HelpCommand.__init__(self, session, None,
command_table, arg_table)
self.description = description
self.synopsis = synopsis
self.help_usage = usage
self._subcommand_table = None
self._topic_tag_db = None
self._related_items = ['aws help topics']
@property
def event_class(self):
return 'aws'
@property
def name(self):
return 'aws'
@property
def subcommand_table(self):
if self._subcommand_table is None:
if self._topic_tag_db is None:
self._topic_tag_db = TopicTagDB()
self._topic_tag_db.load_json_index()
self._subcommand_table = self._create_subcommand_table()
return self._subcommand_table
def _create_subcommand_table(self):
subcommand_table = {}
# Add the ``aws help topics`` command to the ``topic_table``
topic_lister_command = TopicListerCommand(self.session)
subcommand_table['topics'] = topic_lister_command
topic_names = self._topic_tag_db.get_all_topic_names()
# Add all of the possible topics to the ``topic_table``
for topic_name in topic_names:
topic_help_command = TopicHelpCommand(self.session, topic_name)
subcommand_table[topic_name] = topic_help_command
return subcommand_table
class ServiceHelpCommand(HelpCommand):
"""Implements service level help.
This is the object invoked whenever a service command
help is implemented, e.g. ``aws ec2 help``.
"""
EventHandlerClass = ServiceDocumentEventHandler
def __init__(self, session, obj, command_table, arg_table, name,
event_class):
super(ServiceHelpCommand, self).__init__(session, obj, command_table,
arg_table)
self._name = name
self._event_class = event_class
@property
def event_class(self):
return self._event_class
@property
def name(self):
return self._name
class OperationHelpCommand(HelpCommand):
"""Implements operation level help.
This is the object invoked whenever help for a service is requested,
e.g. ``aws ec2 describe-instances help``.
"""
EventHandlerClass = OperationDocumentEventHandler
def __init__(self, session, operation_model, arg_table, name,
event_class):
HelpCommand.__init__(self, session, operation_model, None, arg_table)
self.param_shorthand = ParamShorthand()
self._name = name
self._event_class = event_class
@property
def event_class(self):
return self._event_class
@property
def name(self):
return self._name
class TopicListerCommand(HelpCommand):
EventHandlerClass = TopicListerDocumentEventHandler
def __init__(self, session):
super(TopicListerCommand, self).__init__(session, None, {}, {})
@property
def event_class(self):
return 'topics'
@property
def name(self):
return 'topics'
class TopicHelpCommand(HelpCommand):
EventHandlerClass = TopicDocumentEventHandler
def __init__(self, session, topic_name):
super(TopicHelpCommand, self).__init__(session, None, {}, {})
self._topic_name = topic_name
@property
def event_class(self):
return 'topics.' + self.name
@property
def name(self):
return self._topic_name
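# Illustrative sketch (an assumption, not part of awscli): a plugin that wants
# interactive help for a new object type only needs to conform to the
# HelpCommand interface above -- point ``EventHandlerClass`` at a doc event
# handler and provide ``event_class`` and ``name``. ``MyDocEventHandler`` is a
# hypothetical handler the plugin would register with the doc pipeline.
#
#     class MyPluginHelpCommand(HelpCommand):
#         EventHandlerClass = MyDocEventHandler
#
#         def __init__(self, session, obj):
#             super(MyPluginHelpCommand, self).__init__(session, obj, {}, {})
#
#         @property
#         def event_class(self):
#             return 'myplugin'
#
#         @property
#         def name(self):
#             return 'myplugin'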
|
|
# Copyright 2011 James McCauley
# Copyright 2008 (C) Nicira, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is derived from the packet library in NOX, which was
# developed by Nicira, Inc.
#======================================================================
#
# TCP Header Format
#
#  0                   1                   2                   3
#  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# |          Source Port          |       Destination Port        |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# |                        Sequence Number                        |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# |                     Acknowledgment Number                     |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# |  Data |           |U|A|P|R|S|F|                               |
# | Offset| Reserved  |R|C|S|S|Y|I|            Window             |
# |       |           |G|K|H|T|N|N|                               |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# |           Checksum            |        Urgent Pointer         |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# |                    Options                    |    Padding    |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# |                             data                              |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
#======================================================================
import struct
from packet_utils import *
from socket import htons
from socket import htonl
from packet_base import packet_base
import logging
lg = logging.getLogger('packet')
class tcp_opt:
EOL = 0
NOP = 1
MSS = 2
WSOPT = 3
SACKPERM = 4
SACK = 5
TSOPT = 8
def __init__(self, type, val):
self.type = type
self.val = val
def to_bytes(self):
if self.type == tcp_opt.EOL or self.type == tcp_opt.NOP:
return struct.pack('B',self.type)
elif self.type == tcp_opt.MSS:
return struct.pack('!BBH',self.type,4,self.val)
elif self.type == tcp_opt.WSOPT:
return struct.pack('!BBB',self.type,3,self.val)
elif self.type == tcp_opt.SACKPERM:
return struct.pack('!BB',self.type,2)
elif self.type == tcp_opt.SACK:
return struct.pack("!" + "II" * len(self.val),
*[x for p in self.val for x in p])
elif self.type == tcp_opt.TSOPT:
return struct.pack('!BBII',self.type,10,self.val[0],self.val[1])
else:
lg.info('(tcp_opt to_bytes) warning, unknown option type ' +
str(self.type))
return ''
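# Illustrative examples (not part of the original library) of the wire
# encodings produced by tcp_opt.to_bytes():
#
#     tcp_opt(tcp_opt.MSS, 1460).to_bytes()   # b'\x02\x04\x05\xb4' (kind, length, MSS)
#     tcp_opt(tcp_opt.WSOPT, 7).to_bytes()    # b'\x03\x03\x07' (window scale of 7)
#     tcp_opt(tcp_opt.NOP, None).to_bytes()   # b'\x01' (single-byte option, no length)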
class tcp(packet_base):
"TCP packet struct"
MIN_LEN = 20
FIN_flag = 0x01
SYN_flag = 0x02
RST_flag = 0x04
PSH_flag = 0x08
ACK_flag = 0x10
URG_flag = 0x20
ECN_flag = 0x40
CWR_flag = 0x80
@property
def FIN (self): return True if self.flags & self.FIN_flag else False
@property
def SYN (self): return True if self.flags & self.SYN_flag else False
@property
def RST (self): return True if self.flags & self.RST_flag else False
@property
def PSH (self): return True if self.flags & self.PSH_flag else False
@property
def ACK (self): return True if self.flags & self.ACK_flag else False
@property
def URG (self): return True if self.flags & self.URG_flag else False
@property
def ECN (self): return True if self.flags & self.ECN_flag else False
@property
def CWR (self): return True if self.flags & self.CWR_flag else False
@FIN.setter
def FIN (self, value): self._setflag(self.FIN_flag, value)
@SYN.setter
def SYN (self, value): self._setflag(self.SYN_flag, value)
@RST.setter
def RST (self, value): self._setflag(self.RST_flag, value)
@PSH.setter
def PSH (self, value): self._setflag(self.PSH_flag, value)
@ACK.setter
def ACK (self, value): self._setflag(self.ACK_flag, value)
@URG.setter
def URG (self, value): self._setflag(self.URG_flag, value)
@ECN.setter
def ECN (self, value): self._setflag(self.ECN_flag, value)
@CWR.setter
def CWR (self, value): self._setflag(self.CWR_flag, value)
def _setflag (self, flag, value):
self.flags = (self.flags & ~flag) | (flag if value else 0)
def __init__(self, raw=None, prev=None, **kw):
packet_base.__init__(self)
self.prev = prev
self.srcport = 0 # 16 bit
self.dstport = 0 # 16 bit
self.seq = 0 # 32 bit
self.ack = 0 # 32 bit
self.off = 0 # 4 bits
self.res = 0 # 4 bits
self.flags = 0 # reserved, 2 bits flags 6 bits
self.win = 0 # 16 bits
self.csum = 0 # 16 bits
self.urg = 0 # 16 bits
self.tcplen = 0 # Options? #TODO: FIXME
self.options = []
self.next = b''
if raw is not None:
self.parse(raw)
self._init(kw)
def __str__(self):
f = ''
if self.SYN: f += 'S'
if self.ACK: f += 'A'
if self.FIN: f += 'F'
if self.RST: f += 'R'
if self.PSH: f += 'P'
if self.URG: f += 'U'
if self.ECN: f += 'E'
if self.CWR: f += 'C'
s = '[TCP %s>%s seq:%s ack:%s f:%s]' % (self.srcport,
self.dstport, self.seq, self.ack, f)
return s
def parse_options(self, raw):
self.options = []
dlen = len(raw)
# option parsing
i = tcp.MIN_LEN
arr = raw
while i < self.hdr_len:
# Single-byte options
if ord(arr[i]) == tcp_opt.EOL:
break
if ord(arr[i]) == tcp_opt.NOP:
self.options.append(tcp_opt(tcp_opt.NOP,None))
i += 1
continue
# Sanity checking
if i + 2 > dlen:
raise RuntimeError("Very truncated TCP option")
if i + ord(arr[i+1]) > dlen:
raise RuntimeError("Truncated TCP option")
if ord(arr[i+1]) < 2:
raise RuntimeError("Illegal TCP option length")
# Actual option parsing
if ord(arr[i]) == tcp_opt.MSS:
if ord(arr[i+1]) != 4:
raise RuntimeError("MSS option length != 4")
val = struct.unpack('!H',arr[i+2:i+4])[0]
self.options.append(tcp_opt(tcp_opt.MSS,val))
elif ord(arr[i]) == tcp_opt.WSOPT:
if ord(arr[i+1]) != 3:
raise RuntimeError("WSOPT option length != 3")
self.options.append(tcp_opt(tcp_opt.WSOPT, ord(arr[i+2])))
elif ord(arr[i]) == tcp_opt.SACKPERM:
if ord(arr[i+1]) != 2:
raise RuntimeError("SACKPERM option length != 2")
self.options.append(tcp_opt(tcp_opt.SACKPERM, None))
elif ord(arr[i]) == tcp_opt.SACK:
if ord(arr[i+1]) >= 2 and ((ord(arr[i+1])-2) % 8) == 0:
num = (ord(arr[i+1]) - 2) // 8
val = struct.unpack("!" + "II" * num, arr[i+2:])
val = [(x,y) for x,y in zip(val[0::2],val[1::2])]
self.options.append(tcp_opt(tcp_opt.SACK, val))
else:
raise RuntimeError("Invalid SACK option")
elif ord(arr[i]) == tcp_opt.TSOPT:
if ord(arr[i+1]) != 10:
raise RuntimeError("TSOPT option length != 10")
(val1,val2) = struct.unpack('!II',arr[i+2:i+10])
self.options.append(tcp_opt(tcp_opt.TSOPT,(val1,val2)))
else:
self.msg('(tcp parse_options) warning, unknown option %x '
% (ord(arr[i]),))
self.options.append(tcp_opt(ord(arr[i]), arr[i+2:i+2+ord(arr[i+1])]))
i += ord(arr[i+1])
return i
def parse(self, raw):
assert isinstance(raw, bytes)
self.next = None # In case of unfinished parsing
self.raw = raw
dlen = len(raw)
if dlen < tcp.MIN_LEN:
self.msg('(tcp parse) warning TCP packet data too short to parse header: data len %u' % (dlen,))
return
(self.srcport, self.dstport, self.seq, self.ack, offres, self.flags,
self.win, self.csum, self.urg) \
= struct.unpack('!HHIIBBHHH', raw[:tcp.MIN_LEN])
self.off = offres >> 4
self.res = offres & 0x0f
self.hdr_len = self.off * 4
self.payload_len = dlen - self.hdr_len
self.tcplen = dlen
if dlen < self.tcplen:
self.msg('(tcp parse) warning TCP packet data shorter than TCP len: %u < %u' % (dlen, self.tcplen))
return
if (self.off * 4) < self.MIN_LEN or (self.off * 4) > dlen :
self.msg('(tcp parse) warning TCP data offset too long or too short %u' % (self.off,))
return
try:
self.parse_options(raw)
except Exception as e:
self.msg(e)
return
self.next = raw[self.hdr_len:]
self.parsed = True
def hdr(self, payload, calc_checksum = True):
if calc_checksum:
self.csum = self.checksum(payload=payload)
csum = self.csum
else:
csum = 0
offres = self.off << 4 | self.res
packet = struct.pack('!HHIIBBHHH',
self.srcport, self.dstport, self.seq, self.ack,
offres, self.flags,
self.win, csum, self.urg)
for option in self.options:
packet += option.to_bytes()
return packet
def checksum(self, unparsed=False, payload=None):
"""
Calculates the checksum.
If unparsed, calculates it on the raw, unparsed data. This is
useful for validating that it is correct on an incoming packet.
"""
ip_ver = None
if self.prev.__class__.__name__ == 'ipv4':
ip_ver = 4
elif self.prev.__class__.__name__ == 'ipv6':
ip_ver = 6
else:
self.msg('packet not in IP; cannot calculate checksum ' +
'over pseudo-header' )
return 0
if unparsed:
payload_len = len(self.raw)
payload = self.raw
else:
if payload is not None:
pass
elif isinstance(self.next, packet_base):
payload = self.next.pack()
elif self.next is None:
payload = bytes()
else:
payload = self.next
payload = self.hdr(None, calc_checksum = False) + payload
payload_len = len(payload)
if ip_ver == 4:
ph = struct.pack('!IIBBH', self.prev.srcip.toUnsigned(),
self.prev.dstip.toUnsigned(),
0,
self.prev.protocol,
payload_len)
return checksum(ph + payload, 0, 14)
elif ip_ver == 6:
ph = self.prev.srcip.raw + self.prev.dstip.raw
ph += struct.pack('!IHBB', payload_len, 0, 0,
self.prev.next_header_type)
return checksum(ph + payload, 0, 28)
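# Illustrative usage sketch (not part of the original library): build a SYN
# segment with an MSS option and serialize its header. The checksum is skipped
# here because computing it requires an enclosing ipv4/ipv6 packet in
# ``self.prev`` to supply the pseudo-header.
#
#     t = tcp()
#     t.srcport, t.dstport = 54321, 80
#     t.seq = 1000
#     t.SYN = True
#     t.options.append(tcp_opt(tcp_opt.MSS, 1460))
#     t.off = 6  # header is 24 bytes: 20 fixed + 4 bytes of options
#     raw_header = t.hdr(None, calc_checksum=False)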
|
|
"""The tests for the MQTT binary sensor platform."""
from datetime import datetime, timedelta
import json
from unittest.mock import ANY, patch
from homeassistant.components import binary_sensor, mqtt
from homeassistant.components.mqtt.discovery import async_start
from homeassistant.const import (
EVENT_STATE_CHANGED,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
)
import homeassistant.core as ha
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import (
MockConfigEntry,
async_fire_mqtt_message,
async_fire_time_changed,
async_mock_mqtt_component,
mock_registry,
)
async def test_setting_sensor_value_expires_availability_topic(hass, mqtt_mock, caplog):
"""Test the expiration of the value."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"expire_after": 4,
"force_update": True,
"availability_topic": "availability-topic",
}
},
)
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNAVAILABLE
async_fire_mqtt_message(hass, "availability-topic", "online")
state = hass.states.get("binary_sensor.test")
assert state.state != STATE_UNAVAILABLE
await expires_helper(hass, mqtt_mock, caplog)
async def test_setting_sensor_value_expires(hass, mqtt_mock, caplog):
"""Test the expiration of the value."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"expire_after": 4,
"force_update": True,
}
},
)
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
await expires_helper(hass, mqtt_mock, caplog)
async def expires_helper(hass, mqtt_mock, caplog):
"""Run the basic expiry code."""
now = datetime(2017, 1, 1, 1, tzinfo=dt_util.UTC)
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=now):
async_fire_time_changed(hass, now)
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
# Value was set correctly.
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
# Time jump +3s
now = now + timedelta(seconds=3)
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
# Value is not yet expired
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
# Next message resets timer
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=now):
async_fire_time_changed(hass, now)
async_fire_mqtt_message(hass, "test-topic", "OFF")
await hass.async_block_till_done()
# Value was updated correctly.
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
# Time jump +3s
now = now + timedelta(seconds=3)
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
# Value is not yet expired
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
# Time jump +2s
now = now + timedelta(seconds=2)
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
# Value is expired now
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNAVAILABLE
async def test_setting_sensor_value_via_mqtt_message(hass, mqtt_mock):
"""Test the setting of the value via MQTT."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
}
},
)
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
async_fire_mqtt_message(hass, "test-topic", "ON")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "test-topic", "OFF")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
async def test_setting_sensor_value_via_mqtt_message_and_template(hass, mqtt_mock):
"""Test the setting of the value via MQTT."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
"value_template": '{%if is_state(entity_id,"on")-%}OFF'
"{%-else-%}ON{%-endif%}",
}
},
)
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
async_fire_mqtt_message(hass, "test-topic", "")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "test-topic", "")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
async def test_valid_device_class(hass, mqtt_mock):
"""Test the setting of a valid sensor class."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"device_class": "motion",
"state_topic": "test-topic",
}
},
)
state = hass.states.get("binary_sensor.test")
assert state.attributes.get("device_class") == "motion"
async def test_invalid_device_class(hass, mqtt_mock):
"""Test the setting of an invalid sensor class."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"device_class": "abc123",
"state_topic": "test-topic",
}
},
)
state = hass.states.get("binary_sensor.test")
assert state is None
async def test_availability_without_topic(hass, mqtt_mock):
"""Test availability without defined availability topic."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
}
},
)
state = hass.states.get("binary_sensor.test")
assert state.state != STATE_UNAVAILABLE
async def test_availability_by_defaults(hass, mqtt_mock):
"""Test availability by defaults with defined topic."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"availability_topic": "availability-topic",
}
},
)
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNAVAILABLE
async_fire_mqtt_message(hass, "availability-topic", "online")
state = hass.states.get("binary_sensor.test")
assert state.state != STATE_UNAVAILABLE
async_fire_mqtt_message(hass, "availability-topic", "offline")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNAVAILABLE
async def test_availability_by_custom_payload(hass, mqtt_mock):
"""Test availability by custom payload with defined topic."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"availability_topic": "availability-topic",
"payload_available": "good",
"payload_not_available": "nogood",
}
},
)
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNAVAILABLE
async_fire_mqtt_message(hass, "availability-topic", "good")
state = hass.states.get("binary_sensor.test")
assert state.state != STATE_UNAVAILABLE
async_fire_mqtt_message(hass, "availability-topic", "nogood")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNAVAILABLE
async def test_force_update_disabled(hass, mqtt_mock):
"""Test force update option."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
}
},
)
events = []
@ha.callback
def callback(event):
"""Verify event got called."""
events.append(event)
hass.bus.async_listen(EVENT_STATE_CHANGED, callback)
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
assert len(events) == 1
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
assert len(events) == 1
async def test_force_update_enabled(hass, mqtt_mock):
"""Test force update option."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
"force_update": True,
}
},
)
events = []
@ha.callback
def callback(event):
"""Verify event got called."""
events.append(event)
hass.bus.async_listen(EVENT_STATE_CHANGED, callback)
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
assert len(events) == 1
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
assert len(events) == 2
async def test_off_delay(hass, mqtt_mock):
"""Test off_delay option."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
"off_delay": 30,
"force_update": True,
}
},
)
events = []
@ha.callback
def callback(event):
"""Verify event got called."""
events.append(event)
hass.bus.async_listen(EVENT_STATE_CHANGED, callback)
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
assert len(events) == 1
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
assert len(events) == 2
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30))
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
assert len(events) == 3
async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"json_attributes_topic": "attr-topic",
}
},
)
async_fire_mqtt_message(hass, "attr-topic", '{ "val": "100" }')
state = hass.states.get("binary_sensor.test")
assert state.attributes.get("val") == "100"
async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"json_attributes_topic": "attr-topic",
}
},
)
async_fire_mqtt_message(hass, "attr-topic", '[ "list", "of", "things"]')
state = hass.states.get("binary_sensor.test")
assert state.attributes.get("val") is None
assert "JSON result was not a dictionary" in caplog.text
async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"json_attributes_topic": "attr-topic",
}
},
)
async_fire_mqtt_message(hass, "attr-topic", "This is not JSON")
state = hass.states.get("binary_sensor.test")
assert state.attributes.get("val") is None
assert "Erroneous JSON: This is not JSON" in caplog.text
async def test_discovery_update_attr(hass, mqtt_mock, caplog):
"""Test update of discovered MQTTAttributes."""
entry = MockConfigEntry(domain=mqtt.DOMAIN)
await async_start(hass, "homeassistant", {}, entry)
data1 = (
'{ "name": "Beer",'
' "state_topic": "test_topic",'
' "json_attributes_topic": "attr-topic1" }'
)
data2 = (
'{ "name": "Beer",'
' "state_topic": "test_topic",'
' "json_attributes_topic": "attr-topic2" }'
)
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", data1)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "attr-topic1", '{ "val": "100" }')
state = hass.states.get("binary_sensor.beer")
assert state.attributes.get("val") == "100"
# Change json_attributes_topic
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", data2)
await hass.async_block_till_done()
# Verify we are no longer subscribing to the old topic
async_fire_mqtt_message(hass, "attr-topic1", '{ "val": "50" }')
state = hass.states.get("binary_sensor.beer")
assert state.attributes.get("val") == "100"
# Verify we are subscribing to the new topic
async_fire_mqtt_message(hass, "attr-topic2", '{ "val": "75" }')
state = hass.states.get("binary_sensor.beer")
assert state.attributes.get("val") == "75"
async def test_unique_id(hass):
"""Test unique id option only creates one sensor per unique_id."""
await async_mock_mqtt_component(hass)
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: [
{
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"unique_id": "TOTALLY_UNIQUE",
},
{
"platform": "mqtt",
"name": "Test 2",
"state_topic": "test-topic",
"unique_id": "TOTALLY_UNIQUE",
},
]
},
)
async_fire_mqtt_message(hass, "test-topic", "payload")
assert len(hass.states.async_all()) == 1
async def test_discovery_removal_binary_sensor(hass, mqtt_mock, caplog):
"""Test removal of discovered binary_sensor."""
entry = MockConfigEntry(domain=mqtt.DOMAIN)
await async_start(hass, "homeassistant", {}, entry)
data = (
'{ "name": "Beer",'
' "state_topic": "test_topic",'
' "availability_topic": "availability_topic" }'
)
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", data)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is not None
assert state.name == "Beer"
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", "")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is None
async def test_discovery_update_binary_sensor(hass, mqtt_mock, caplog):
"""Test update of discovered binary_sensor."""
entry = MockConfigEntry(domain=mqtt.DOMAIN)
await async_start(hass, "homeassistant", {}, entry)
data1 = (
'{ "name": "Beer",'
' "state_topic": "test_topic",'
' "availability_topic": "availability_topic1" }'
)
data2 = (
'{ "name": "Milk",'
' "state_topic": "test_topic2",'
' "availability_topic": "availability_topic2" }'
)
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", data1)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is not None
assert state.name == "Beer"
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", data2)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is not None
assert state.name == "Milk"
state = hass.states.get("binary_sensor.milk")
assert state is None
async def test_discovery_broken(hass, mqtt_mock, caplog):
"""Test handling of bad discovery message."""
entry = MockConfigEntry(domain=mqtt.DOMAIN)
await async_start(hass, "homeassistant", {}, entry)
data1 = '{ "name": "Beer",' ' "off_delay": -1 }'
data2 = '{ "name": "Milk",' ' "state_topic": "test_topic" }'
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", data1)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is None
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", data2)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.milk")
assert state is not None
assert state.name == "Milk"
state = hass.states.get("binary_sensor.beer")
assert state is None
async def test_entity_device_info_with_identifier(hass, mqtt_mock):
"""Test MQTT binary sensor device registry integration."""
entry = MockConfigEntry(domain=mqtt.DOMAIN)
entry.add_to_hass(hass)
await async_start(hass, "homeassistant", {}, entry)
registry = await hass.helpers.device_registry.async_get_registry()
data = json.dumps(
{
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"device": {
"identifiers": ["helloworld"],
"connections": [["mac", "02:5b:26:a8:dc:12"]],
"manufacturer": "Whatever",
"name": "Beer",
"model": "Glass",
"sw_version": "0.1-beta",
},
"unique_id": "veryunique",
}
)
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device({("mqtt", "helloworld")}, set())
assert device is not None
assert device.identifiers == {("mqtt", "helloworld")}
assert device.connections == {("mac", "02:5b:26:a8:dc:12")}
assert device.manufacturer == "Whatever"
assert device.name == "Beer"
assert device.model == "Glass"
assert device.sw_version == "0.1-beta"
async def test_entity_device_info_update(hass, mqtt_mock):
"""Test device registry update."""
entry = MockConfigEntry(domain=mqtt.DOMAIN)
entry.add_to_hass(hass)
await async_start(hass, "homeassistant", {}, entry)
registry = await hass.helpers.device_registry.async_get_registry()
config = {
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"device": {
"identifiers": ["helloworld"],
"connections": [["mac", "02:5b:26:a8:dc:12"]],
"manufacturer": "Whatever",
"name": "Beer",
"model": "Glass",
"sw_version": "0.1-beta",
},
"unique_id": "veryunique",
}
data = json.dumps(config)
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device({("mqtt", "helloworld")}, set())
assert device is not None
assert device.name == "Beer"
config["device"]["name"] = "Milk"
data = json.dumps(config)
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device({("mqtt", "helloworld")}, set())
assert device is not None
assert device.name == "Milk"
async def test_entity_id_update(hass, mqtt_mock):
"""Test MQTT subscriptions are managed when entity_id is updated."""
registry = mock_registry(hass, {})
mock_mqtt = await async_mock_mqtt_component(hass)
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: [
{
"platform": "mqtt",
"name": "beer",
"state_topic": "test-topic",
"availability_topic": "avty-topic",
"unique_id": "TOTALLY_UNIQUE",
}
]
},
)
state = hass.states.get("binary_sensor.beer")
assert state is not None
assert mock_mqtt.async_subscribe.call_count == 2
mock_mqtt.async_subscribe.assert_any_call("test-topic", ANY, 0, "utf-8")
mock_mqtt.async_subscribe.assert_any_call("avty-topic", ANY, 0, "utf-8")
mock_mqtt.async_subscribe.reset_mock()
registry.async_update_entity(
"binary_sensor.beer", new_entity_id="binary_sensor.milk"
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is None
state = hass.states.get("binary_sensor.milk")
assert state is not None
assert mock_mqtt.async_subscribe.call_count == 2
mock_mqtt.async_subscribe.assert_any_call("test-topic", ANY, 0, "utf-8")
mock_mqtt.async_subscribe.assert_any_call("avty-topic", ANY, 0, "utf-8")
|
|
"""Model module."""
from __future__ import unicode_literals
from .utils import tostr
from django.db import models
from django.contrib.auth.models import User
from django.utils.encoding import python_2_unicode_compatible
from django.db.models import F
from django.dispatch import receiver
from django.db.models.signals import pre_save, post_save, m2m_changed
from address.models import AddressField, Country
from time import time
from datetime import date, datetime
from re import compile
class Account(models.Model):
"""Account model."""
user = models.OneToOneField(User, primary_key=True)
avatar = models.ForeignKey('Media', related_name='+', null=True)
address = AddressField(null=True, blank=True, related_name='+')
languages = models.ManyToManyField(
Country, blank=True, default=[], related_name='+'
)
class Tag(models.Model):
"""Tag model."""
name = models.CharField(max_length=50, primary_key=True)
RE = compile(r'#\w+')
@staticmethod
def settags(commentedelt, tagsfield):
"""Set tags to commentedelt from input txt."""
tagsfieldattr = getattr(commentedelt, tagsfield)
tags = [
tag[1:] for tag in Tag.RE.findall(tagsfieldattr)
]
commentedelt.tags.set(tags)
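# Illustrative sketch (assumption): Tag.settags() replaces an object's tags
# with the hashtag names found in one of its text fields.
#
#     topic.description = 'Bike lanes #paris #mobility'
#     Tag.settags(topic, 'description')  # tags now reference the 'paris' and 'mobility' Tag rows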
class CommentedElement(models.Model):
"""Commented element model."""
created = models.DateTimeField(blank=True, default=datetime.now)
modified = models.DateTimeField(blank=True, default=None, null=True)
tags = models.ManyToManyField(
Tag, blank=True, default=[], related_name='tagged'
)
@property
def score(self):
"""Score."""
return sum(v.value for v in self.votes.all()) / self.votes.count()
@property
def type(self):
"""Get type name."""
return type(self).__name__
@receiver(pre_save, sender=CommentedElement)
def update(sender, instance, **kwargs):
"""Update modified."""
instance.modified = datetime.now()
class Comment(CommentedElement):
"""Comment model."""
author = models.ForeignKey(Account, blank=True, related_name='comments')
cited = models.ManyToManyField(
Account, default=[], blank=True, related_name='cited'
)
content = models.CharField(max_length=255, db_index=True)
commentated = models.ForeignKey(
CommentedElement, related_name='comments', blank=True
)
CITED_RE = compile(r'@\w+')
def setcited(self):
"""Set cited."""
cited = [cited[1:] for cited in Comment.CITED_RE.findall(self.content)]
self.cited.set(cited)
@receiver(pre_save, sender=Comment)
def updatecomment(sender, instance, **kwargs):
"""Update modified time if element is updated."""
Tag.settags(instance, 'content')
instance.setcited()
class Media(models.Model):
"""Media model."""
file = models.FileField()
url = models.CharField(max_length=255)
source = models.ForeignKey(
CommentedElement, blank=True, related_name='medias'
)
lang = models.CharField(max_length=5, blank=True, default='en-en')
class AdministratedElement(CommentedElement):
"""Contact element model."""
name = models.CharField(max_length=50, blank=True, db_index=True)
description = models.CharField(max_length=255, blank=True, db_index=True)
public = models.BooleanField(default=True, blank=True, db_index=True)
admins = models.ManyToManyField(
Account, default=[], blank=True, related_name='administrated'
)
def __str__(self):
"""Get output."""
return '{0} - {1}'.format(self.name, self.description)
@receiver(m2m_changed, sender=AdministratedElement.admins.through)
def avoidnoneadmins(action, instance, pk_set, **kwargs):
"""Calculate stats related to need relationship."""
if action == 'pre_clear':
raise ValueError()
elif action == 'post_remove' and instance.admins.count() == 0:
raise ValueError()
@receiver(pre_save, sender=AdministratedElement)
def updateadministratedelt(sender, instance, **kwargs):
"""Update modified time if element is updated."""
Tag.settags(instance, 'description')
class Topic(AdministratedElement):
"""Topic model."""
base = models.OneToOneField(
AdministratedElement,
parent_link=True, related_name='_topic', blank=True
)
class Space(AdministratedElement):
"""Space model."""
address = AddressField(blank=True)
lon = models.FloatField(blank=True, db_index=True)
lat = models.FloatField(blank=True, db_index=True)
base = models.OneToOneField(
AdministratedElement,
parent_link=True, related_name='_space', blank=True
)
@property
def sorteddebates(self):
"""Get sorted topics by voting score."""
return sorted(
list(self.topics.filter(event=None)), key=lambda topic: topic.score,
reverse=True
)
class Event(AdministratedElement):
"""Event model."""
date = models.DateField(blank=True, db_index=True)
mduration = models.IntegerField(default=60, blank=True, db_index=True)
space = models.ForeignKey(
Space, blank=True, related_name='events'
)
topic = models.ForeignKey(Topic, blank=True, related_name='events')
base = models.OneToOneField(
AdministratedElement,
parent_link=True, related_name='_event', blank=True
)
address = AddressField(blank=True)
lon = models.FloatField(blank=True, db_index=True)
lat = models.FloatField(blank=True, db_index=True)
class Vote(models.Model):
"""Vote model."""
account = models.ForeignKey(Account, blank=True, related_name='votes')
voted = models.ForeignKey(
CommentedElement, blank=True, related_name='votes'
)
value = models.IntegerField()
class Meta:
"""Vote meta class."""
unique_together = ('account', 'voted')
@python_2_unicode_compatible
class ForbiddenEmail(models.Model):
"""Forbidden email."""
email = models.CharField(max_length=255, primary_key=True)
def __str__(self):
"""Representation."""
return tostr(self, 'email')
@python_2_unicode_compatible
class Stat(models.Model):
"""Statistical model."""
date = models.DateField(default=date.today, primary_key=True)
accounts = models.IntegerField(default=0)
topics = models.IntegerField(default=0)
spaces = models.IntegerField(default=0)
votes = models.IntegerField(default=0)
def __str__(self):
"""Representation."""
return tostr(
self, 'date', 'accounts', 'topics', 'spaces', 'votes'
)
class ProjectionEntry(models.Model):
"""Projection Entry."""
event = models.OneToOneField(Event, blank=True)
question = models.CharField(max_length=255, db_index=True)
answers = models.CharField(max_length=255, db_index=True)
class EventType(models.Model):
"""Type model."""
name = models.CharField(max_length=50, primary_key=True)
description = models.CharField(max_length=255)
class EventTypeChoice():
"""Event type choice model."""
choice = models.ForeignKey(EventType, blank=True)
event = models.ForeignKey(Event, blank=True)
participative = models.BooleanField(default=False, blank=True)
def getorcreatestat(**kwargs):
"""Get or create a stat with input field and value."""
result, created = Stat.objects.get_or_create(
date=date.today(), defaults=kwargs
)
if not created:
for field in kwargs:
kwargs[field] = F(field) + kwargs[field]
Stat.objects.filter(date=result.date).update(**kwargs)
return result
@receiver(post_save, sender=Account)
@receiver(post_save, sender=Topic)
@receiver(post_save, sender=Vote)
@receiver(post_save, sender=Space)
def updatenewitems(sender, instance, created, **kwargs):
"""Save duration in stats."""
if created:
params = {
'{0}s'.format(type(instance).__name__.lower()): 1
}
getorcreatestat(**params)
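# Illustrative usage sketch (assumption): counters can also be bumped by hand,
# e.g. after a bulk import. When today's Stat row already exists the values are
# added via F() expressions, otherwise a new row is created with them.
#
#     getorcreatestat(accounts=10, topics=3)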
|
|
#!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2017
# Leandro Toledo de Souza <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
import json
import os
from io import BytesIO
import pytest
from flaky import flaky
from telegram import Sticker, TelegramError, PhotoSize, InputFile
@pytest.fixture()
def photo_file():
f = open('tests/data/telegram.jpg', 'rb')
yield f
f.close()
@pytest.fixture(scope='class')
def _photo(bot, chat_id):
with open('tests/data/telegram.jpg', 'rb') as f:
return bot.send_photo(chat_id, photo=f, timeout=10).photo
@pytest.fixture(scope='class')
def thumb(_photo):
return _photo[0]
@pytest.fixture(scope='class')
def photo(_photo):
return _photo[1]
class TestPhoto:
width = 300
height = 300
caption = u'PhotoTest - Caption'
photo_file_url = 'https://python-telegram-bot.org/static/testfiles/telegram.jpg'
file_size = 10209
def test_creation(self, thumb, photo):
# Make sure file has been uploaded.
assert isinstance(photo, PhotoSize)
assert isinstance(photo.file_id, str)
assert photo.file_id != ''
assert isinstance(thumb, PhotoSize)
assert isinstance(thumb.file_id, str)
assert thumb.file_id != ''
def test_expected_values(self, photo):
assert photo.width == self.width
assert photo.height == self.height
assert photo.file_size == self.file_size
@flaky(3, 1)
@pytest.mark.timeout(10)
def test_sendphoto_all_args(self, bot, chat_id, photo_file, thumb, photo):
message = bot.sendPhoto(chat_id, photo_file, caption=self.caption,
disable_notification=False)
assert isinstance(message.photo[0], PhotoSize)
assert isinstance(message.photo[0].file_id, str)
assert message.photo[0].file_id != ''
assert message.photo[0].width == thumb.width
assert message.photo[0].height == thumb.height
assert message.photo[0].file_size == thumb.file_size
assert isinstance(message.photo[1], PhotoSize)
assert isinstance(message.photo[1].file_id, str)
assert message.photo[1].file_id != ''
assert message.photo[1].width == photo.width
assert message.photo[1].height == photo.height
assert message.photo[1].file_size == photo.file_size
assert message.caption == TestPhoto.caption
@flaky(3, 1)
@pytest.mark.timeout(10)
def test_get_and_download(self, bot, photo):
new_file = bot.getFile(photo.file_id)
assert new_file.file_size == photo.file_size
assert new_file.file_id == photo.file_id
assert new_file.file_path.startswith('https://') is True
new_file.download('telegram.jpg')
assert os.path.isfile('telegram.jpg') is True
@flaky(3, 1)
@pytest.mark.timeout(10)
def test_send_url_jpg_file(self, bot, chat_id, thumb, photo):
message = bot.sendPhoto(chat_id, photo=self.photo_file_url)
assert isinstance(message.photo[0], PhotoSize)
assert isinstance(message.photo[0].file_id, str)
assert message.photo[0].file_id != ''
assert message.photo[0].width == thumb.width
assert message.photo[0].height == thumb.height
assert message.photo[0].file_size == thumb.file_size
assert isinstance(message.photo[1], PhotoSize)
assert isinstance(message.photo[1].file_id, str)
assert message.photo[1].file_id != ''
assert message.photo[1].width == photo.width
assert message.photo[1].height == photo.height
assert message.photo[1].file_size == photo.file_size
@flaky(3, 1)
@pytest.mark.timeout(10)
def test_send_url_png_file(self, bot, chat_id):
message = bot.sendPhoto(photo='http://dummyimage.com/600x400/000/fff.png&text=telegram',
chat_id=chat_id)
photo = message.photo[-1]
assert isinstance(photo, PhotoSize)
assert isinstance(photo.file_id, str)
assert photo.file_id != ''
@flaky(3, 1)
@pytest.mark.timeout(10)
def test_send_url_gif_file(self, bot, chat_id):
message = bot.sendPhoto(photo='http://dummyimage.com/600x400/000/fff.png&text=telegram',
chat_id=chat_id)
photo = message.photo[-1]
assert isinstance(photo, PhotoSize)
assert isinstance(photo.file_id, str)
assert photo.file_id != ''
@flaky(3, 1)
@pytest.mark.timeout(10)
def test_send_bytesio_jpg_file(self, bot, chat_id):
file_name = 'tests/data/telegram_no_standard_header.jpg'
# raw image bytes
raw_bytes = BytesIO(open(file_name, 'rb').read())
inputfile = InputFile({"photo": raw_bytes})
assert inputfile.mimetype == 'application/octet-stream'
# raw image bytes with name info
raw_bytes = BytesIO(open(file_name, 'rb').read())
raw_bytes.name = file_name
inputfile = InputFile({"photo": raw_bytes})
assert inputfile.mimetype == 'image/jpeg'
# send raw photo
raw_bytes = BytesIO(open(file_name, 'rb').read())
message = bot.sendPhoto(chat_id, photo=raw_bytes)
photo = message.photo[-1]
assert isinstance(photo.file_id, str)
assert photo.file_id != ''
assert isinstance(photo, PhotoSize)
assert photo.width == 1920
assert photo.height == 1080
assert photo.file_size == 30907
def test_send_with_photosize(self, monkeypatch, bot, chat_id, photo):
def test(_, url, data, **kwargs):
return data['photo'] == photo.file_id
monkeypatch.setattr("telegram.utils.request.Request.post", test)
message = bot.send_photo(photo=photo, chat_id=chat_id)
assert message
@flaky(3, 1)
@pytest.mark.timeout(10)
def test_resend(self, bot, chat_id, photo):
message = bot.sendPhoto(chat_id=chat_id, photo=photo.file_id)
thumb, photo = message.photo
assert isinstance(message.photo[0], PhotoSize)
assert isinstance(message.photo[0].file_id, str)
assert message.photo[0].file_id != ''
assert message.photo[0].width == thumb.width
assert message.photo[0].height == thumb.height
assert message.photo[0].file_size == thumb.file_size
assert isinstance(message.photo[1], PhotoSize)
assert isinstance(message.photo[1].file_id, str)
assert message.photo[1].file_id != ''
assert message.photo[1].width == photo.width
assert message.photo[1].height == photo.height
assert message.photo[1].file_size == photo.file_size
def test_de_json(self, bot, photo):
json_dict = {
'file_id': photo.file_id,
'width': self.width,
'height': self.height,
'file_size': self.file_size
}
json_photo = PhotoSize.de_json(json_dict, bot)
assert json_photo.file_id == photo.file_id
assert json_photo.width == self.width
assert json_photo.height == self.height
assert json_photo.file_size == self.file_size
def test_to_json(self, photo):
json.loads(photo.to_json())
def test_to_dict(self, photo):
photo_dict = photo.to_dict()
assert isinstance(photo_dict, dict)
assert photo_dict['file_id'] == photo.file_id
assert photo_dict['width'] == photo.width
assert photo_dict['height'] == photo.height
assert photo_dict['file_size'] == photo.file_size
@flaky(3, 1)
@pytest.mark.timeout(10)
def test_error_send_empty_file(self, bot, chat_id):
with pytest.raises(TelegramError):
bot.sendPhoto(chat_id=chat_id, photo=open(os.devnull, 'rb'))
@flaky(3, 1)
@pytest.mark.timeout(10)
def test_error_send_empty_file_id(self, bot, chat_id):
with pytest.raises(TelegramError):
bot.sendPhoto(chat_id=chat_id, photo='')
@flaky(3, 1)
@pytest.mark.timeout(10)
def test_error_without_required_args(self, bot, chat_id):
with pytest.raises(TypeError):
bot.sendPhoto(chat_id=chat_id)
def test_equality(self, photo):
a = PhotoSize(photo.file_id, self.width, self.height)
b = PhotoSize(photo.file_id, self.width, self.height)
c = PhotoSize(photo.file_id, 0, 0)
d = PhotoSize("", self.width, self.height)
e = Sticker(photo.file_id, self.width, self.height)
assert a == b
assert hash(a) == hash(b)
assert a is not b
assert a == c
assert hash(a) == hash(c)
assert a != d
assert hash(a) != hash(d)
assert a != e
assert hash(a) != hash(e)
|
|
"""An object-local variable management scheme."""
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import json
import weakref
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_io_ops as io_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import saveable_object
from tensorflow.python.util import nest
from tensorflow.python.util import serialization
from tensorflow.python.util import tf_decorator
# Key where the object graph proto is saved in a TensorBundle
OBJECT_GRAPH_PROTO_KEY = "_CHECKPOINTABLE_OBJECT_GRAPH"
# A key indicating a variable's value in an object's checkpointed Tensors
# (Checkpointable._gather_saveables_for_checkpoint). If this is the only key and
# the object has no dependencies, then its value may be restored on object
# creation (avoiding double assignment when executing eagerly).
VARIABLE_VALUE_KEY = "VARIABLE_VALUE"
OBJECT_CONFIG_JSON_KEY = "OBJECT_CONFIG_JSON"
CheckpointableReference = collections.namedtuple(
"CheckpointableReference",
[
# The local name for this dependency.
"name",
# The Checkpointable object being referenced.
"ref"
])
class CheckpointInitialValue(ops.Tensor):
"""Tensor wrapper for managing update UIDs in `Variables`.
When supplied as an initial value, objects of this type let a `Variable`
(`Variable`, `ResourceVariable`, etc.) know the UID of the restore the initial
value came from. This allows deferred restorations to be sequenced in the
order the user specified them, and lets us fall back on assignment if an
initial value is not set (e.g. due to a custom getter interfering).
See comments in _add_variable_with_custom_getter for more information about
how `CheckpointInitialValue` is used.
"""
def __init__(self, checkpoint_position, shape=None):
self.wrapped_value = checkpoint_position.value_tensors()[
VARIABLE_VALUE_KEY]
if shape:
# We need to set the static shape information on the initializer if
# possible so we don't get a variable with an unknown shape.
self.wrapped_value.set_shape(shape)
self._checkpoint_position = checkpoint_position
@property
def __class__(self):
return (self.wrapped_value.__class__, CheckpointInitialValue)
def __getattr__(self, attr):
try:
return getattr(self.wrapped_value, attr)
except AttributeError:
return self.__getattribute__(attr)
@property
def checkpoint_position(self):
return self._checkpoint_position
class PythonStringStateSaveable(saveable_object.SaveableObject):
"""Saves Python state in a checkpoint."""
def __init__(self, name, state_callback):
"""Configure saving.
Args:
name: The checkpoint key to write to.
state_callback: A function taking no arguments which returns a
string. This function is run every time a checkpoint is written.
"""
if context.executing_eagerly():
self._save_string = (
lambda: constant_op.constant(state_callback(), dtype=dtypes.string))
else:
self._save_string = constant_op.constant("", dtype=dtypes.string)
self.feed_dict_additions = (
lambda: {self._save_string: state_callback()})
spec = saveable_object.SaveSpec(
self._save_string, "", name, dtype=dtypes.string)
super(PythonStringStateSaveable, self).__init__(
self._save_string, [spec], name)
def restore(self, restored_tensors, restored_shapes):
# TODO(allenl): Add a Python hook for state coming out of a checkpoint
# (currently PythonStringStateSaveable is write-only).
return control_flow_ops.no_op()
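# Illustrative sketch (not part of the original module): wrapping a piece of
# Python-side state so it is written into the checkpoint as a string. The
# callback is re-evaluated each time a checkpoint is saved.
#
#   saveable = PythonStringStateSaveable(
#       name="py_state",
#       state_callback=lambda: json.dumps({"epoch": 3}))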
class _CheckpointPosition(object):
"""Indicates a position within a `_Checkpoint`."""
def __init__(self, checkpoint, proto_id):
"""Specify an object within a checkpoint.
Args:
checkpoint: A _Checkpoint object.
proto_id: The index of this object in CheckpointableObjectGraph.nodes.
"""
self._checkpoint = checkpoint
self._proto_id = proto_id
def restore(self, checkpointable):
"""Restore this value into `checkpointable`."""
with ops.init_scope():
if self.bind_object(checkpointable):
# This object's correspondence with a checkpointed object is new, so
# process deferred restorations for it and its dependencies.
restore_ops = checkpointable._restore_from_checkpoint_position(self) # pylint: disable=protected-access
if restore_ops:
self._checkpoint.restore_ops.extend(restore_ops)
def bind_object(self, checkpointable):
"""Set a checkpoint<->object correspondence and process slot variables.
Args:
checkpointable: The object to record a correspondence for.
Returns:
True if this is a new assignment, False if this object has already been
mapped to a checkpointed `Object` proto.
Raises:
AssertionError: If another object is already bound to the `Object` proto.
"""
checkpoint = self.checkpoint
checkpoint.all_python_objects.add(checkpointable)
current_assignment = checkpoint.object_by_proto_id.get(self._proto_id, None)
if current_assignment is None:
checkpoint.object_by_proto_id[self._proto_id] = checkpointable
for deferred_slot_restoration in (
checkpoint.deferred_slot_restorations.pop(self._proto_id, ())):
checkpointable._create_or_restore_slot_variable( # pylint: disable=protected-access
slot_variable_position=_CheckpointPosition(
checkpoint=checkpoint,
proto_id=deferred_slot_restoration.slot_variable_id),
variable=deferred_slot_restoration.original_variable,
slot_name=deferred_slot_restoration.slot_name)
for slot_restoration in checkpoint.slot_restorations.pop(
self._proto_id, ()):
optimizer_object = checkpoint.object_by_proto_id.get(
slot_restoration.optimizer_id, None)
if optimizer_object is None:
# The optimizer has not yet been created or tracked. Record in the
# checkpoint that the slot variables need to be restored when it is.
checkpoint.deferred_slot_restorations.setdefault(
slot_restoration.optimizer_id, []).append(
_DeferredSlotVariableRestoration(
original_variable=checkpointable,
slot_variable_id=slot_restoration.slot_variable_id,
slot_name=slot_restoration.slot_name))
else:
optimizer_object._create_or_restore_slot_variable( # pylint: disable=protected-access
slot_variable_position=_CheckpointPosition(
checkpoint=checkpoint,
proto_id=slot_restoration.slot_variable_id),
variable=checkpointable,
slot_name=slot_restoration.slot_name)
return True # New assignment
else:
# The object was already mapped for this checkpoint load, which means
# we don't need to do anything besides check that the mapping is
# consistent (if the dependency DAG is not a tree then there are
# multiple paths to the same object).
if current_assignment is not checkpointable:
logging.warning(
("Inconsistent references when loading the checkpoint into this "
"object graph. Either the Checkpointable object references in the "
"Python program have changed in an incompatible way, or the "
"checkpoint was generated in an incompatible program.\n\nTwo "
"checkpoint references resolved to different objects (%s and %s).")
% (current_assignment, checkpointable))
return False # Not a new assignment
def is_simple_variable(self):
"""Determine whether this value is restorable with a Tensor initializer."""
attributes = self.object_proto.attributes
return (len(attributes) == 1
and attributes[0].name == VARIABLE_VALUE_KEY
and not self.object_proto.children)
def value_tensors(self):
"""Create value `Tensor`s for this object's attributes.
Does not require that the Python object has been created. Used for
restore-on-create when executing eagerly.
Returns:
A dictionary mapping from object attribute names to `Tensor`s.
"""
value_tensors = {}
for serialized_tensor in self.object_proto.attributes:
checkpoint_key = serialized_tensor.checkpoint_key
dtype = self._checkpoint.dtype_map[checkpoint_key]
base_type = dtype.base_dtype
with ops.init_scope():
with ops.device("/cpu:0"):
# Run the restore itself on the CPU.
value, = io_ops.restore_v2(
prefix=self._checkpoint.save_path,
tensor_names=[checkpoint_key],
shape_and_slices=[""],
dtypes=[base_type],
name="%s_checkpoint_read" % (serialized_tensor.name,))
# Copy the value to the current device if necessary.
value_tensors[serialized_tensor.name] = array_ops.identity(value)
return value_tensors
def restore_ops(self):
"""Create or fetch restore ops for this object's attributes.
Requires that the `Checkpointable` Python object has been bound to an object
ID in the checkpoint.
Returns:
A list of operations when graph building, or an empty list when executing
eagerly.
"""
saveables = self.checkpointable._gather_saveables_for_checkpoint() # pylint: disable=protected-access
# Name saveables based on the name this object had when it was checkpointed.
named_saveables = {}
restore_ops = []
building_graph = not context.executing_eagerly()
for serialized_tensor in self.object_proto.attributes:
saveable_factory = saveables.get(serialized_tensor.name, None)
if saveable_factory is None:
# Purposefully does not throw an exception if attributes have been added
# or deleted. Stores unused attributes so an exception can be raised if
# the user decides to check that everything in the checkpoint was
# loaded.
self._checkpoint.unused_attributes.setdefault(
self.checkpointable, []).append(serialized_tensor.name)
continue
if building_graph:
existing_ops = self._checkpoint.restore_ops_by_name.get(
serialized_tensor.name, None)
else:
existing_ops = None
if existing_ops is None:
if callable(saveable_factory):
saveable = saveable_factory(name=serialized_tensor.checkpoint_key)
else:
saveable = saveable_factory
named_saveables[serialized_tensor.checkpoint_key] = saveable
if named_saveables:
validated_saveables = (
self._checkpoint.builder._ValidateAndSliceInputs(named_saveables)) # pylint: disable=protected-access
validated_names = set(saveable.name for saveable in validated_saveables)
if set(named_saveables.keys()) != validated_names:
raise AssertionError(
("Saveable keys changed when validating. Got back %s, was "
"expecting %s") % (named_saveables.keys(), validated_names))
all_tensors = self._checkpoint.builder.bulk_restore(
filename_tensor=self._checkpoint.save_path,
saveables=validated_saveables, preferred_shard=-1,
restore_sequentially=False)
saveable_index = 0
for saveable in validated_saveables:
num_specs = len(saveable.specs)
saveable_tensors = all_tensors[
saveable_index:saveable_index + num_specs]
saveable_index += num_specs
restore_op = saveable.restore(saveable_tensors, restored_shapes=None)
if building_graph:
assert saveable.name not in self._checkpoint.restore_ops_by_name
self._checkpoint.restore_ops_by_name[saveable.name] = restore_op
restore_ops.append(restore_op)
return restore_ops
@property
def checkpoint(self):
return self._checkpoint
@property
def checkpointable(self):
return self._checkpoint.object_by_proto_id[self._proto_id]
@property
def object_proto(self):
return self._checkpoint.object_graph_proto.nodes[self._proto_id]
@property
def restore_uid(self):
return self._checkpoint.restore_uid
def __repr__(self):
return repr(self.object_proto)
_DeferredSlotVariableRestoration = collections.namedtuple(
"_DeferredSlotVariableRestoration",
[
"original_variable",
"slot_variable_id",
"slot_name",
]
)
_SlotVariableRestoration = collections.namedtuple(
"_SlotVariableRestoration",
[
# The checkpoint proto id of the optimizer object.
"optimizer_id",
# The checkpoint proto id of the slot variable.
"slot_variable_id",
"slot_name",
])
def no_automatic_dependency_tracking(method):
"""Disables automatic dependency tracking on attribute assignment.
Use to decorate any method of a Checkpointable object. Attribute assignment in
that method will not add dependencies (also respected in Model). Harmless if
used in a class which does not do automatic dependency tracking (which means
it's safe to use in base classes which may have subclasses which also inherit
from Checkpointable).
Args:
method: The method to decorate.
Returns:
A decorated method which sets and un-sets automatic dependency tracking for
the object the method is called on (not thread safe).
"""
def _method_wrapper(self, *args, **kwargs):
previous_value = getattr(self, "_setattr_tracking", True)
self._setattr_tracking = False # pylint: disable=protected-access
try:
method(self, *args, **kwargs)
finally:
self._setattr_tracking = previous_value # pylint: disable=protected-access
return tf_decorator.make_decorator(
target=method, decorator_func=_method_wrapper)
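# Illustrative sketch (not part of the original module): how the decorator
# above is typically applied. The class and attribute names are hypothetical;
# any object whose __setattr__ honors the `_setattr_tracking` flag (such as
# the Checkpointable subclasses defined below) behaves the same way.
class _NoTrackingExample(object):
  """Example-only class demonstrating `no_automatic_dependency_tracking`."""

  @no_automatic_dependency_tracking
  def _build(self):
    # While `_build` runs, `self._setattr_tracking` is False, so a class with
    # a dependency-tracking `__setattr__` would skip tracking this assignment.
    self.scratch_buffers = []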
class CheckpointableBase(object):
"""Base class for `Checkpointable` objects without automatic dependencies.
This class has no __setattr__ override for performance reasons. Dependencies
must be added explicitly. Unless attribute assignment is performance-critical,
use `Checkpointable` instead. Use `CheckpointableBase` for `isinstance`
checks.
"""
# CheckpointableBase does not do automatic dependency tracking, but uses the
# no_automatic_dependency_tracking decorator so it can avoid adding
# dependencies if a subclass is Checkpointable / inherits from Model (both of
# which have __setattr__ overrides).
@no_automatic_dependency_tracking
def _maybe_initialize_checkpointable(self):
"""Initialize dependency management.
Not __init__, since most objects will forget to call it.
"""
if hasattr(self, "_unconditional_checkpoint_dependencies"):
# __init__ already called. This check means that we don't need
# Checkpointable.__init__() in the constructor of every TensorFlow object.
return
# A list of CheckpointableReference objects. Some classes implementing
# `Checkpointable`, notably `Optimizer`s, may override the
# _checkpoint_dependencies property with conditional dependencies
# (e.g. based on the current graph when saving).
self._unconditional_checkpoint_dependencies = []
# Maps names -> Checkpointable objects
self._unconditional_dependency_names = {}
# Restorations for other Checkpointable objects on which this object may
# eventually depend. Maps local name -> _CheckpointPosition list. Optimizers
# tack on conditional dependencies, and so need separate management of
# deferred dependencies too.
self._unconditional_deferred_dependencies = {}
# The UID of the highest assignment to this object. Used to ensure that the
# last requested assignment determines the final value of an object.
if hasattr(self, "_update_uid"):
raise AssertionError(
"Internal error: the object had an update UID set before its "
"initialization code was run.")
self._update_uid = -1
# When executing eagerly, holds a collection of _NameBasedRestoreCoordinator
# instances, which should be checked when creating variables or other
# saveables. These are passed on recursively to all dependencies, since
# unlike object-based checkpoint restores we don't know which subgraph is
# being restored in advance. This mechanism is only necessary for
# restore-on-create when executing eagerly, and so is unused when graph
# building.
self._name_based_restores = set()
def _no_dependency(self, value):
"""If automatic dependency tracking is enabled, ignores `value`."""
return value
def _name_based_attribute_restore(self, checkpoint):
"""Restore the object's attributes from a name-based checkpoint."""
self._name_based_restores.add(checkpoint)
if self._update_uid < checkpoint.restore_uid:
checkpoint.eager_restore(self)
self._update_uid = checkpoint.restore_uid
@property
def _checkpoint_dependencies(self):
"""All dependencies of this object.
May be overridden to include conditional dependencies.
Returns:
A list of `CheckpointableReference` objects indicating named
`Checkpointable` dependencies which should be saved along with this
object.
"""
return self._unconditional_checkpoint_dependencies
@property
def _deferred_dependencies(self):
"""A dictionary with deferred dependencies.
Stores restorations for other Checkpointable objects on which this object
may eventually depend. May be overridden by sub-classes (e.g. Optimizers use
    conditional dependencies based on the current graph, and so need separate
management of deferred dependencies too).
Returns:
A dictionary mapping from local name to a list of _CheckpointPosition
objects.
"""
return self._unconditional_deferred_dependencies
def _lookup_dependency(self, name):
"""Look up a dependency by name.
May be overridden to include conditional dependencies.
Args:
name: The local name of the dependency.
Returns:
A `Checkpointable` object, or `None` if no dependency by this name was
found.
"""
return self._unconditional_dependency_names.get(name, None)
def _add_variable_with_custom_getter(
self, name, shape=None, dtype=dtypes.float32,
initializer=None, getter=None, overwrite=False,
**kwargs_for_getter):
"""Restore-on-create for a variable be saved with this `Checkpointable`.
If the user has requested that this object or another `Checkpointable` which
depends on this object be restored from a checkpoint (deferred loading
before variable object creation), `initializer` may be ignored and the value
from the checkpoint used instead.
Args:
name: A name for the variable. Must be unique within this object.
shape: The shape of the variable.
dtype: The data type of the variable.
initializer: The initializer to use. Ignored if there is a deferred
restoration left over from a call to
`_restore_from_checkpoint_position`.
getter: The getter to wrap which actually fetches the variable.
overwrite: If True, disables unique name and type checks.
**kwargs_for_getter: Passed to the getter.
Returns:
The new variable object.
Raises:
ValueError: If the variable name is not unique.
"""
self._maybe_initialize_checkpointable()
if not overwrite and self._lookup_dependency(name) is not None:
raise ValueError(
("A variable named '%s' already exists in this Checkpointable, but "
"Checkpointable._add_variable called to create another with "
"that name. Variable names must be unique within a Checkpointable "
"object.") % (name,))
with ops.init_scope():
if context.executing_eagerly():
# If this is a variable with a single Tensor stored in the checkpoint,
# we can set that value as an initializer rather than initializing and
# then assigning (when executing eagerly). This call returns None if
# there is nothing to restore.
checkpoint_initializer = self._preload_simple_restoration(
name=name, shape=shape)
else:
checkpoint_initializer = None
if (checkpoint_initializer is not None
and not (
isinstance(initializer, CheckpointInitialValue)
and (initializer.restore_uid
> checkpoint_initializer.restore_uid))):
# If multiple Checkpointable objects are "creating" the same variable
# via the magic of custom getters, the one with the highest restore UID
# (the one called last) has to make the final initializer. If another
# custom getter interrupts this process by overwriting the initializer,
# then we'll catch that when we call _track_checkpointable. So this is
# "best effort" to set the initializer with the highest restore UID.
initializer = checkpoint_initializer
shape = None
new_variable = getter(
name=name, shape=shape, dtype=dtype, initializer=initializer,
**kwargs_for_getter)
# If we set an initializer and the variable processed it, tracking will not
# assign again. It will add this variable to our dependencies, and if there
# is a non-trivial restoration queued, it will handle that. This also
# handles slot variables.
if not overwrite or isinstance(new_variable, CheckpointableBase):
return self._track_checkpointable(new_variable, name=name,
overwrite=overwrite)
else:
# TODO(allenl): Some variable types are not yet supported. Remove this
# fallback once all get_variable() return types are Checkpointable.
return new_variable
def _preload_simple_restoration(self, name, shape):
"""Return a dependency's value for restore-on-create.
Note the restoration is not deleted; if for some reason preload is called
and then not assigned to the variable (for example because a custom getter
overrides the initializer), the assignment will still happen once the
variable is tracked (determined based on checkpoint.restore_uid).
Args:
name: The object-local name of the dependency holding the variable's
value.
shape: The shape of the variable being loaded into.
Returns:
      A callable for use as a variable's initializer/initial_value, or None if
one should not be set (either because there was no variable with this name
in the checkpoint or because it needs more complex deserialization). Any
non-trivial deserialization will happen when the variable object is
tracked.
"""
deferred_dependencies_list = self._deferred_dependencies.get(name, ())
if not deferred_dependencies_list:
# Nothing to do; we don't have a restore for this dependency queued up.
return
for checkpoint_position in deferred_dependencies_list:
if not checkpoint_position.is_simple_variable():
# If _any_ pending restoration is too complicated to fit in an
# initializer (because it has dependencies, or because there are
# multiple Tensors to restore), bail and let the general tracking code
# handle it.
return None
checkpoint_position = max(
deferred_dependencies_list,
key=lambda restore: restore.checkpoint.restore_uid)
return CheckpointInitialValue(
checkpoint_position=checkpoint_position, shape=shape)
def _track_checkpointable(self, checkpointable, name, overwrite=False):
"""Declare a dependency on another `Checkpointable` object.
Indicates that checkpoints for this object should include variables from
`checkpointable`.
Variables in a checkpoint are mapped to `Checkpointable`s based on the names
provided when the checkpoint was written. To avoid breaking existing
checkpoints when modifying a class, neither variable names nor dependency
names (the names passed to `_track_checkpointable`) may change.
Args:
checkpointable: A `Checkpointable` which this object depends on.
name: A local name for `checkpointable`, used for loading checkpoints into
the correct objects.
overwrite: Boolean, whether silently replacing dependencies is OK. Used
for __setattr__, where throwing an error on attribute reassignment would
be inappropriate.
Returns:
`checkpointable`, for convenience when declaring a dependency and
assigning to a member variable in one statement.
Raises:
TypeError: If `checkpointable` does not inherit from `Checkpointable`.
ValueError: If another object is already tracked by this name.
"""
self._maybe_initialize_checkpointable()
if not isinstance(checkpointable, CheckpointableBase):
raise TypeError(
("Checkpointable._track_checkpointable() passed type %s, not a "
"Checkpointable.") % (type(checkpointable),))
new_reference = CheckpointableReference(name=name, ref=checkpointable)
current_object = self._lookup_dependency(name)
if (current_object is not None
and current_object is not checkpointable):
if not overwrite:
raise ValueError(
("Called Checkpointable._track_checkpointable() with name='%s', "
"but a Checkpointable with this name is already declared as a "
"dependency. Names must be unique (or overwrite=True).") % (name,))
# This is a weird thing to do, but we're not going to stop people from
# using __setattr__.
for index, (old_name, _) in enumerate(
self._unconditional_checkpoint_dependencies):
if name == old_name:
self._unconditional_checkpoint_dependencies[index] = new_reference
elif current_object is None:
self._unconditional_checkpoint_dependencies.append(new_reference)
self._unconditional_dependency_names[name] = checkpointable
self._handle_deferred_dependencies(
name=name, checkpointable=checkpointable)
return checkpointable
def _handle_deferred_dependencies(self, name, checkpointable):
"""Pop and load any deferred checkpoint restores into `checkpointable`.
This method does not add a new dependency on `checkpointable`, but it does
check if any outstanding/deferred dependencies have been queued waiting for
this dependency to be added (matched based on `name`). If so,
`checkpointable` and its dependencies are restored. The restorations are
considered fulfilled and so are deleted.
`_track_checkpointable` is more appropriate for adding a
normal/unconditional dependency, and includes handling for deferred
restorations. This method allows objects such as `Optimizer` to use the same
restoration logic while managing conditional dependencies themselves, by
overriding `_checkpoint_dependencies` and `_lookup_dependency` to change the
object's dependencies based on the context it is saved/restored in (a single
optimizer instance can have state associated with multiple graphs).
Args:
name: The name of the dependency within this object (`self`), used to
match `checkpointable` with values saved in a checkpoint.
checkpointable: The Checkpointable object to restore (inheriting from
`CheckpointableBase`).
"""
self._maybe_initialize_checkpointable()
checkpointable._maybe_initialize_checkpointable() # pylint: disable=protected-access
deferred_dependencies_list = self._deferred_dependencies.pop(name, ())
for checkpoint_position in sorted(
deferred_dependencies_list,
key=lambda restore: restore.checkpoint.restore_uid,
reverse=True):
checkpoint_position.restore(checkpointable)
# Pass on any name-based restores queued in this object.
for name_based_restore in sorted(
self._name_based_restores,
key=lambda checkpoint: checkpoint.restore_uid,
reverse=True):
checkpointable._name_based_attribute_restore(name_based_restore) # pylint: disable=protected-access
def _restore_from_checkpoint_position(self, checkpoint_position):
"""Restore this object and its dependencies (may be deferred)."""
# Attempt a breadth-first traversal, since presumably the user has more
# control over shorter paths. If we don't have all of the dependencies at
# this point, the end result is not breadth-first (since other deferred
# traversals will happen later).
visit_queue = collections.deque([checkpoint_position])
restore_ops = []
while visit_queue:
current_position = visit_queue.popleft()
restore_ops.extend(nest.flatten(
current_position.checkpointable # pylint: disable=protected-access
._single_restoration_from_checkpoint_position(
checkpoint_position=current_position,
visit_queue=visit_queue)))
return restore_ops
def _single_restoration_from_checkpoint_position(
self, checkpoint_position, visit_queue):
"""Restore this object, and either queue its dependencies or defer them."""
self._maybe_initialize_checkpointable()
checkpoint = checkpoint_position.checkpoint
# If the UID of this restore is lower than our current update UID, we don't
# need to actually restore the object. However, we should pass the
# restoration on to our dependencies.
if checkpoint.restore_uid > self._update_uid:
restore_ops = checkpoint_position.restore_ops()
self._update_uid = checkpoint.restore_uid
else:
restore_ops = ()
for child in checkpoint_position.object_proto.children:
child_position = _CheckpointPosition(
checkpoint=checkpoint,
proto_id=child.node_id)
local_object = self._lookup_dependency(child.local_name)
if local_object is None:
# We don't yet have a dependency registered with this name. Save it
# in case we do.
self._deferred_dependencies.setdefault(child.local_name, []).append(
child_position)
else:
if child_position.bind_object(checkpointable=local_object):
# This object's correspondence is new, so dependencies need to be
# visited. Delay doing it so that we get a breadth-first dependency
# resolution order (shallowest paths first). The caller is responsible
# for emptying visit_queue.
visit_queue.append(child_position)
return restore_ops
def _gather_saveables_for_checkpoint(self):
"""Returns a dictionary of values to checkpoint with this object.
Keys in the returned dictionary are local to this object and in a separate
namespace from dependencies. Values may either be `SaveableObject` factories
or variables easily converted to `SaveableObject`s (as in `tf.train.Saver`'s
`var_list` constructor argument).
`SaveableObjects` have a name set, which Checkpointable needs to generate
itself. So rather than returning `SaveableObjects` directly, this method
should return a dictionary of callables which take `name` arguments and
return `SaveableObjects` with that name.
If this object may also be passed to the global-name-based `tf.train.Saver`,
the returned callables should have a default value for their name argument
(i.e. be callable with no arguments).
Returned values must be saved only by this object; if any value may be
shared, it should instead be a dependency. For example, variable objects
save their own values with the key `VARIABLE_VALUE_KEY`, but objects which
reference variables simply add a dependency.
Returns:
The dictionary mapping attribute names to `SaveableObject` factories
described above. For example:
{VARIABLE_VALUE_KEY:
lambda name="global_name_for_this_object":
SaveableObject(name=name, ...)}
"""
if not hasattr(self, "get_config"):
return {}
try:
self.get_config()
except NotImplementedError:
return {}
weak_self = weakref.ref(self)
def _state_callback():
dereferenced_self = weak_self()
if dereferenced_self:
        return json.dumps(dereferenced_self,
default=serialization.get_json_type,
sort_keys=True).encode("utf8")
else:
return ""
return {OBJECT_CONFIG_JSON_KEY: functools.partial(
PythonStringStateSaveable,
state_callback=_state_callback)}
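# Illustrative sketch (not part of the original module): explicit dependency
# tracking with `CheckpointableBase`. The class and the "child" name below are
# hypothetical and only for demonstration.
class _ExplicitDependencyExample(CheckpointableBase):
  """Example-only container declaring a single named dependency."""

  def __init__(self, child):
    # CheckpointableBase does not track attribute assignment automatically,
    # so the dependency on `child` is declared explicitly. Checkpoints of this
    # object will then include `child`'s variables under the local name
    # "child".
    self._maybe_initialize_checkpointable()
    self.child = self._track_checkpointable(child, name="child")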
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test of Policy Engine For Manila."""
import os.path
import StringIO
import urllib2
from manila import context
from manila import exception
import manila.openstack.common.policy
from manila.openstack.common import policy as common_policy
from manila import policy
from manila import test
from manila import utils
from oslo.config import cfg
CONF = cfg.CONF
class PolicyFileTestCase(test.TestCase):
def setUp(self):
super(PolicyFileTestCase, self).setUp()
# since is_admin is defined by policy, create context before reset
self.context = context.RequestContext('fake', 'fake')
policy.reset()
self.target = {}
def tearDown(self):
super(PolicyFileTestCase, self).tearDown()
policy.reset()
def test_modified_policy_reloads(self):
with utils.tempdir() as tmpdir:
tmpfilename = os.path.join(tmpdir, 'policy')
self.flags(policy_file=tmpfilename)
action = "example:test"
with open(tmpfilename, "w") as policyfile:
policyfile.write("""{"example:test": []}""")
policy.enforce(self.context, action, self.target)
with open(tmpfilename, "w") as policyfile:
policyfile.write("""{"example:test": ["false:false"]}""")
# NOTE(vish): reset stored policy cache so we don't have to
# sleep(1)
policy._POLICY_CACHE = {}
self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
self.context, action, self.target)
class PolicyTestCase(test.TestCase):
def setUp(self):
super(PolicyTestCase, self).setUp()
policy.reset()
# NOTE(vish): preload rules to circumvent reloading from file
policy.init()
rules = {
"true": [],
"example:allowed": [],
"example:denied": [["false:false"]],
"example:get_http": [["http:http://www.example.com"]],
"example:my_file": [["role:compute_admin"],
["project_id:%(project_id)s"]],
"example:early_and_fail": [["false:false", "rule:true"]],
"example:early_or_success": [["rule:true"], ["false:false"]],
"example:lowercase_admin": [["role:admin"], ["role:sysadmin"]],
"example:uppercase_admin": [["role:ADMIN"], ["role:sysadmin"]],
}
# NOTE(vish): then overload underlying brain
common_policy.set_brain(common_policy.HttpBrain(rules))
self.context = context.RequestContext('fake', 'fake', roles=['member'])
self.target = {}
def tearDown(self):
policy.reset()
super(PolicyTestCase, self).tearDown()
def test_enforce_nonexistent_action_throws(self):
action = "example:noexist"
self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
self.context, action, self.target)
def test_enforce_bad_action_throws(self):
action = "example:denied"
self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
self.context, action, self.target)
def test_enforce_good_action(self):
action = "example:allowed"
policy.enforce(self.context, action, self.target)
def test_enforce_http_true(self):
def fakeurlopen(url, post_data):
return StringIO.StringIO("True")
self.stubs.Set(urllib2, 'urlopen', fakeurlopen)
action = "example:get_http"
target = {}
result = policy.enforce(self.context, action, target)
self.assertEqual(result, None)
def test_enforce_http_false(self):
def fakeurlopen(url, post_data):
return StringIO.StringIO("False")
self.stubs.Set(urllib2, 'urlopen', fakeurlopen)
action = "example:get_http"
target = {}
self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
self.context, action, target)
def test_templatized_enforcement(self):
target_mine = {'project_id': 'fake'}
target_not_mine = {'project_id': 'another'}
action = "example:my_file"
policy.enforce(self.context, action, target_mine)
self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
self.context, action, target_not_mine)
def test_early_AND_enforcement(self):
action = "example:early_and_fail"
self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
self.context, action, self.target)
def test_early_OR_enforcement(self):
action = "example:early_or_success"
policy.enforce(self.context, action, self.target)
def test_ignore_case_role_check(self):
lowercase_action = "example:lowercase_admin"
uppercase_action = "example:uppercase_admin"
# NOTE(dprince) we mix case in the Admin role here to ensure
# case is ignored
admin_context = context.RequestContext('admin',
'fake',
roles=['AdMiN'])
policy.enforce(admin_context, lowercase_action, self.target)
policy.enforce(admin_context, uppercase_action, self.target)
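def _example_enforce_usage():
    """Illustrative sketch (not part of the test suite) of the enforcement flow
    exercised above. The rule and role names are hypothetical."""
    policy.reset()
    policy.init()
    rules = {"example:allowed": []}  # an empty rule list always authorizes
    common_policy.set_brain(common_policy.HttpBrain(rules))
    ctx = context.RequestContext('fake', 'fake', roles=['member'])
    # enforce() returns None when authorized and raises
    # exception.PolicyNotAuthorized otherwise.
    policy.enforce(ctx, "example:allowed", {})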
class DefaultPolicyTestCase(test.TestCase):
def setUp(self):
super(DefaultPolicyTestCase, self).setUp()
policy.reset()
policy.init()
self.rules = {
"default": [],
"example:exist": [["false:false"]]
}
self._set_brain('default')
self.context = context.RequestContext('fake', 'fake')
def _set_brain(self, default_rule):
brain = manila.openstack.common.policy.HttpBrain(self.rules,
default_rule)
manila.openstack.common.policy.set_brain(brain)
def tearDown(self):
super(DefaultPolicyTestCase, self).tearDown()
policy.reset()
def test_policy_called(self):
self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
self.context, "example:exist", {})
def test_not_found_policy_calls_default(self):
policy.enforce(self.context, "example:noexist", {})
def test_default_not_found(self):
self._set_brain("default_noexist")
self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
self.context, "example:noexist", {})
class ContextIsAdminPolicyTestCase(test.TestCase):
def setUp(self):
super(ContextIsAdminPolicyTestCase, self).setUp()
policy.reset()
policy.init()
def test_default_admin_role_is_admin(self):
ctx = context.RequestContext('fake', 'fake', roles=['johnny-admin'])
self.assertFalse(ctx.is_admin)
ctx = context.RequestContext('fake', 'fake', roles=['admin'])
self.assert_(ctx.is_admin)
def test_custom_admin_role_is_admin(self):
        # define explicit rules for context_is_admin
rules = {
'context_is_admin': [["role:administrator"], ["role:johnny-admin"]]
}
brain = common_policy.Brain(rules, CONF.policy_default_rule)
common_policy.set_brain(brain)
ctx = context.RequestContext('fake', 'fake', roles=['johnny-admin'])
self.assert_(ctx.is_admin)
ctx = context.RequestContext('fake', 'fake', roles=['administrator'])
self.assert_(ctx.is_admin)
# default rule no longer applies
ctx = context.RequestContext('fake', 'fake', roles=['admin'])
self.assertFalse(ctx.is_admin)
def test_context_is_admin_undefined(self):
rules = {
"admin_or_owner": [["role:admin"], ["project_id:%(project_id)s"]],
"default": [["rule:admin_or_owner"]],
}
brain = common_policy.Brain(rules, CONF.policy_default_rule)
common_policy.set_brain(brain)
ctx = context.RequestContext('fake', 'fake')
self.assertFalse(ctx.is_admin)
ctx = context.RequestContext('fake', 'fake', roles=['admin'])
self.assert_(ctx.is_admin)
|
|
"""Utility functions for copying files and directory trees.
XXX The functions here don't copy the resource fork or other metadata on Mac.
"""
import os
import sys
import stat
from os.path import abspath
__all__ = ["copyfileobj","copyfile","copymode","copystat","copy","copy2",
"copytree","move","rmtree","Error"]
class Error(EnvironmentError):
pass
def copyfileobj(fsrc, fdst, length=16*1024):
"""copy data from file-like object fsrc to file-like object fdst"""
while 1:
buf = fsrc.read(length)
if not buf:
break
fdst.write(buf)
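def _example_copyfileobj():
    """Illustrative sketch (not part of the original module): copyfileobj works
    on any pair of file-like objects, for example in-memory buffers."""
    import StringIO
    src = StringIO.StringIO("hello world")
    dst = StringIO.StringIO()
    copyfileobj(src, dst)
    assert dst.getvalue() == "hello world"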
def _samefile(src, dst):
# Macintosh, Unix.
if hasattr(os.path,'samefile'):
try:
return os.path.samefile(src, dst)
except OSError:
return False
# All other platforms: check for same pathname.
return (os.path.normcase(os.path.abspath(src)) ==
os.path.normcase(os.path.abspath(dst)))
def copyfile(src, dst):
"""Copy data from src to dst"""
if _samefile(src, dst):
raise Error, "`%s` and `%s` are the same file" % (src, dst)
fsrc = None
fdst = None
try:
fsrc = open(src, 'rb')
fdst = open(dst, 'wb')
copyfileobj(fsrc, fdst)
finally:
if fdst:
fdst.close()
if fsrc:
fsrc.close()
def copymode(src, dst):
"""Copy mode bits from src to dst"""
if hasattr(os, 'chmod'):
st = os.stat(src)
mode = stat.S_IMODE(st.st_mode)
os.chmod(dst, mode)
def copystat(src, dst):
"""Copy all stat info (mode bits, atime and mtime) from src to dst"""
st = os.stat(src)
mode = stat.S_IMODE(st.st_mode)
if hasattr(os, 'utime'):
os.utime(dst, (st.st_atime, st.st_mtime))
if hasattr(os, 'chmod'):
os.chmod(dst, mode)
def copy(src, dst):
"""Copy data and mode bits ("cp src dst").
The destination may be a directory.
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
copyfile(src, dst)
copymode(src, dst)
def copy2(src, dst):
"""Copy data and all stat info ("cp -p src dst").
The destination may be a directory.
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
copyfile(src, dst)
copystat(src, dst)
def copytree(src, dst, symlinks=False):
"""Recursively copy a directory tree using copy2().
The destination directory must not already exist.
If exception(s) occur, an Error is raised with a list of reasons.
If the optional symlinks flag is true, symbolic links in the
source tree result in symbolic links in the destination tree; if
it is false, the contents of the files pointed to by symbolic
links are copied.
XXX Consider this example code rather than the ultimate tool.
"""
names = os.listdir(src)
os.makedirs(dst)
errors = []
for name in names:
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks)
else:
copy2(srcname, dstname)
# XXX What about devices, sockets etc.?
except (IOError, os.error), why:
errors.append((srcname, dstname, str(why)))
# catch the Error from the recursive copytree so that we can
# continue with other files
except Error, err:
errors.extend(err.args[0])
try:
copystat(src, dst)
except WindowsError:
# can't copy file access times on Windows
pass
except OSError, why:
errors.extend((src, dst, str(why)))
if errors:
raise Error, errors
def rmtree(path, ignore_errors=False, onerror=None):
"""Recursively delete a directory tree.
If ignore_errors is set, errors are ignored; otherwise, if onerror
is set, it is called to handle the error with arguments (func,
path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
path is the argument to that function that caused it to fail; and
exc_info is a tuple returned by sys.exc_info(). If ignore_errors
is false and onerror is None, an exception is raised.
"""
if ignore_errors:
def onerror(*args):
pass
elif onerror is None:
def onerror(*args):
raise
names = []
try:
names = os.listdir(path)
except os.error, err:
onerror(os.listdir, path, sys.exc_info())
for name in names:
fullname = os.path.join(path, name)
try:
mode = os.lstat(fullname).st_mode
except os.error:
mode = 0
if stat.S_ISDIR(mode):
rmtree(fullname, ignore_errors, onerror)
else:
try:
os.remove(fullname)
except os.error, err:
onerror(os.remove, fullname, sys.exc_info())
try:
os.rmdir(path)
except os.error:
onerror(os.rmdir, path, sys.exc_info())
def move(src, dst):
"""Recursively move a file or directory to another location.
If the destination is on our current filesystem, then simply use
rename. Otherwise, copy src to the dst and then remove src.
A lot more could be done here... A look at a mv.c shows a lot of
the issues this implementation glosses over.
"""
try:
os.rename(src, dst)
except OSError:
if os.path.isdir(src):
if destinsrc(src, dst):
raise Error, "Cannot move a directory '%s' into itself '%s'." % (src, dst)
copytree(src, dst, symlinks=True)
rmtree(src)
else:
copy2(src,dst)
os.unlink(src)
def destinsrc(src, dst):
return abspath(dst).startswith(abspath(src))
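def _example_usage():
    """Illustrative sketch (not part of the original module): typical use of the
    helpers above. The paths are assumptions for demonstration only."""
    copy2('/tmp/example-src.txt', '/tmp/example-dst.txt')   # data + stat info
    copytree('/tmp/example-srcdir', '/tmp/example-dstdir')  # dst must not exist
    move('/tmp/example-dstdir', '/tmp/example-archive')     # rename or copy+rm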
|
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Serves content for "script" handlers using an HTTP runtime.
http_runtime supports two ways to start the runtime instance.
START_PROCESS sends the runtime_config protobuf (serialized and base64 encoded
as not all platforms support binary data over stdin) to the runtime instance
over stdin and requires the runtime instance to send the port it is listening on
over stdout.
START_PROCESS_FILE creates two temporary files and adds the paths of both files
to the runtime instance command line. The first file is written by http_runtime
with the runtime_config proto (serialized); the runtime instance is expected to
delete the file after reading it. The second file is written by the runtime
instance with the port it is listening on (the line must be newline terminated);
http_runtime is expected to delete the file after reading it.
START_PROCESS_REVERSE works by passing config in via a file and passes the HTTP
port number created in http_runtime.py as an environment variable to the runtime
process.
START_PROCESS_REVERSE_NO_FILE is equivalent to START_PROCESS, but passes the HTTP
port number created in http_runtime.py as an environment variable to the runtime
process.
"""
import base64
import logging
import os
import subprocess
import sys
import threading
import time
import portpicker
from google.appengine.tools.devappserver2 import application_configuration
from google.appengine.tools.devappserver2 import http_proxy
from google.appengine.tools.devappserver2 import http_runtime_constants
from google.appengine.tools.devappserver2 import instance
from google.appengine.tools.devappserver2 import safe_subprocess
from google.appengine.tools.devappserver2 import tee
# These are different approaches to passing configuration into the runtimes
# and getting configuration back out of the runtime.
# Works by passing config in via stdin and reading the port over stdout.
START_PROCESS = -1
# Works by passing config in via a file and reading the port over a file.
START_PROCESS_FILE = -2
# Works by passing config in via a file and passes the port via
# a command line flag.
START_PROCESS_REVERSE = -3
# Works by passing config in via stdin and passes the port in via
# an environment variable.
START_PROCESS_REVERSE_NO_FILE = -4
def _sleep_between_retries(attempt, max_attempts, sleep_base):
"""Sleep between retry attempts.
Do an exponential backoff between retry attempts on an operation. The general
pattern for use is:
for attempt in range(max_attempts):
# Try operation, either return or break on success
_sleep_between_retries(attempt, max_attempts, sleep_base)
Args:
attempt: Which attempt just failed (0 based).
max_attempts: The maximum number of attempts that will be made.
sleep_base: How long in seconds to sleep between the first and second
attempt (the time will be doubled between each successive attempt). The
value may be any numeric type that is convertible to float (complex
won't work but user types that are sufficiently numeric-like will).
"""
# Don't sleep after the last attempt as we're about to give up.
if attempt < (max_attempts - 1):
time.sleep((2 ** attempt) * sleep_base)
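def _example_retry_with_backoff(operation, max_attempts=3, sleep_base=0.125):
  """Illustrative sketch (not part of the original module) of the retry pattern
  described above. `operation` is a hypothetical callable that raises OSError
  on transient failure and returns a value on success."""
  for attempt in range(max_attempts):
    try:
      return operation()
    except OSError:
      if attempt == max_attempts - 1:
        raise
      _sleep_between_retries(attempt, max_attempts, sleep_base)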
def _remove_retry_sharing_violation(path, max_attempts=10, sleep_base=.125):
"""Removes a file (with retries on Windows for sharing violations).
Args:
path: The filesystem path to remove.
max_attempts: The maximum number of attempts to try to remove the path
before giving up.
sleep_base: How long in seconds to sleep between the first and second
attempt (the time will be doubled between each successive attempt). The
value may be any numeric type that is convertible to float (complex
won't work but user types that are sufficiently numeric-like will).
Raises:
WindowsError: When an error other than a sharing violation occurs.
"""
if sys.platform == 'win32':
for attempt in range(max_attempts):
try:
os.remove(path)
break
except WindowsError as e:
import winerror
# Sharing violations are expected to occasionally occur when the runtime
# instance is context swapped after writing the port but before closing
# the file. Ignore these and try again.
if e.winerror != winerror.ERROR_SHARING_VIOLATION:
raise
_sleep_between_retries(attempt, max_attempts, sleep_base)
else:
logging.warn('Unable to delete %s', path)
else:
os.remove(path)
def get_vm_environment_variables(module_configuration, runtime_config):
"""Returns VM-specific environment variables."""
keys_values = [
('API_HOST', runtime_config.api_host),
('API_PORT', runtime_config.api_port),
('GAE_LONG_APP_ID', module_configuration.application_external_name),
('GAE_PARTITION', module_configuration.partition),
('GAE_MODULE_NAME', module_configuration.module_name),
('GAE_MODULE_VERSION', module_configuration.major_version),
('GAE_MINOR_VERSION', module_configuration.minor_version),
('GAE_MODULE_INSTANCE', runtime_config.instance_id),
('GAE_SERVER_PORT', runtime_config.server_port),
('MODULE_YAML_PATH', os.path.basename(module_configuration.config_path)),
('SERVER_SOFTWARE', http_runtime_constants.SERVER_SOFTWARE),
]
for entry in runtime_config.environ:
keys_values.append((entry.key, entry.value))
return {key: str(value) for key, value in keys_values}
class HttpRuntimeProxy(instance.RuntimeProxy):
"""Manages a runtime subprocess used to handle dynamic content."""
_VALID_START_PROCESS_FLAVORS = [START_PROCESS, START_PROCESS_FILE,
START_PROCESS_REVERSE,
START_PROCESS_REVERSE_NO_FILE]
# TODO: Determine if we can always use SIGTERM.
# Set this to True to quit with SIGTERM rather than SIGKILL
_quit_with_sigterm = False
@classmethod
def stop_runtimes_with_sigterm(cls, quit_with_sigterm):
"""Configures the http_runtime module to kill the runtimes with SIGTERM.
Args:
quit_with_sigterm: True to enable stopping runtimes with SIGTERM.
Returns:
The previous value.
"""
previous_quit_with_sigterm = cls._quit_with_sigterm
cls._quit_with_sigterm = quit_with_sigterm
return previous_quit_with_sigterm
def __init__(self, args, runtime_config_getter, module_configuration,
env=None, start_process_flavor=START_PROCESS,
extra_args_getter=None):
"""Initializer for HttpRuntimeProxy.
Args:
args: Arguments to use to start the runtime subprocess.
runtime_config_getter: A function that can be called without arguments
and returns the runtime_config_pb2.Config containing the configuration
for the runtime.
module_configuration: An application_configuration.ModuleConfiguration
        instance representing the configuration of the module that owns the
runtime.
env: A dict of environment variables to pass to the runtime subprocess.
      start_process_flavor: Which flavor of process startup to use for the
        runtime process. Supported flavors are START_PROCESS,
        START_PROCESS_FILE, START_PROCESS_REVERSE and
        START_PROCESS_REVERSE_NO_FILE.
extra_args_getter: A function that can be called with a port number picked
by this http_runtime,
and returns the extra command line parameter that refers to the port
number.
Raises:
ValueError: An unknown value for start_process_flavor was used.
"""
super(HttpRuntimeProxy, self).__init__()
self._process = None
self._process_lock = threading.Lock() # Lock to guard self._process.
self._stderr_tee = None
self._runtime_config_getter = runtime_config_getter
self._extra_args_getter = extra_args_getter
self._args = args
self._module_configuration = module_configuration
self._env = env
# This sets environment variables at the process level and works for
# Java and Go. Python hacks os.environ to not really return the environment
# variables, so Python needs to set these elsewhere.
runtime_config = self._runtime_config_getter()
if runtime_config.vm:
self._env.update(get_vm_environment_variables(
self._module_configuration, runtime_config))
if start_process_flavor not in self._VALID_START_PROCESS_FLAVORS:
raise ValueError('Invalid start_process_flavor.')
self._start_process_flavor = start_process_flavor
self._proxy = None
def _get_instance_logs(self):
# Give the runtime process a bit of time to write to stderr.
time.sleep(0.1)
return self._stderr_tee.get_buf()
def _instance_died_unexpectedly(self):
with self._process_lock:
# If self._process is None then the process hasn't started yet, so it
      # hasn't died either. Otherwise, if self._process.poll() returns a
# non-None value then the process has exited and the poll() value is
# its return code.
return self._process and self._process.poll() is not None
def handle(self, environ, start_response, url_map, match, request_id,
request_type):
"""Serves this request by forwarding it to the runtime process.
Args:
environ: An environ dict for the request as defined in PEP-333.
start_response: A function with semantics defined in PEP-333.
url_map: An appinfo.URLMap instance containing the configuration for the
handler matching this request.
match: A re.MatchObject containing the result of the matched URL pattern.
request_id: A unique string id associated with the request.
request_type: The type of the request. See instance.*_REQUEST module
constants.
Yields:
A sequence of strings containing the body of the HTTP response.
"""
return self._proxy.handle(environ, start_response, url_map, match,
request_id, request_type)
def _read_start_process_file(self, max_attempts=10, sleep_base=.125):
"""Read the single line response expected in the start process file.
The START_PROCESS_FILE flavor uses a file for the runtime instance to
report back the port it is listening on. We can't rely on EOF semantics
as that is a race condition when the runtime instance is simultaneously
writing the file while the devappserver process is reading it; rather we
rely on the line being terminated with a newline.
Args:
max_attempts: The maximum number of attempts to read the line.
sleep_base: How long in seconds to sleep between the first and second
attempt (the time will be doubled between each successive attempt). The
value may be any numeric type that is convertible to float (complex
won't work but user types that are sufficiently numeric-like will).
Returns:
If a full single line (as indicated by a newline terminator) is found, all
data read up to that point is returned; return an empty string if no
newline is read before the process exits or the max number of attempts are
made.
"""
try:
for attempt in range(max_attempts):
        # Yes, the final data may already be in the file even though the
        # process exited. That said, the process is expected to stay alive;
        # if it has exited, something is wrong and we don't care anyway.
if self._process.poll() is not None:
return ''
# On Mac, if the first read in this process occurs before the data is
# written, no data will ever be read by this process without the seek.
self._process.child_out.seek(0)
line = self._process.child_out.read()
if '\n' in line:
return line
_sleep_between_retries(attempt, max_attempts, sleep_base)
finally:
self._process.child_out.close()
return ''
def start(self):
"""Starts the runtime process and waits until it is ready to serve."""
runtime_config = self._runtime_config_getter()
# TODO: Use a different process group to isolate the child process
# from signals sent to the parent. Only available in subprocess in
# Python 2.7.
assert self._start_process_flavor in self._VALID_START_PROCESS_FLAVORS
if self._start_process_flavor == START_PROCESS:
serialized_config = base64.b64encode(runtime_config.SerializeToString())
with self._process_lock:
assert not self._process, 'start() can only be called once'
self._process = safe_subprocess.start_process(
self._args,
serialized_config,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=self._env,
cwd=self._module_configuration.application_root)
port = self._process.stdout.readline()
elif self._start_process_flavor == START_PROCESS_FILE:
serialized_config = runtime_config.SerializeToString()
with self._process_lock:
assert not self._process, 'start() can only be called once'
self._process = safe_subprocess.start_process_file(
args=self._args,
input_string=serialized_config,
env=self._env,
cwd=self._module_configuration.application_root,
stderr=subprocess.PIPE)
port = self._read_start_process_file()
_remove_retry_sharing_violation(self._process.child_out.name)
elif self._start_process_flavor == START_PROCESS_REVERSE:
serialized_config = runtime_config.SerializeToString()
with self._process_lock:
assert not self._process, 'start() can only be called once'
port = portpicker.PickUnusedPort()
self._env['PORT'] = str(port)
# If any of the strings in args contain {port}, replace that substring
# with the selected port. This allows a user-specified runtime to
# pass the port along to the subprocess as a command-line argument.
args = [arg.replace('{port}', str(port)) for arg in self._args]
self._process = safe_subprocess.start_process_file(
args=args,
input_string=serialized_config,
env=self._env,
cwd=self._module_configuration.application_root,
stderr=subprocess.PIPE)
elif self._start_process_flavor == START_PROCESS_REVERSE_NO_FILE:
serialized_config = runtime_config.SerializeToString()
with self._process_lock:
assert not self._process, 'start() can only be called once'
port = portpicker.PickUnusedPort()
self._args.append(self._extra_args_getter(port))
self._process = safe_subprocess.start_process(
self._args,
input_string=serialized_config,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=self._env,
cwd=self._module_configuration.application_root)
# _stderr_tee may be pre-set by unit tests.
if self._stderr_tee is None:
self._stderr_tee = tee.Tee(self._process.stderr, sys.stderr)
self._stderr_tee.start()
error = None
try:
port = int(port)
except ValueError:
error = 'bad runtime process port [%r]' % port
logging.error(error)
finally:
self._proxy = http_proxy.HttpProxy(
host='localhost', port=port,
instance_died_unexpectedly=self._instance_died_unexpectedly,
instance_logs_getter=self._get_instance_logs,
error_handler_file=application_configuration.get_app_error_file(
self._module_configuration),
prior_error=error)
self._proxy.wait_for_connection()
def quit(self):
"""Causes the runtime process to exit."""
with self._process_lock:
assert self._process, 'module was not running'
try:
if HttpRuntimeProxy._quit_with_sigterm:
logging.debug('Calling process.terminate on child runtime.')
self._process.terminate()
else:
self._process.kill()
except OSError:
pass
# Mac leaks file descriptors without call to join. Suspect a race
# condition where the interpreter is unable to close the subprocess pipe
# as the thread hasn't returned from the readline call.
self._stderr_tee.join(5)
self._process = None
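# Illustrative sketch (not part of the original module): wiring up an
# HttpRuntimeProxy. `make_runtime_config` and `module_config` are hypothetical
# stand-ins for a runtime_config_pb2.Config factory and a ModuleConfiguration.
def _example_start_runtime(args, make_runtime_config, module_config):
  proxy = HttpRuntimeProxy(
      args=args,
      runtime_config_getter=make_runtime_config,
      module_configuration=module_config,
      env=dict(os.environ),
      start_process_flavor=START_PROCESS_FILE)
  proxy.start()  # spawns the runtime subprocess and waits until it is serving
  return proxy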
|
|
"""
:codeauthor: Gareth J. Greenaway ([email protected])
"""
import logging
import os
import pytest
import salt.defaults.exitcodes
from saltfactories.exceptions import FactoryNotStarted
from saltfactories.utils import random_string
from tests.support.helpers import PRE_PYTEST_SKIP_REASON
log = logging.getLogger(__name__)
@pytest.fixture(scope="package", autouse=True)
def skip_on_tcp_transport(request):
if request.config.getoption("--transport") == "tcp":
pytest.skip("Deltaproxy under the TPC transport is not working. See #61367")
@pytest.fixture
def proxy_minion_id(salt_master):
_proxy_minion_id = random_string("proxy-minion-")
try:
yield _proxy_minion_id
finally:
# Remove stale key if it exists
pytest.helpers.remove_stale_minion_key(salt_master, _proxy_minion_id)
def clear_proxy_minions(salt_master, proxy_minion_id):
for proxy in [proxy_minion_id, "dummy_proxy_one", "dummy_proxy_two"]:
pytest.helpers.remove_stale_minion_key(salt_master, proxy)
cachefile = os.path.join(
salt_master.config["cachedir"], "{}.cache".format(proxy)
)
if os.path.exists(cachefile):
os.unlink(cachefile)
@pytest.mark.slow_test
def test_exit_status_no_proxyid(salt_master, proxy_minion_id):
"""
Ensure correct exit status when --proxyid argument is missing.
"""
config_defaults = {
"metaproxy": "deltaproxy",
}
with pytest.raises(FactoryNotStarted) as exc:
factory = salt_master.salt_proxy_minion_daemon(
proxy_minion_id, include_proxyid_cli_flag=False, defaults=config_defaults
)
factory.start(start_timeout=10, max_start_attempts=1)
assert exc.value.exitcode == salt.defaults.exitcodes.EX_USAGE, exc.value
assert "Usage" in exc.value.stderr, exc.value
assert "error: salt-proxy requires --proxyid" in exc.value.stderr, exc.value
@pytest.mark.skip_on_windows(reason="Windows does not do user checks")
def test_exit_status_unknown_user(salt_master, proxy_minion_id):
"""
Ensure correct exit status when the proxy is configured to run as an
unknown user.
"""
config_defaults = {
"metaproxy": "deltaproxy",
}
with pytest.raises(FactoryNotStarted) as exc:
factory = salt_master.salt_proxy_minion_daemon(
proxy_minion_id,
overrides={"user": "unknown-user"},
defaults=config_defaults,
)
factory.start(start_timeout=10, max_start_attempts=1)
assert exc.value.exitcode == salt.defaults.exitcodes.EX_NOUSER, exc.value
assert "The user is not available." in exc.value.stderr, exc.value
@pytest.mark.slow_test
def test_exit_status_unknown_argument(salt_master, proxy_minion_id):
"""
Ensure correct exit status when an unknown argument is passed to
salt-proxy.
"""
config_defaults = {
"metaproxy": "deltaproxy",
}
with pytest.raises(FactoryNotStarted) as exc:
factory = salt_master.salt_proxy_minion_daemon(
proxy_minion_id, defaults=config_defaults
)
factory.start("--unknown-argument", start_timeout=10, max_start_attempts=1)
assert exc.value.exitcode == salt.defaults.exitcodes.EX_USAGE, exc.value
assert "Usage" in exc.value.stderr, exc.value
assert "no such option: --unknown-argument" in exc.value.stderr, exc.value
# Hangs on Windows. You can add a timeout to the proxy.run command, but then
# it just times out.
@pytest.mark.skip_on_windows(reason=PRE_PYTEST_SKIP_REASON)
def test_exit_status_correct_usage(
salt_master,
salt_cli,
proxy_minion_id,
):
"""
Ensure the salt-proxy control proxy starts and
is able to respond to test.ping, additionally ensure that
the proxies being controlled also respond to test.ping.
Finally ensure correct exit status when salt-proxy exits correctly.
Skip on Windows because daemonization not supported
"""
config_defaults = {
"metaproxy": "deltaproxy",
}
proxy_one = "dummy_proxy_one"
proxy_two = "dummy_proxy_two"
top_file = """
base:
{control}:
- controlproxy
{one}:
- {one}
{two}:
- {two}
""".format(
control=proxy_minion_id,
one=proxy_one,
two=proxy_two,
)
controlproxy_pillar_file = """
proxy:
proxytype: deltaproxy
ids:
- {}
- {}
""".format(
proxy_one, proxy_two
)
dummy_proxy_one_pillar_file = """
proxy:
proxytype: dummy
"""
dummy_proxy_two_pillar_file = """
proxy:
proxytype: dummy
"""
top_tempfile = salt_master.pillar_tree.base.temp_file("top.sls", top_file)
controlproxy_tempfile = salt_master.pillar_tree.base.temp_file(
"controlproxy.sls", controlproxy_pillar_file
)
dummy_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(proxy_one),
dummy_proxy_one_pillar_file,
)
dummy_proxy_two_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(proxy_two),
dummy_proxy_two_pillar_file,
)
with top_tempfile, controlproxy_tempfile, dummy_proxy_one_tempfile, dummy_proxy_two_tempfile:
factory = salt_master.salt_proxy_minion_daemon(
proxy_minion_id,
defaults=config_defaults,
extra_cli_arguments_after_first_start_failure=["--log-level=debug"],
start_timeout=240,
)
for minion_id in (proxy_minion_id, proxy_one, proxy_two):
factory.before_start(
pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id
)
factory.after_terminate(
pytest.helpers.remove_stale_minion_key, salt_master, minion_id
)
factory.after_terminate(
pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id
)
with factory.started():
assert factory.is_running()
            # Let's issue a ping to the control proxy
ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id)
assert ret.exitcode == 0
assert ret.json is True
# Let's issue a ping to one of the controlled proxies
ret = salt_cli.run("test.ping", minion_tgt=proxy_one)
assert ret.exitcode == 0
assert ret.json is True
# Let's issue a ping to one of the controlled proxies
ret = salt_cli.run("test.ping", minion_tgt=proxy_two)
assert ret.exitcode == 0
assert ret.json is True
# Terminate the proxy minion
ret = factory.terminate()
assert ret.exitcode == salt.defaults.exitcodes.EX_OK, ret
# Hangs on Windows. You can add a timeout to the proxy.run command, but then
# it just times out.
@pytest.mark.skip_on_windows(reason=PRE_PYTEST_SKIP_REASON)
def test_missing_pillar_file(
salt_master,
salt_cli,
proxy_minion_id,
):
"""
Ensure that the control proxy minion starts up when
pillar files for sub proxies are missing.
Skip on Windows because daemonization not supported
"""
config_defaults = {
"metaproxy": "deltaproxy",
}
proxy_one = "dummy_proxy_one"
proxy_two = "dummy_proxy_two"
top_file = """
base:
{control}:
- controlproxy
{one}:
- {one}
""".format(
control=proxy_minion_id,
one=proxy_one,
)
controlproxy_pillar_file = """
proxy:
proxytype: deltaproxy
ids:
- {}
- {}
""".format(
proxy_one, proxy_two
)
dummy_proxy_one_pillar_file = """
proxy:
proxytype: dummy
"""
top_tempfile = salt_master.pillar_tree.base.temp_file("top.sls", top_file)
controlproxy_tempfile = salt_master.pillar_tree.base.temp_file(
"controlproxy.sls", controlproxy_pillar_file
)
dummy_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(proxy_one),
dummy_proxy_one_pillar_file,
)
with top_tempfile, controlproxy_tempfile, dummy_proxy_one_tempfile:
factory = salt_master.salt_proxy_minion_daemon(
proxy_minion_id,
defaults=config_defaults,
extra_cli_arguments_after_first_start_failure=["--log-level=debug"],
start_timeout=240,
)
for minion_id in (proxy_minion_id, proxy_one, proxy_two):
factory.before_start(
pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id
)
factory.after_terminate(
pytest.helpers.remove_stale_minion_key, salt_master, minion_id
)
factory.after_terminate(
pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id
)
with factory.started():
assert factory.is_running()
            # Let's issue a ping to the control proxy
ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id)
assert ret.exitcode == 0
assert ret.json is True
# Let's issue a ping to one of the controlled proxies
ret = salt_cli.run("test.ping", minion_tgt="dummy_proxy_one")
assert ret.exitcode == 0
assert ret.json is True
# Terminate the proxy minion
ret = factory.terminate()
assert ret.exitcode == salt.defaults.exitcodes.EX_OK, ret
# Hangs on Windows. You can add a timeout to the proxy.run command, but then
# it just times out.
@pytest.mark.skip_on_windows(reason=PRE_PYTEST_SKIP_REASON)
def test_invalid_connection(
salt_master,
salt_cli,
proxy_minion_id,
):
"""
    Ensure that the control proxy minion starts up when
    some of its sub proxies raise errors while connecting.
Skip on Windows because daemonization not supported
"""
config_defaults = {
"metaproxy": "deltaproxy",
}
proxy_one = "dummy_proxy_one"
broken_proxy_one = "broken_proxy_one"
broken_proxy_two = "broken_proxy_two"
top_file = """
base:
{control}:
- controlproxy
{one}:
- {one}
{broken_proxy_one}:
- {broken_proxy_one}
{broken_proxy_two}:
- {broken_proxy_two}
""".format(
control=proxy_minion_id,
one=proxy_one,
broken_proxy_one=broken_proxy_one,
broken_proxy_two=broken_proxy_two,
)
controlproxy_pillar_file = """
proxy:
proxytype: deltaproxy
ids:
- {}
- {}
- {}
""".format(
broken_proxy_one, broken_proxy_two, proxy_one
)
dummy_proxy_one_pillar_file = """
proxy:
proxytype: dummy
"""
broken_proxy_one_pillar_file = """
proxy:
proxytype: dummy
raise_minion_error: True
"""
broken_proxy_two_pillar_file = """
proxy:
proxytype: dummy
raise_commandexec_error: True
"""
top_tempfile = salt_master.pillar_tree.base.temp_file("top.sls", top_file)
controlproxy_tempfile = salt_master.pillar_tree.base.temp_file(
"controlproxy.sls", controlproxy_pillar_file
)
dummy_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(proxy_one),
dummy_proxy_one_pillar_file,
)
broken_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(broken_proxy_one), broken_proxy_one_pillar_file
)
broken_proxy_two_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(broken_proxy_two), broken_proxy_two_pillar_file
)
with top_tempfile, controlproxy_tempfile, dummy_proxy_one_tempfile, broken_proxy_one_tempfile, broken_proxy_two_tempfile:
factory = salt_master.salt_proxy_minion_daemon(
proxy_minion_id,
defaults=config_defaults,
extra_cli_arguments_after_first_start_failure=["--log-level=debug"],
start_timeout=240,
)
for minion_id in (
proxy_minion_id,
proxy_one,
broken_proxy_one,
broken_proxy_two,
):
factory.before_start(
pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id
)
factory.after_terminate(
pytest.helpers.remove_stale_minion_key, salt_master, minion_id
)
factory.after_terminate(
pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id
)
with factory.started():
            # Let's issue a ping to the control proxy
ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id)
assert ret.exitcode == 0
assert ret.json is True
# Let's issue a ping to one of the controlled proxies
ret = salt_cli.run("test.ping", minion_tgt=proxy_one)
assert ret.exitcode == 0
assert ret.json is True
assert not factory.is_running()
assert ret.exitcode == salt.defaults.exitcodes.EX_OK, ret
|
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/bgp/global/afi-safis/afi-safi/route-selection-options/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State information for the route selection options
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__always_compare_med",
"__ignore_as_path_length",
"__external_compare_router_id",
"__advertise_inactive_routes",
"__enable_aigp",
"__ignore_next_hop_igp_metric",
)
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__always_compare_med = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="always-compare-med",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__ignore_as_path_length = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="ignore-as-path-length",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__external_compare_router_id = YANGDynClass(
base=YANGBool,
default=YANGBool("true"),
is_leaf=True,
yang_name="external-compare-router-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__advertise_inactive_routes = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="advertise-inactive-routes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__enable_aigp = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="enable-aigp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__ignore_next_hop_igp_metric = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="ignore-next-hop-igp-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"bgp",
"global",
"afi-safis",
"afi-safi",
"route-selection-options",
"state",
]
def _get_always_compare_med(self):
"""
Getter method for always_compare_med, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/always_compare_med (boolean)
YANG Description: Compare multi-exit discriminator (MED) value from
different ASes when selecting the best route. The
default behavior is to only compare MEDs for paths
received from the same AS.
"""
return self.__always_compare_med
def _set_always_compare_med(self, v, load=False):
"""
Setter method for always_compare_med, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/always_compare_med (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_always_compare_med is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_always_compare_med() directly.
YANG Description: Compare multi-exit discriminator (MED) value from
different ASes when selecting the best route. The
default behavior is to only compare MEDs for paths
received from the same AS.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="always-compare-med",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """always_compare_med must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="always-compare-med", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__always_compare_med = t
if hasattr(self, "_set"):
self._set()
def _unset_always_compare_med(self):
self.__always_compare_med = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="always-compare-med",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_ignore_as_path_length(self):
"""
Getter method for ignore_as_path_length, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/ignore_as_path_length (boolean)
YANG Description: Ignore the AS path length when selecting the best path.
The default is to use the AS path length and prefer paths
with shorter length.
"""
return self.__ignore_as_path_length
def _set_ignore_as_path_length(self, v, load=False):
"""
Setter method for ignore_as_path_length, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/ignore_as_path_length (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_ignore_as_path_length is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ignore_as_path_length() directly.
YANG Description: Ignore the AS path length when selecting the best path.
The default is to use the AS path length and prefer paths
with shorter length.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="ignore-as-path-length",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """ignore_as_path_length must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="ignore-as-path-length", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__ignore_as_path_length = t
if hasattr(self, "_set"):
self._set()
def _unset_ignore_as_path_length(self):
self.__ignore_as_path_length = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="ignore-as-path-length",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_external_compare_router_id(self):
"""
Getter method for external_compare_router_id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/external_compare_router_id (boolean)
YANG Description: When comparing similar routes received from external
BGP peers, use the router-id as a criterion to select
the active path.
"""
return self.__external_compare_router_id
def _set_external_compare_router_id(self, v, load=False):
"""
Setter method for external_compare_router_id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/external_compare_router_id (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_external_compare_router_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_external_compare_router_id() directly.
YANG Description: When comparing similar routes received from external
BGP peers, use the router-id as a criterion to select
the active path.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("true"),
is_leaf=True,
yang_name="external-compare-router-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """external_compare_router_id must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="external-compare-router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__external_compare_router_id = t
if hasattr(self, "_set"):
self._set()
def _unset_external_compare_router_id(self):
self.__external_compare_router_id = YANGDynClass(
base=YANGBool,
default=YANGBool("true"),
is_leaf=True,
yang_name="external-compare-router-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_advertise_inactive_routes(self):
"""
Getter method for advertise_inactive_routes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/advertise_inactive_routes (boolean)
YANG Description: Advertise inactive routes to external peers. The
default is to only advertise active routes.
"""
return self.__advertise_inactive_routes
def _set_advertise_inactive_routes(self, v, load=False):
"""
Setter method for advertise_inactive_routes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/advertise_inactive_routes (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_advertise_inactive_routes is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_advertise_inactive_routes() directly.
YANG Description: Advertise inactive routes to external peers. The
default is to only advertise active routes.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="advertise-inactive-routes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """advertise_inactive_routes must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="advertise-inactive-routes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__advertise_inactive_routes = t
if hasattr(self, "_set"):
self._set()
def _unset_advertise_inactive_routes(self):
self.__advertise_inactive_routes = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="advertise-inactive-routes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_enable_aigp(self):
"""
Getter method for enable_aigp, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/enable_aigp (boolean)
YANG Description: Flag to enable sending / receiving accumulated IGP
attribute in routing updates
"""
return self.__enable_aigp
def _set_enable_aigp(self, v, load=False):
"""
Setter method for enable_aigp, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/enable_aigp (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_enable_aigp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enable_aigp() directly.
YANG Description: Flag to enable sending / receiving accumulated IGP
attribute in routing updates
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="enable-aigp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """enable_aigp must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-aigp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__enable_aigp = t
if hasattr(self, "_set"):
self._set()
def _unset_enable_aigp(self):
self.__enable_aigp = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="enable-aigp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_ignore_next_hop_igp_metric(self):
"""
Getter method for ignore_next_hop_igp_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/ignore_next_hop_igp_metric (boolean)
YANG Description: Ignore the IGP metric to the next-hop when calculating
BGP best-path. The default is to select the route for
which the metric to the next-hop is lowest
"""
return self.__ignore_next_hop_igp_metric
def _set_ignore_next_hop_igp_metric(self, v, load=False):
"""
Setter method for ignore_next_hop_igp_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/ignore_next_hop_igp_metric (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_ignore_next_hop_igp_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ignore_next_hop_igp_metric() directly.
YANG Description: Ignore the IGP metric to the next-hop when calculating
BGP best-path. The default is to select the route for
which the metric to the next-hop is lowest
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="ignore-next-hop-igp-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """ignore_next_hop_igp_metric must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="ignore-next-hop-igp-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__ignore_next_hop_igp_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_ignore_next_hop_igp_metric(self):
self.__ignore_next_hop_igp_metric = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="ignore-next-hop-igp-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
always_compare_med = __builtin__.property(_get_always_compare_med)
ignore_as_path_length = __builtin__.property(_get_ignore_as_path_length)
external_compare_router_id = __builtin__.property(_get_external_compare_router_id)
advertise_inactive_routes = __builtin__.property(_get_advertise_inactive_routes)
enable_aigp = __builtin__.property(_get_enable_aigp)
ignore_next_hop_igp_metric = __builtin__.property(_get_ignore_next_hop_igp_metric)
_pyangbind_elements = OrderedDict(
[
("always_compare_med", always_compare_med),
("ignore_as_path_length", ignore_as_path_length),
("external_compare_router_id", external_compare_router_id),
("advertise_inactive_routes", advertise_inactive_routes),
("enable_aigp", enable_aigp),
("ignore_next_hop_igp_metric", ignore_next_hop_igp_metric),
]
)
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/bgp/global/afi-safis/afi-safi/route-selection-options/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State information for the route selection options
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__always_compare_med",
"__ignore_as_path_length",
"__external_compare_router_id",
"__advertise_inactive_routes",
"__enable_aigp",
"__ignore_next_hop_igp_metric",
)
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__always_compare_med = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="always-compare-med",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__ignore_as_path_length = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="ignore-as-path-length",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__external_compare_router_id = YANGDynClass(
base=YANGBool,
default=YANGBool("true"),
is_leaf=True,
yang_name="external-compare-router-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__advertise_inactive_routes = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="advertise-inactive-routes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__enable_aigp = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="enable-aigp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__ignore_next_hop_igp_metric = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="ignore-next-hop-igp-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"bgp",
"global",
"afi-safis",
"afi-safi",
"route-selection-options",
"state",
]
def _get_always_compare_med(self):
"""
Getter method for always_compare_med, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/always_compare_med (boolean)
YANG Description: Compare multi-exit discriminator (MED) value from
different ASes when selecting the best route. The
default behavior is to only compare MEDs for paths
received from the same AS.
"""
return self.__always_compare_med
def _set_always_compare_med(self, v, load=False):
"""
Setter method for always_compare_med, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/always_compare_med (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_always_compare_med is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_always_compare_med() directly.
YANG Description: Compare multi-exit discriminator (MED) value from
different ASes when selecting the best route. The
default behavior is to only compare MEDs for paths
received from the same AS.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="always-compare-med",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """always_compare_med must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="always-compare-med", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__always_compare_med = t
if hasattr(self, "_set"):
self._set()
def _unset_always_compare_med(self):
self.__always_compare_med = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="always-compare-med",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_ignore_as_path_length(self):
"""
Getter method for ignore_as_path_length, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/ignore_as_path_length (boolean)
YANG Description: Ignore the AS path length when selecting the best path.
The default is to use the AS path length and prefer paths
with shorter length.
"""
return self.__ignore_as_path_length
def _set_ignore_as_path_length(self, v, load=False):
"""
Setter method for ignore_as_path_length, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/ignore_as_path_length (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_ignore_as_path_length is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ignore_as_path_length() directly.
YANG Description: Ignore the AS path length when selecting the best path.
The default is to use the AS path length and prefer paths
with shorter length.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="ignore-as-path-length",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """ignore_as_path_length must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="ignore-as-path-length", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__ignore_as_path_length = t
if hasattr(self, "_set"):
self._set()
def _unset_ignore_as_path_length(self):
self.__ignore_as_path_length = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="ignore-as-path-length",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_external_compare_router_id(self):
"""
Getter method for external_compare_router_id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/external_compare_router_id (boolean)
YANG Description: When comparing similar routes received from external
BGP peers, use the router-id as a criterion to select
the active path.
"""
return self.__external_compare_router_id
def _set_external_compare_router_id(self, v, load=False):
"""
Setter method for external_compare_router_id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/external_compare_router_id (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_external_compare_router_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_external_compare_router_id() directly.
YANG Description: When comparing similar routes received from external
BGP peers, use the router-id as a criterion to select
the active path.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("true"),
is_leaf=True,
yang_name="external-compare-router-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """external_compare_router_id must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="external-compare-router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__external_compare_router_id = t
if hasattr(self, "_set"):
self._set()
def _unset_external_compare_router_id(self):
self.__external_compare_router_id = YANGDynClass(
base=YANGBool,
default=YANGBool("true"),
is_leaf=True,
yang_name="external-compare-router-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_advertise_inactive_routes(self):
"""
Getter method for advertise_inactive_routes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/advertise_inactive_routes (boolean)
YANG Description: Advertise inactive routes to external peers. The
default is to only advertise active routes.
"""
return self.__advertise_inactive_routes
def _set_advertise_inactive_routes(self, v, load=False):
"""
Setter method for advertise_inactive_routes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/advertise_inactive_routes (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_advertise_inactive_routes is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_advertise_inactive_routes() directly.
YANG Description: Advertise inactive routes to external peers. The
default is to only advertise active routes.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="advertise-inactive-routes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """advertise_inactive_routes must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="advertise-inactive-routes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__advertise_inactive_routes = t
if hasattr(self, "_set"):
self._set()
def _unset_advertise_inactive_routes(self):
self.__advertise_inactive_routes = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="advertise-inactive-routes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_enable_aigp(self):
"""
Getter method for enable_aigp, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/enable_aigp (boolean)
YANG Description: Flag to enable sending / receiving accumulated IGP
attribute in routing updates
"""
return self.__enable_aigp
def _set_enable_aigp(self, v, load=False):
"""
Setter method for enable_aigp, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/enable_aigp (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_enable_aigp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enable_aigp() directly.
YANG Description: Flag to enable sending / receiving accumulated IGP
attribute in routing updates
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="enable-aigp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """enable_aigp must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-aigp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__enable_aigp = t
if hasattr(self, "_set"):
self._set()
def _unset_enable_aigp(self):
self.__enable_aigp = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="enable-aigp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_ignore_next_hop_igp_metric(self):
"""
Getter method for ignore_next_hop_igp_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/ignore_next_hop_igp_metric (boolean)
YANG Description: Ignore the IGP metric to the next-hop when calculating
BGP best-path. The default is to select the route for
which the metric to the next-hop is lowest
"""
return self.__ignore_next_hop_igp_metric
def _set_ignore_next_hop_igp_metric(self, v, load=False):
"""
Setter method for ignore_next_hop_igp_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/route_selection_options/state/ignore_next_hop_igp_metric (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_ignore_next_hop_igp_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ignore_next_hop_igp_metric() directly.
YANG Description: Ignore the IGP metric to the next-hop when calculating
BGP best-path. The default is to select the route for
which the metric to the next-hop is lowest
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="ignore-next-hop-igp-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """ignore_next_hop_igp_metric must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="ignore-next-hop-igp-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__ignore_next_hop_igp_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_ignore_next_hop_igp_metric(self):
self.__ignore_next_hop_igp_metric = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="ignore-next-hop-igp-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
always_compare_med = __builtin__.property(_get_always_compare_med)
ignore_as_path_length = __builtin__.property(_get_ignore_as_path_length)
external_compare_router_id = __builtin__.property(_get_external_compare_router_id)
advertise_inactive_routes = __builtin__.property(_get_advertise_inactive_routes)
enable_aigp = __builtin__.property(_get_enable_aigp)
ignore_next_hop_igp_metric = __builtin__.property(_get_ignore_next_hop_igp_metric)
_pyangbind_elements = OrderedDict(
[
("always_compare_med", always_compare_med),
("ignore_as_path_length", ignore_as_path_length),
("external_compare_router_id", external_compare_router_id),
("advertise_inactive_routes", advertise_inactive_routes),
("enable_aigp", enable_aigp),
("ignore_next_hop_igp_metric", ignore_next_hop_igp_metric),
]
)
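# A minimal usage sketch (illustrative only, not part of the generated
# bindings): the config-false leaves above are exposed as read-only
# properties backed by their YANG defaults, and backends populate them via
# the private _set_* methods, e.g.:
#
#     opts = state()
#     opts.always_compare_med            # YANGBool("false") -> falsy
#     opts.external_compare_router_id    # YANGBool("true")  -> truthy
#     opts._set_enable_aigp(True)        # populate a leaf from a backend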
|
|
"""
homeassistant.components.media_player.chromecast
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Provides functionality to interact with Cast devices on the network.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.cast/
"""
import logging
from homeassistant.const import (
STATE_PLAYING, STATE_PAUSED, STATE_IDLE, STATE_OFF,
STATE_UNKNOWN, CONF_HOST)
from homeassistant.components.media_player import (
MediaPlayerDevice,
SUPPORT_PAUSE, SUPPORT_VOLUME_SET, SUPPORT_VOLUME_MUTE,
SUPPORT_TURN_ON, SUPPORT_TURN_OFF, SUPPORT_YOUTUBE,
SUPPORT_PREVIOUS_TRACK, SUPPORT_NEXT_TRACK,
MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO)
REQUIREMENTS = ['pychromecast==0.6.12']
CONF_IGNORE_CEC = 'ignore_cec'
CAST_SPLASH = 'https://home-assistant.io/images/cast/splash.png'
SUPPORT_CAST = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_NEXT_TRACK | SUPPORT_YOUTUBE
KNOWN_HOSTS = []
# pylint: disable=invalid-name
cast = None
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Sets up the cast platform. """
global cast
import pychromecast
cast = pychromecast
logger = logging.getLogger(__name__)
# import CEC IGNORE attributes
ignore_cec = config.get(CONF_IGNORE_CEC, [])
if isinstance(ignore_cec, list):
cast.IGNORE_CEC += ignore_cec
else:
logger.error('Chromecast config %s must be a list.', CONF_IGNORE_CEC)
hosts = []
if discovery_info and discovery_info[0] not in KNOWN_HOSTS:
hosts = [discovery_info[0]]
elif CONF_HOST in config:
hosts = [config[CONF_HOST]]
else:
hosts = (host_port[0] for host_port
in cast.discover_chromecasts()
if host_port[0] not in KNOWN_HOSTS)
casts = []
for host in hosts:
try:
casts.append(CastDevice(host))
except cast.ChromecastConnectionError:
pass
else:
KNOWN_HOSTS.append(host)
add_devices(casts)
class CastDevice(MediaPlayerDevice):
""" Represents a Cast device on the network. """
# pylint: disable=abstract-method
# pylint: disable=too-many-public-methods
def __init__(self, host):
import pychromecast.controllers.youtube as youtube
self.cast = cast.Chromecast(host)
self.youtube = youtube.YouTubeController()
self.cast.register_handler(self.youtube)
self.cast.socket_client.receiver_controller.register_status_listener(
self)
self.cast.socket_client.media_controller.register_status_listener(self)
self.cast_status = self.cast.status
self.media_status = self.cast.media_controller.status
# Entity properties and methods
@property
def should_poll(self):
return False
@property
def name(self):
""" Returns the name of the device. """
return self.cast.device.friendly_name
# MediaPlayerDevice properties and methods
@property
def state(self):
""" State of the player. """
if self.media_status is None:
return STATE_UNKNOWN
elif self.media_status.player_is_playing:
return STATE_PLAYING
elif self.media_status.player_is_paused:
return STATE_PAUSED
elif self.media_status.player_is_idle:
return STATE_IDLE
elif self.cast.is_idle:
return STATE_OFF
else:
return STATE_UNKNOWN
@property
def volume_level(self):
""" Volume level of the media player (0..1). """
return self.cast_status.volume_level if self.cast_status else None
@property
def is_volume_muted(self):
""" Boolean if volume is currently muted. """
return self.cast_status.volume_muted if self.cast_status else None
@property
def media_content_id(self):
""" Content ID of current playing media. """
return self.media_status.content_id if self.media_status else None
@property
def media_content_type(self):
""" Content type of current playing media. """
if self.media_status is None:
return None
elif self.media_status.media_is_tvshow:
return MEDIA_TYPE_TVSHOW
elif self.media_status.media_is_movie:
return MEDIA_TYPE_VIDEO
elif self.media_status.media_is_musictrack:
return MEDIA_TYPE_MUSIC
return None
@property
def media_duration(self):
""" Duration of current playing media in seconds. """
return self.media_status.duration if self.media_status else None
@property
def media_image_url(self):
""" Image url of current playing media. """
if self.media_status is None:
return None
images = self.media_status.images
return images[0].url if images else None
@property
def media_title(self):
""" Title of current playing media. """
return self.media_status.title if self.media_status else None
@property
def media_artist(self):
""" Artist of current playing media. (Music track only) """
return self.media_status.artist if self.media_status else None
@property
def media_album(self):
""" Album of current playing media. (Music track only) """
return self.media_status.album_name if self.media_status else None
@property
def media_album_artist(self):
""" Album arist of current playing media. (Music track only) """
return self.media_status.album_artist if self.media_status else None
@property
def media_track(self):
""" Track number of current playing media. (Music track only) """
return self.media_status.track if self.media_status else None
@property
def media_series_title(self):
""" Series title of current playing media. (TV Show only)"""
return self.media_status.series_title if self.media_status else None
@property
def media_season(self):
""" Season of current playing media. (TV Show only) """
return self.media_status.season if self.media_status else None
@property
def media_episode(self):
""" Episode of current playing media. (TV Show only) """
return self.media_status.episode if self.media_status else None
@property
def app_id(self):
""" ID of the current running app. """
return self.cast.app_id
@property
def app_name(self):
""" Name of the current running app. """
return self.cast.app_display_name
@property
def supported_media_commands(self):
""" Flags of media commands that are supported. """
return SUPPORT_CAST
def turn_on(self):
""" Turns on the ChromeCast. """
# The only way we can turn the Chromecast on is by launching an app
if not self.cast.status or not self.cast.status.is_active_input:
if self.cast.app_id:
self.cast.quit_app()
self.cast.play_media(
CAST_SPLASH, cast.STREAM_TYPE_BUFFERED)
def turn_off(self):
""" Turns Chromecast off. """
self.cast.quit_app()
def mute_volume(self, mute):
""" mute the volume. """
self.cast.set_volume_muted(mute)
def set_volume_level(self, volume):
""" set volume level, range 0..1. """
self.cast.set_volume(volume)
def media_play(self):
""" Send play commmand. """
self.cast.media_controller.play()
def media_pause(self):
""" Send pause command. """
self.cast.media_controller.pause()
def media_previous_track(self):
""" Send previous track command. """
self.cast.media_controller.rewind()
def media_next_track(self):
""" Send next track command. """
self.cast.media_controller.skip()
def media_seek(self, position):
""" Seek the media to a specific location. """
self.cast.media_controller.seek(position)
def play_youtube(self, media_id):
""" Plays a YouTube media. """
self.youtube.play_video(media_id)
# implementation of chromecast status_listener methods
def new_cast_status(self, status):
""" Called when a new cast status is received. """
self.cast_status = status
self.update_ha_state()
def new_media_status(self, status):
""" Called when a new media status is received. """
self.media_status = status
self.update_ha_state()
|
|
# Copyright 2008-2010 by Peter Cock. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Multiple sequence alignment input/output as alignment objects.
The Bio.AlignIO interface is deliberately very similar to Bio.SeqIO, and in
fact the two are connected internally. Both modules use the same set of file
format names (lower case strings). From the user's perspective, you can read
in a PHYLIP file containing one or more alignments using Bio.AlignIO, or you
can read in the sequences within these alignments using Bio.SeqIO.
Bio.AlignIO is also documented at U{http://biopython.org/wiki/AlignIO} and by
a whole chapter in our tutorial:
- U{http://biopython.org/DIST/docs/tutorial/Tutorial.html}
- U{http://biopython.org/DIST/docs/tutorial/Tutorial.pdf}
Input
=====
For the typical special case when your file or handle contains one and only
one alignment, use the function Bio.AlignIO.read(). This takes an input file
handle (or in recent versions of Biopython a filename as a string), format
string and optional number of sequences per alignment. It will return a single
MultipleSeqAlignment object (or raise an exception if there isn't just one
alignment):
>>> from Bio import AlignIO
>>> align = AlignIO.read("Phylip/interlaced.phy", "phylip")
>>> print align
SingleLetterAlphabet() alignment with 3 rows and 384 columns
-----MKVILLFVLAVFTVFVSS---------------RGIPPE...I-- CYS1_DICDI
MAHARVLLLALAVLATAAVAVASSSSFADSNPIRPVTDRAASTL...VAA ALEU_HORVU
------MWATLPLLCAGAWLLGV--------PVCGAAELSVNSL...PLV CATH_HUMAN
For the general case, when the handle could contain any number of alignments,
use the function Bio.AlignIO.parse(...) which takes the same arguments, but
returns an iterator giving MultipleSeqAlignment objects (typically used in a
for loop). If you want random access to the alignments by number, turn this
into a list:
>>> from Bio import AlignIO
>>> alignments = list(AlignIO.parse("Emboss/needle.txt", "emboss"))
>>> print alignments[2]
SingleLetterAlphabet() alignment with 2 rows and 120 columns
-KILIVDDQYGIRILLNEVFNKEGYQTFQAANGLQALDIVTKER...--- ref_rec
LHIVVVDDDPGTCVYIESVFAELGHTCKSFVRPEAAEEYILTHP...HKE gi|94967506|receiver
Most alignment file formats can be concatenated so as to hold as many
different multiple sequence alignments as possible. One common example
is the output of the tool seqboot in the PHYLIP suite. Sometimes there
can be a file header and footer, as seen in the EMBOSS alignment output.
Output
======
Use the function Bio.AlignIO.write(...), which takes a complete set of
Alignment objects (either as a list, or an iterator), an output file handle
(or filename in recent versions of Biopython) and of course the file format::
from Bio import AlignIO
alignments = ...
count = SeqIO.write(alignments, "example.faa", "fasta")
If using a handle make sure to close it to flush the data to the disk::
from Bio import AlignIO
alignments = ...
handle = open("example.faa", "w")
count = AlignIO.write(alignments, handle, "fasta")
handle.close()
In general, you are expected to call this function once (with all your
alignments) and then close the file handle. However, for file formats
like PHYLIP where multiple alignments are stored sequentially (with no file
header and footer), multiple calls to the write function should work as
expected when using handles.
If you are using a filename, the repeated calls to the write functions will
overwrite the existing file each time.
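As a brief sketch (assuming "first" and "second" are lists of alignments you
built earlier), appending two batches of PHYLIP alignments to one handle::

    from Bio import AlignIO
    handle = open("example.phy", "w")
    AlignIO.write(first, handle, "phylip")
    AlignIO.write(second, handle, "phylip")
    handle.close()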
Conversion
==========
The Bio.AlignIO.convert(...) function allows an easy interface for simple
alignment file format conversions. Additionally, it may use file format
specific optimisations so this should be the fastest way too.
In general however, you can combine the Bio.AlignIO.parse(...) function with
the Bio.AlignIO.write(...) function for sequence file conversion. Using
generator expressions provides a memory efficient way to perform filtering or
other extra operations as part of the process.
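As a sketch (assuming "example.phy" holds several PHYLIP alignments), the
parse/write combination with a generator expression might look like::

    from Bio import AlignIO
    alignments = (a for a in AlignIO.parse("example.phy", "phylip")
                  if a.get_alignment_length() > 100)
    count = AlignIO.write(alignments, "example.aln", "clustal")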
File Formats
============
When specifying the file format, use lowercase strings. The same format
names are also used in Bio.SeqIO and include the following:
- clustal - Output from Clustal W or X, see also the module Bio.Clustalw
which can be used to run the command line tool from Biopython.
- emboss - EMBOSS tools' "pairs" and "simple" alignment formats.
- fasta - The generic sequence file format where each record starts with
an identifier line starting with a ">" character, followed by
lines of sequence.
- fasta-m10 - For the pairwise alignments output by Bill Pearson's FASTA
tools when used with the -m 10 command line option for machine
readable output.
- ig - The IntelliGenetics file format, apparently the same as the
MASE alignment format.
- nexus - Output from NEXUS, see also the module Bio.Nexus which can also
read any phylogenetic trees in these files.
- phylip - Used by the PHYLIP tools.
- stockholm - A richly annotated alignment file format used by PFAM.
Note that while Bio.AlignIO can read all the above file formats, it cannot
write to all of them.
You can also use any file format supported by Bio.SeqIO, such as "fasta" or
"ig" (which are listed above), PROVIDED the sequences in your file are all the
same length.
"""
__docformat__ = "epytext en" #not just plaintext
#TODO
# - define policy on reading aligned sequences with gaps in
# (e.g. - and . characters) including how the alphabet interacts
#
# - Can we build the to_alignment(...) functionality
# into the generic Alignment class instead?
#
# - How best to handle unique/non unique record.id when writing.
# For most file formats reading such files is fine; The stockholm
# parser would fail.
#
# - MSF multiple alignment format, aka GCG, aka PileUp format (*.msf)
# http://www.bioperl.org/wiki/MSF_multiple_alignment_format
from Bio.Align import MultipleSeqAlignment
from Bio.Align.Generic import Alignment
from Bio.Alphabet import Alphabet, AlphabetEncoder, _get_base_alphabet
import StockholmIO
import ClustalIO
import NexusIO
import PhylipIO
import EmbossIO
import FastaIO
#Convention for format names is "mainname-subtype" in lower case.
#Please use the same names as BioPerl and EMBOSS where possible.
_FormatToIterator = {#"fasta" is done via Bio.SeqIO
"clustal" : ClustalIO.ClustalIterator,
"emboss" : EmbossIO.EmbossIterator,
"fasta-m10" : FastaIO.FastaM10Iterator,
"nexus" : NexusIO.NexusIterator,
"phylip" : PhylipIO.PhylipIterator,
"phylip-relaxed" : PhylipIO.RelaxedPhylipIterator,
"stockholm" : StockholmIO.StockholmIterator,
}
_FormatToWriter = {#"fasta" is done via Bio.SeqIO
#"emboss" : EmbossIO.EmbossWriter, (unfinished)
"nexus" : NexusIO.NexusWriter,
"phylip" : PhylipIO.PhylipWriter,
"phylip-relaxed" : PhylipIO.RelaxedPhylipWriter,
"stockholm" : StockholmIO.StockholmWriter,
"clustal" : ClustalIO.ClustalWriter,
}
def write(alignments, handle, format):
"""Write complete set of alignments to a file.
Arguments:
- alignments - A list (or iterator) of Alignment objects (ideally the
new MultipleSeqAlignment objects), or (if using Biopython
1.54 or later) a single alignment object.
- handle - File handle object to write to, or filename as string
(note older versions of Biopython only took a handle).
- format - lower case string describing the file format to write.
You should close the handle after calling this function.
Returns the number of alignments written (as an integer).
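A minimal usage sketch (assuming "my.aln" is a Clustal file you have already
parsed into a list of alignments)::

    from Bio import AlignIO
    alignments = list(AlignIO.parse("my.aln", "clustal"))
    count = AlignIO.write(alignments, "my.sth", "stockholm")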
"""
from Bio import SeqIO
#Try and give helpful error messages:
if not isinstance(format, basestring):
raise TypeError("Need a string for the file format (lower case)")
if not format:
raise ValueError("Format required (lower case string)")
if format != format.lower():
raise ValueError("Format string '%s' should be lower case" % format)
if isinstance(alignments, Alignment):
#This raised an exception in older versions of Biopython
alignments = [alignments]
if isinstance(handle, basestring):
handle = open(handle, "w")
handle_close = True
else:
handle_close = False
#Map the file format to a writer class
if format in _FormatToWriter:
writer_class = _FormatToWriter[format]
count = writer_class(handle).write_file(alignments)
elif format in SeqIO._FormatToWriter:
#Exploit the existing SeqIO parser to do the dirty work!
#TODO - Can we make one call to SeqIO.write() and count the alignments?
count = 0
for alignment in alignments:
if not isinstance(alignment, Alignment):
raise TypeError(\
"Expect a list or iterator of Alignment objects.")
SeqIO.write(alignment, handle, format)
count += 1
elif format in _FormatToIterator or format in SeqIO._FormatToIterator:
raise ValueError("Reading format '%s' is supported, but not writing" \
% format)
else:
raise ValueError("Unknown format '%s'" % format)
assert isinstance(count, int), "Internal error - the underlying %s " \
"writer should have returned the alignment count, not %s" \
% (format, repr(count))
if handle_close:
handle.close()
return count
#This is a generator function!
def _SeqIO_to_alignment_iterator(handle, format, alphabet=None, seq_count=None):
"""Uses Bio.SeqIO to create an MultipleSeqAlignment iterator (PRIVATE).
Arguments:
- handle - handle to the file.
- format - string describing the file format.
- alphabet - optional Alphabet object, useful when the sequence type
cannot be automatically inferred from the file itself
(e.g. fasta, phylip, clustal)
- seq_count - Optional integer, number of sequences expected in each
alignment. Recommended for fasta format files.
If count is omitted (default) then all the sequences in the file are
combined into a single MultipleSeqAlignment.
"""
from Bio import SeqIO
assert format in SeqIO._FormatToIterator
if seq_count:
#Use the count to split the records into batches.
seq_record_iterator = SeqIO.parse(handle, format, alphabet)
records = []
for record in seq_record_iterator:
records.append(record)
if len(records) == seq_count:
yield MultipleSeqAlignment(records, alphabet)
records = []
if len(records) > 0:
raise ValueError("Check seq_count argument, not enough sequences?")
else:
#Must assume that there is a single alignment using all
#the SeqRecord objects:
records = list(SeqIO.parse(handle, format, alphabet))
if records:
yield MultipleSeqAlignment(records, alphabet)
raise StopIteration
def _force_alphabet(alignment_iterator, alphabet):
"""Iterate over alignments, over-riding the alphabet (PRIVATE)."""
#Assume the alphabet argument has been pre-validated
given_base_class = _get_base_alphabet(alphabet).__class__
for align in alignment_iterator:
if not isinstance(_get_base_alphabet(align._alphabet),
given_base_class):
raise ValueError("Specified alphabet %s clashes with "\
"that determined from the file, %s" \
% (repr(alphabet), repr(align._alphabet)))
for record in align:
if not isinstance(_get_base_alphabet(record.seq.alphabet),
given_base_class):
raise ValueError("Specified alphabet %s clashes with "\
"that determined from the file, %s" \
% (repr(alphabet), repr(record.seq.alphabet)))
record.seq.alphabet = alphabet
align._alphabet = alphabet
yield align
def parse(handle, format, seq_count=None, alphabet=None):
"""Iterate over an alignment file as MultipleSeqAlignment objects.
Arguments:
- handle - handle to the file, or the filename as a string
(note older versions of Biopython only took a handle).
- format - string describing the file format.
- alphabet - optional Alphabet object, useful when the sequence type
cannot be automatically inferred from the file itself
(e.g. fasta, phylip, clustal)
- seq_count - Optional integer, number of sequences expected in each
alignment. Recommended for fasta format files.
If you have the file name in a string 'filename', use:
>>> from Bio import AlignIO
>>> filename = "Emboss/needle.txt"
>>> format = "emboss"
>>> for alignment in AlignIO.parse(filename, format):
... print "Alignment of length", alignment.get_alignment_length()
Alignment of length 124
Alignment of length 119
Alignment of length 120
Alignment of length 118
Alignment of length 125
If you have a string 'data' containing the file contents, use:
from Bio import AlignIO
from StringIO import StringIO
my_iterator = AlignIO.parse(StringIO(data), format)
Use the Bio.AlignIO.read() function when you expect a single record only.
"""
from Bio import SeqIO
handle_close = False
if isinstance(handle, basestring):
handle = open(handle, "rU")
#TODO - On Python 2.5+ use with statement to close handle
handle_close = True
#Try and give helpful error messages:
if not isinstance(format, basestring):
raise TypeError("Need a string for the file format (lower case)")
if not format:
raise ValueError("Format required (lower case string)")
if format != format.lower():
raise ValueError("Format string '%s' should be lower case" % format)
if alphabet is not None and not (isinstance(alphabet, Alphabet) or \
isinstance(alphabet, AlphabetEncoder)):
raise ValueError("Invalid alphabet, %s" % repr(alphabet))
if seq_count is not None and not isinstance(seq_count, int):
raise TypeError("Need integer for seq_count (sequences per alignment)")
#Map the file format to a sequence iterator:
if format in _FormatToIterator:
iterator_generator = _FormatToIterator[format]
if alphabet is None :
i = iterator_generator(handle, seq_count)
else:
try:
#Initially assume the optional alphabet argument is supported
i = iterator_generator(handle, seq_count, alphabet=alphabet)
except TypeError:
#It isn't supported.
i = _force_alphabet(iterator_generator(handle, seq_count),
alphabet)
elif format in SeqIO._FormatToIterator:
#Exploit the existing SeqIO parser to do the dirty work!
i = _SeqIO_to_alignment_iterator(handle, format,
alphabet=alphabet,
seq_count=seq_count)
else:
raise ValueError("Unknown format '%s'" % format)
#This imposes some overhead... wait until we drop Python 2.4 to fix it
for a in i:
yield a
if handle_close:
handle.close()
def read(handle, format, seq_count=None, alphabet=None):
"""Turns an alignment file into a single MultipleSeqAlignment object.
Arguments:
- handle - handle to the file, or the filename as a string
(note older versions of Biopython only took a handle).
- format - string describing the file format.
- alphabet - optional Alphabet object, useful when the sequence type
cannot be automatically inferred from the file itself
(e.g. fasta, phylip, clustal)
- seq_count - Optional integer, number of sequences expected in each
alignment. Recommended for fasta format files.
If the handle contains no alignments, or more than one alignment,
an exception is raised. For example, using a Clustal W file
containing one alignment:
>>> from Bio import AlignIO
>>> filename = "Clustalw/protein.aln"
>>> format = "clustal"
>>> alignment = AlignIO.read(filename, format)
>>> print "Alignment of length", alignment.get_alignment_length()
Alignment of length 411
If however you want the first alignment from a file containing
multiple alignments, this function will raise an exception.
>>> from Bio import AlignIO
>>> filename = "Emboss/needle.txt"
>>> format = "emboss"
>>> alignment = AlignIO.read(filename, format)
Traceback (most recent call last):
...
ValueError: More than one record found in handle
Instead use:
>>> from Bio import AlignIO
>>> filename = "Emboss/needle.txt"
>>> format = "emboss"
>>> alignment = AlignIO.parse(filename, format).next()
>>> print "First alignment has length", alignment.get_alignment_length()
First alignment has length 124
You must use the Bio.AlignIO.parse() function if you want to read multiple
records from the handle.
"""
iterator = parse(handle, format, seq_count, alphabet)
try:
first = iterator.next()
except StopIteration:
first = None
if first is None:
raise ValueError("No records found in handle")
try:
second = iterator.next()
except StopIteration:
second = None
if second is not None:
raise ValueError("More than one record found in handle")
if seq_count:
assert len(first)==seq_count
return first
def convert(in_file, in_format, out_file, out_format, alphabet=None):
"""Convert between two alignment files, returns number of alignments.
- in_file - an input handle or filename
- in_format - input file format, lower case string
- out_file - an output handle or filename
- out_format - output file format, lower case string
- alphabet - optional alphabet to assume
NOTE - If you provide an output filename, it will be opened which will
overwrite any existing file without warning. This may happen even if the
conversion is aborted (e.g. if an invalid out_format name is given).
"""
#TODO - Add optimised versions of important conversions
#For now just off load the work to SeqIO parse/write
if isinstance(in_file, basestring):
in_handle = open(in_file, "rU")
in_close = True
else:
in_handle = in_file
in_close = False
#This will check the arguments and issue error messages,
alignments = parse(in_handle, in_format, None, alphabet)
#Don't open the output file until we've checked the input is OK:
if isinstance(out_file, basestring):
out_handle = open(out_file, "w")
out_close = True
else:
out_handle = out_file
out_close = False
#This will check the arguments and issue error messages,
#after we have opened the file which is a shame.
count = write(alignments, out_handle, out_format)
#Must now close any handles we opened
if in_close:
in_handle.close()
if out_close:
out_handle.close()
return count
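# Illustrative usage sketch (not part of the original module): convert() wraps
# parse() and write() in one call. The filenames below are hypothetical.
def _example_convert_usage():
    from Bio import AlignIO
    return AlignIO.convert("hypothetical_input.aln", "clustal",
                           "hypothetical_output.phy", "phylip")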
def _test():
"""Run the Bio.AlignIO module's doctests.
This will try and locate the unit tests directory, and run the doctests
from there in order that the relative paths used in the examples work.
"""
import doctest
import os
if os.path.isdir(os.path.join("..", "..", "Tests")):
print "Runing doctests..."
cur_dir = os.path.abspath(os.curdir)
os.chdir(os.path.join("..", "..", "Tests"))
doctest.testmod()
os.chdir(cur_dir)
del cur_dir
print "Done"
elif os.path.isdir(os.path.join("Tests", "Fasta")):
print "Runing doctests..."
cur_dir = os.path.abspath(os.curdir)
os.chdir(os.path.join("Tests"))
doctest.testmod()
os.chdir(cur_dir)
del cur_dir
print "Done"
if __name__ == "__main__":
_test()
|
|
# Copyright 2015, Kay Hayen, mailto:[email protected]
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Optimizations of built-ins to built-in calls.
"""
import math
import sys
from nuitka.nodes.ParameterSpecs import (
ParameterSpec,
TooManyArguments,
matchCall
)
from nuitka.utils.Utils import python_version
class BuiltinParameterSpec(ParameterSpec):
def __init__(self, name, arg_names, default_count, list_star_arg = None,
dict_star_arg = None):
ParameterSpec.__init__(
self,
name = name,
normal_args = arg_names,
list_star_arg = list_star_arg,
dict_star_arg = dict_star_arg,
default_count = default_count,
kw_only_args = ()
)
self.builtin = __builtins__[name]
def __repr__(self):
return "<BuiltinParameterSpec %s>" % self.name
def getName(self):
return self.name
def isCompileTimeComputable(self, values):
# By default, we make this dependent on the ability to compute the
# arguments, which is of course a good start for most cases, so this
# is for overloads, pylint: disable=R0201
for value in values:
if value is not None and not value.isCompileTimeConstant():
return False
return True
def simulateCall(self, given_values):
# Use a star dict call for the simulation and treat any exception as
# really fatal, pylint: disable=W0703
try:
given_normal_args = given_values[:len(self.normal_args)]
if self.list_star_arg:
given_list_star_args = given_values[len(self.normal_args)]
else:
given_list_star_args = None
if self.dict_star_arg:
given_dict_star_args = given_values[ -1 ]
else:
given_dict_star_args = None
arg_dict = {}
for arg_name, given_value in zip(self.normal_args, given_normal_args):
assert type(given_value) not in (tuple, list), \
("do not like a tuple %s" % (given_value,))
if given_value is not None:
arg_dict[ arg_name ] = given_value.getCompileTimeConstant()
if given_dict_star_args:
for given_dict_star_arg in reversed(given_dict_star_args):
arg_name = given_dict_star_arg.getKey().getCompileTimeConstant()
arg_value = given_dict_star_arg.getValue().getCompileTimeConstant()
arg_dict[arg_name] = arg_value
except Exception as e:
sys.exit("Fatal problem: %r" % e)
if given_list_star_args:
return self.builtin(
*(value.getCompileTimeConstant() for value in given_list_star_args),
**arg_dict
)
else:
return self.builtin(**arg_dict)
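# Illustrative sketch (not part of Nuitka): simulateCall() only relies on the
# node methods used in this file (isCompileTimeConstant, getCompileTimeConstant,
# isNumberConstant, getConstant), so a minimal stand-in node is enough to show
# how a builtin call is simulated at compile time.
class _ExampleConstantNode(object):
    def __init__(self, constant):
        self.constant = constant
    def isCompileTimeConstant(self):
        return True
    def getCompileTimeConstant(self):
        return self.constant
    def isNumberConstant(self):
        return isinstance(self.constant, (int, float))
    def getConstant(self):
        return self.constant
def _example_simulate_int_call():
    spec = BuiltinParameterSpec("int", ('x', "base"), 2)
    # Simulates int("ff", base=16), giving 255, without executing user code.
    return spec.simulateCall((_ExampleConstantNode("ff"), _ExampleConstantNode(16)))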
class BuiltinParameterSpecNoKeywords(BuiltinParameterSpec):
def allowsKeywords(self):
return False
def simulateCall(self, given_values):
# Use a star dict call for the simulation and treat any exception as really
# fatal, pylint: disable=W0703
try:
if self.list_star_arg:
given_list_star_arg = given_values[ len(self.normal_args) ]
else:
given_list_star_arg = None
arg_list = []
refuse_more = False
for _arg_name, given_value in zip(self.normal_args, given_values):
assert type(given_value) not in (tuple, list), ("do not like tuple %s" % (given_value,))
if given_value is not None:
if not refuse_more:
arg_list.append(given_value.getCompileTimeConstant())
else:
assert False
else:
refuse_more = True
if given_list_star_arg is not None:
arg_list += [ value.getCompileTimeConstant() for value in given_list_star_arg ]
except Exception as e:
print >> sys.stderr, "Fatal error: ",
import traceback
traceback.print_exc()
sys.exit(repr(e))
return self.builtin(*arg_list)
class BuiltinParameterSpecExceptions(BuiltinParameterSpec):
def __init__(self, exception_name, default_count):
# TODO: Parameter default_count makes no sense for exceptions probably.
BuiltinParameterSpec.__init__(
self,
name = exception_name,
arg_names = (),
default_count = default_count,
list_star_arg = "args"
)
def allowsKeywords(self):
return False
def getKeywordRefusalText(self):
return "exceptions.%s does not take keyword arguments" % self.name
def getCallableName(self):
return "exceptions." + self.getName()
def makeBuiltinParameterSpec(exception_name):
if exception_name == "ImportError" and python_version >= 330:
# TODO: Create this beast, needs keyword-only arguments to be supported;
# currently the user of this function must take care not to have them.
pass
return BuiltinParameterSpecExceptions(
exception_name = exception_name,
default_count = 0
)
builtin_int_spec = BuiltinParameterSpec("int", ('x', "base"), 2)
# These builtins are only available for Python2
if python_version < 300:
builtin_long_spec = BuiltinParameterSpec(
"long",
('x', "base"),
2
)
builtin_execfile_spec = BuiltinParameterSpecNoKeywords(
"execfile",
("filename", "globals", "locals"),
2
)
builtin_unicode_spec = BuiltinParameterSpec(
"unicode",
("string", "encoding", "errors"),
3
)
builtin_xrange_spec = BuiltinParameterSpec(
"xrange",
("start", "stop", "step"),
2
)
builtin_bool_spec = BuiltinParameterSpec("bool", ('x',), 1)
builtin_float_spec = BuiltinParameterSpec("float", ('x',), 1)
builtin_complex_spec = BuiltinParameterSpec("complex", ("real", "imag"), 2)
# This built-in has variable parameters for Python2/3
if python_version < 300:
builtin_str_spec = BuiltinParameterSpec("str", ("object",), 1)
else:
builtin_str_spec = BuiltinParameterSpec("str", ("object", "encoding", "errors"), 3)
builtin_len_spec = BuiltinParameterSpecNoKeywords("len", ("object",), 0)
builtin_dict_spec = BuiltinParameterSpec("dict", (), 0, "list_args", "dict_args")
builtin_tuple_spec = BuiltinParameterSpec("tuple", ("sequence",), 1)
builtin_list_spec = BuiltinParameterSpec("list", ("sequence",), 1)
builtin_set_spec = BuiltinParameterSpecNoKeywords("set", ("iterable",), 1)
builtin_import_spec = BuiltinParameterSpec("__import__", ("name", "globals", "locals", "fromlist", "level"), 4)
builtin_open_spec = BuiltinParameterSpec("open", ("name", "mode", "buffering"), 3)
builtin_chr_spec = BuiltinParameterSpecNoKeywords("chr", ('i',), 0)
builtin_ord_spec = BuiltinParameterSpecNoKeywords("ord", ('c',), 0)
builtin_bin_spec = BuiltinParameterSpecNoKeywords("bin", ("number",), 0)
builtin_oct_spec = BuiltinParameterSpecNoKeywords("oct", ("number",), 0)
builtin_hex_spec = BuiltinParameterSpecNoKeywords("hex", ("number",), 0)
builtin_id_spec = BuiltinParameterSpecNoKeywords("id", ("object",), 0)
builtin_repr_spec = BuiltinParameterSpecNoKeywords("repr", ("object",), 0)
builtin_dir_spec = BuiltinParameterSpecNoKeywords("dir", ("object",), 1)
builtin_vars_spec = BuiltinParameterSpecNoKeywords("vars", ("object",), 1)
builtin_locals_spec = BuiltinParameterSpecNoKeywords("locals", (), 0)
builtin_globals_spec = BuiltinParameterSpecNoKeywords("globals", (), 0)
builtin_eval_spec = BuiltinParameterSpecNoKeywords("eval", ("source", "globals", "locals"), 2)
if python_version < 300:
builtin_compile_spec = BuiltinParameterSpec(
"compile",
("source", "filename", "mode", "flags", "dont_inherit"),
2
)
else:
builtin_compile_spec = BuiltinParameterSpec(
"compile",
("source", "filename", "mode", "flags", "dont_inherit", "optimize"),
3
)
if python_version >= 300:
builtin_exec_spec = BuiltinParameterSpecNoKeywords(
"exec",
("source", "globals", "locals"),
2
)
# Note: When the default applies, iter() in fact names its first argument
# "collection"; this is fixed up in a wrapper.
builtin_iter_spec = BuiltinParameterSpecNoKeywords("iter", ("callable", "sentinel"), 1)
builtin_next_spec = BuiltinParameterSpecNoKeywords("next", ("iterator", "default"), 1)
# Note: type with 1 and type with 3 arguments are too different.
builtin_type1_spec = BuiltinParameterSpecNoKeywords("type", ("object",), 0)
builtin_type3_spec = BuiltinParameterSpecNoKeywords("type", ("name", "bases", "dict"), 0)
builtin_super_spec = BuiltinParameterSpecNoKeywords("super", ("type", "object"), 1 if python_version < 300 else 2)
builtin_hasattr_spec = BuiltinParameterSpecNoKeywords("hasattr", ("object", "name"), 0)
builtin_getattr_spec = BuiltinParameterSpecNoKeywords("getattr", ("object", "name", "default"), 1)
builtin_setattr_spec = BuiltinParameterSpecNoKeywords("setattr", ("object", "name", "value"), 0)
builtin_isinstance_spec = BuiltinParameterSpecNoKeywords("isinstance", ("instance", "classes"), 0)
builtin_bytearray_spec = BuiltinParameterSpecNoKeywords("bytearray", ("iterable_of_ints",), 1)
# Beware: One argument defines stop, not start.
builtin_slice_spec = BuiltinParameterSpecNoKeywords("slice", ("start", "stop", "step"), 2)
class BuiltinRangeSpec(BuiltinParameterSpecNoKeywords):
def __init__(self, *args):
BuiltinParameterSpecNoKeywords.__init__(self, *args)
def isCompileTimeComputable(self, values):
# For ranges, we have to handle many cases that can prevent the ability
# to pre-compute, pylint: disable=R0911,R0912
result = BuiltinParameterSpecNoKeywords.isCompileTimeComputable(
self,
values = values
)
if result:
arg_count = len(values)
if arg_count == 1:
low = values[0]
# If it's not a number constant, we can compute the exception
# that will be raised.
if not low.isNumberConstant():
return True
return low.getConstant() < 256
elif arg_count == 2:
low, high = values
# If it's not a number constant, we can compute the exception
# that will be raised.
if not low.isNumberConstant() or not high.isNumberConstant():
return True
return high.getConstant() - low.getConstant() < 256
elif arg_count == 3:
low, high, step = values
if not low.isNumberConstant() or \
not high.isNumberConstant() or \
not step.isNumberConstant():
return True
low = low.getConstant()
high = high.getConstant()
step = step.getConstant()
# It's going to give a ZeroDivisionError in this case.
if step == 0:
return True
if low < high:
if step < 0:
return True
else:
return math.ceil(float(high - low) / step) < 256
else:
if step > 0:
return True
else:
return math.ceil(float(high - low) / step) < 256
else:
assert False
else:
return False
builtin_range_spec = BuiltinRangeSpec("range", ("start", "stop", "step"), 2)
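# Illustrative sketch (not part of Nuitka): the "< 256" checks above mean only
# small ranges are treated as compile time computable. Reusing the
# _ExampleConstantNode stand-in defined earlier:
def _example_range_heuristic():
    small = (_ExampleConstantNode(0), _ExampleConstantNode(100))
    large = (_ExampleConstantNode(0), _ExampleConstantNode(100000))
    # 100 elements < 256 -> True; 100000 elements -> False.
    return (builtin_range_spec.isCompileTimeComputable(small),
            builtin_range_spec.isCompileTimeComputable(large))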
def extractBuiltinArgs(node, builtin_spec, builtin_class,
empty_special_class = None):
try:
kw = node.getCallKw()
# TODO: Could check for too many / too few arguments even if they are
# unknown; we might raise that error, but that need not be optimized immediately.
if kw is not None:
if not kw.isMappingWithConstantStringKeys():
return None
pairs = kw.getMappingStringKeyPairs()
if pairs and not builtin_spec.allowsKeywords():
raise TooManyArguments(
TypeError(builtin_spec.getKeywordRefusalText())
)
else:
pairs = ()
args = node.getCallArgs()
if args:
if not args.canPredictIterationValues():
return None
positional = args.getIterationValues()
else:
positional = ()
if not positional and not pairs and empty_special_class is not None:
return empty_special_class(source_ref = node.getSourceReference())
args_dict = matchCall(
func_name = builtin_spec.getName(),
args = builtin_spec.getArgumentNames(),
star_list_arg = builtin_spec.getStarListArgumentName(),
star_dict_arg = builtin_spec.getStarDictArgumentName(),
num_defaults = builtin_spec.getDefaultCount(),
positional = positional,
pairs = pairs
)
except TooManyArguments as e:
from nuitka.nodes.NodeMakingHelpers import (
makeRaiseExceptionReplacementExpressionFromInstance,
wrapExpressionWithSideEffects
)
return wrapExpressionWithSideEffects(
new_node = makeRaiseExceptionReplacementExpressionFromInstance(
expression = node,
exception = e.getRealException()
),
old_node = node,
side_effects = node.extractPreCallSideEffects()
)
args_list = []
for argument_name in builtin_spec.getArgumentNames():
args_list.append(args_dict[argument_name])
if builtin_spec.getStarListArgumentName() is not None:
args_list.append(args_dict[builtin_spec.getStarListArgumentName()])
if builtin_spec.getStarDictArgumentName() is not None:
args_list.append(args_dict[builtin_spec.getStarDictArgumentName()])
# Use a plain list to pass the arguments without names.
result = builtin_class(
*args_list,
source_ref = node.getSourceReference()
)
result.setCompatibleSourceReference(node.getCompatibleSourceReference())
return result
|
|
# coding: utf-8
"""
MasterMind Service Manager
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class Network(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'engine_url': 'str',
'ca_cert': 'str',
'cert': 'str',
'cert_key': 'str',
'name': 'str'
}
attribute_map = {
'engine_url': 'engine-url',
'ca_cert': 'ca-cert',
'cert': 'cert',
'cert_key': 'cert-key',
'name': 'name'
}
def __init__(self, engine_url=None, ca_cert=None, cert=None, cert_key=None, name=None): # noqa: E501
"""Network - a model defined in Swagger""" # noqa: E501
self._engine_url = None
self._ca_cert = None
self._cert = None
self._cert_key = None
self._name = None
self.discriminator = None
self.engine_url = engine_url
if ca_cert is not None:
self.ca_cert = ca_cert
if cert is not None:
self.cert = cert
if cert_key is not None:
self.cert_key = cert_key
self.name = name
@property
def engine_url(self):
"""Gets the engine_url of this Network. # noqa: E501
:return: The engine_url of this Network. # noqa: E501
:rtype: str
"""
return self._engine_url
@engine_url.setter
def engine_url(self, engine_url):
"""Sets the engine_url of this Network.
:param engine_url: The engine_url of this Network. # noqa: E501
:type: str
"""
if engine_url is None:
raise ValueError("Invalid value for `engine_url`, must not be `None`") # noqa: E501
self._engine_url = engine_url
@property
def ca_cert(self):
"""Gets the ca_cert of this Network. # noqa: E501
:return: The ca_cert of this Network. # noqa: E501
:rtype: str
"""
return self._ca_cert
@ca_cert.setter
def ca_cert(self, ca_cert):
"""Sets the ca_cert of this Network.
:param ca_cert: The ca_cert of this Network. # noqa: E501
:type: str
"""
self._ca_cert = ca_cert
@property
def cert(self):
"""Gets the cert of this Network. # noqa: E501
:return: The cert of this Network. # noqa: E501
:rtype: str
"""
return self._cert
@cert.setter
def cert(self, cert):
"""Sets the cert of this Network.
:param cert: The cert of this Network. # noqa: E501
:type: str
"""
self._cert = cert
@property
def cert_key(self):
"""Gets the cert_key of this Network. # noqa: E501
:return: The cert_key of this Network. # noqa: E501
:rtype: str
"""
return self._cert_key
@cert_key.setter
def cert_key(self, cert_key):
"""Sets the cert_key of this Network.
:param cert_key: The cert_key of this Network. # noqa: E501
:type: str
"""
self._cert_key = cert_key
@property
def name(self):
"""Gets the name of this Network. # noqa: E501
:return: The name of this Network. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this Network.
:param name: The name of this Network. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Network, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Network):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
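# Illustrative sketch (not part of the generated module): constructing a Network
# and serialising it with to_dict(). The engine URL and name below are
# hypothetical values; engine_url and name are the two required attributes.
def _example_network_usage():
    net = Network(engine_url="tcp://example.invalid:2376", name="demo-network")
    return net.to_dict()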
|
|
from django import forms
from django.forms.formsets import DELETION_FIELD_NAME, BaseFormSet
from django.forms.models import (
BaseModelFormSet, inlineformset_factory, modelform_factory,
modelformset_factory,
)
from django.forms.utils import ErrorDict, ErrorList
from django.test import TestCase
from .models import (
Host, Manager, Network, ProfileNetwork, Restaurant, User, UserPreferences,
UserProfile, UserSite,
)
class InlineFormsetTests(TestCase):
def test_formset_over_to_field(self):
"A formset over a ForeignKey with a to_field can be saved. Regression for #10243"
Form = modelform_factory(User, fields="__all__")
FormSet = inlineformset_factory(User, UserSite, fields="__all__")
# Instantiate the Form and FormSet to prove
# you can create a form with no data
form = Form()
form_set = FormSet(instance=User())
# Now create a new User and UserSite instance
data = {
'serial': '1',
'username': 'apollo13',
'usersite_set-TOTAL_FORMS': '1',
'usersite_set-INITIAL_FORMS': '0',
'usersite_set-MAX_NUM_FORMS': '0',
'usersite_set-0-data': '10',
'usersite_set-0-user': 'apollo13'
}
user = User()
form = Form(data)
if form.is_valid():
user = form.save()
else:
self.fail('Errors found on form:%s' % form_set)
form_set = FormSet(data, instance=user)
if form_set.is_valid():
form_set.save()
usersite = UserSite.objects.all().values()
self.assertEqual(usersite[0]['data'], 10)
self.assertEqual(usersite[0]['user_id'], 'apollo13')
else:
self.fail('Errors found on formset:%s' % form_set.errors)
# Now update the UserSite instance
data = {
'usersite_set-TOTAL_FORMS': '1',
'usersite_set-INITIAL_FORMS': '1',
'usersite_set-MAX_NUM_FORMS': '0',
'usersite_set-0-id': str(usersite[0]['id']),
'usersite_set-0-data': '11',
'usersite_set-0-user': 'apollo13'
}
form_set = FormSet(data, instance=user)
if form_set.is_valid():
form_set.save()
usersite = UserSite.objects.all().values()
self.assertEqual(usersite[0]['data'], 11)
self.assertEqual(usersite[0]['user_id'], 'apollo13')
else:
self.fail('Errors found on formset:%s' % form_set.errors)
# Now add a new UserSite instance
data = {
'usersite_set-TOTAL_FORMS': '2',
'usersite_set-INITIAL_FORMS': '1',
'usersite_set-MAX_NUM_FORMS': '0',
'usersite_set-0-id': str(usersite[0]['id']),
'usersite_set-0-data': '11',
'usersite_set-0-user': 'apollo13',
'usersite_set-1-data': '42',
'usersite_set-1-user': 'apollo13'
}
form_set = FormSet(data, instance=user)
if form_set.is_valid():
form_set.save()
usersite = UserSite.objects.all().values().order_by('data')
self.assertEqual(usersite[0]['data'], 11)
self.assertEqual(usersite[0]['user_id'], 'apollo13')
self.assertEqual(usersite[1]['data'], 42)
self.assertEqual(usersite[1]['user_id'], 'apollo13')
else:
self.fail('Errors found on formset:%s' % form_set.errors)
def test_formset_over_inherited_model(self):
"A formset over a ForeignKey with a to_field can be saved. Regression for #11120"
Form = modelform_factory(Restaurant, fields="__all__")
FormSet = inlineformset_factory(Restaurant, Manager, fields="__all__")
# Instantiate the Form and FormSet to prove
# you can create a form with no data
form = Form()
form_set = FormSet(instance=Restaurant())
# Now create a new Restaurant and Manager instance
data = {
'name': "Guido's House of Pasta",
'manager_set-TOTAL_FORMS': '1',
'manager_set-INITIAL_FORMS': '0',
'manager_set-MAX_NUM_FORMS': '0',
'manager_set-0-name': 'Guido Van Rossum'
}
restaurant = Restaurant()
form = Form(data)
if form.is_valid():
restaurant = form.save()
else:
self.fail('Errors found on form:%s' % form_set)
form_set = FormSet(data, instance=restaurant)
if form_set.is_valid():
form_set.save()
manager = Manager.objects.all().values()
self.assertEqual(manager[0]['name'], 'Guido Van Rossum')
else:
self.fail('Errors found on formset:%s' % form_set.errors)
# Now update the Manager instance
data = {
'manager_set-TOTAL_FORMS': '1',
'manager_set-INITIAL_FORMS': '1',
'manager_set-MAX_NUM_FORMS': '0',
'manager_set-0-id': str(manager[0]['id']),
'manager_set-0-name': 'Terry Gilliam'
}
form_set = FormSet(data, instance=restaurant)
if form_set.is_valid():
form_set.save()
manager = Manager.objects.all().values()
self.assertEqual(manager[0]['name'], 'Terry Gilliam')
else:
self.fail('Errors found on formset:%s' % form_set.errors)
# Now add a new Manager instance
data = {
'manager_set-TOTAL_FORMS': '2',
'manager_set-INITIAL_FORMS': '1',
'manager_set-MAX_NUM_FORMS': '0',
'manager_set-0-id': str(manager[0]['id']),
'manager_set-0-name': 'Terry Gilliam',
'manager_set-1-name': 'John Cleese'
}
form_set = FormSet(data, instance=restaurant)
if form_set.is_valid():
form_set.save()
manager = Manager.objects.all().values().order_by('name')
self.assertEqual(manager[0]['name'], 'John Cleese')
self.assertEqual(manager[1]['name'], 'Terry Gilliam')
else:
self.fail('Errors found on formset:%s' % form_set.errors)
def test_inline_model_with_to_field(self):
"""
#13794 --- An inline model with a to_field of a formset with instance
has working relations.
"""
FormSet = inlineformset_factory(User, UserSite, exclude=('is_superuser',))
user = User.objects.create(username="guido", serial=1337)
UserSite.objects.create(user=user, data=10)
formset = FormSet(instance=user)
# Testing the inline model's relation
self.assertEqual(formset[0].instance.user_id, "guido")
def test_inline_model_with_primary_to_field(self):
"""An inline model with a OneToOneField with to_field & primary key."""
FormSet = inlineformset_factory(User, UserPreferences, exclude=('is_superuser',))
user = User.objects.create(username='guido', serial=1337)
UserPreferences.objects.create(user=user, favorite_number=10)
formset = FormSet(instance=user)
self.assertEqual(formset[0].fields['user'].initial, 'guido')
def test_inline_model_with_to_field_to_rel(self):
"""
#13794 --- An inline model with a to_field to a related field of a
formset with instance has working relations.
"""
FormSet = inlineformset_factory(UserProfile, ProfileNetwork, exclude=[])
user = User.objects.create(username="guido", serial=1337, pk=1)
self.assertEqual(user.pk, 1)
profile = UserProfile.objects.create(user=user, about="about", pk=2)
self.assertEqual(profile.pk, 2)
ProfileNetwork.objects.create(profile=profile, network=10, identifier=10)
formset = FormSet(instance=profile)
# Testing the inline model's relation
self.assertEqual(formset[0].instance.profile_id, 1)
def test_formset_with_none_instance(self):
"A formset with instance=None can be created. Regression for #11872"
Form = modelform_factory(User, fields="__all__")
FormSet = inlineformset_factory(User, UserSite, fields="__all__")
# Instantiate the Form and FormSet to prove
# you can create a formset with an instance of None
Form(instance=None)
FormSet(instance=None)
def test_empty_fields_on_modelformset(self):
"""
No fields passed to modelformset_factory() should result in no fields
on returned forms except for the id (#14119).
"""
UserFormSet = modelformset_factory(User, fields=())
formset = UserFormSet()
for form in formset.forms:
self.assertIn('id', form.fields)
self.assertEqual(len(form.fields), 1)
def test_save_as_new_with_new_inlines(self):
"""
Existing and new inlines are saved with save_as_new.
Regression for #14938.
"""
efnet = Network.objects.create(name="EFNet")
host1 = Host.objects.create(hostname="irc.he.net", network=efnet)
HostFormSet = inlineformset_factory(Network, Host, fields="__all__")
# Add a new host, modify previous host, and save-as-new
data = {
'host_set-TOTAL_FORMS': '2',
'host_set-INITIAL_FORMS': '1',
'host_set-MAX_NUM_FORMS': '0',
'host_set-0-id': str(host1.id),
'host_set-0-hostname': 'tranquility.hub.dal.net',
'host_set-1-hostname': 'matrix.de.eu.dal.net'
}
# To save a formset as new, it needs a new hub instance
dalnet = Network.objects.create(name="DALnet")
formset = HostFormSet(data, instance=dalnet, save_as_new=True)
self.assertTrue(formset.is_valid())
formset.save()
self.assertQuerysetEqual(
dalnet.host_set.order_by("hostname"),
["<Host: matrix.de.eu.dal.net>", "<Host: tranquility.hub.dal.net>"]
)
def test_initial_data(self):
user = User.objects.create(username="bibi", serial=1)
UserSite.objects.create(user=user, data=7)
FormSet = inlineformset_factory(User, UserSite, extra=2, fields="__all__")
formset = FormSet(instance=user, initial=[{'data': 41}, {'data': 42}])
self.assertEqual(formset.forms[0].initial['data'], 7)
self.assertEqual(formset.extra_forms[0].initial['data'], 41)
self.assertIn('value="42"', formset.extra_forms[1].as_p())
class FormsetTests(TestCase):
def test_error_class(self):
'''
Test the type of Formset and Form error attributes
'''
Formset = modelformset_factory(User, fields="__all__")
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '0',
'form-0-id': '',
'form-0-username': 'apollo13',
'form-0-serial': '1',
'form-1-id': '',
'form-1-username': 'apollo13',
'form-1-serial': '2',
}
formset = Formset(data)
# check if the returned error classes are correct
# note: formset.errors returns a list as documented
self.assertIsInstance(formset.errors, list)
self.assertIsInstance(formset.non_form_errors(), ErrorList)
for form in formset.forms:
self.assertIsInstance(form.errors, ErrorDict)
self.assertIsInstance(form.non_field_errors(), ErrorList)
def test_initial_data(self):
User.objects.create(username="bibi", serial=1)
Formset = modelformset_factory(User, fields="__all__", extra=2)
formset = Formset(initial=[{'username': 'apollo11'}, {'username': 'apollo12'}])
self.assertEqual(formset.forms[0].initial['username'], "bibi")
self.assertEqual(formset.extra_forms[0].initial['username'], "apollo11")
self.assertIn('value="apollo12"', formset.extra_forms[1].as_p())
def test_extraneous_query_is_not_run(self):
Formset = modelformset_factory(Network, fields="__all__")
data = {
'test-TOTAL_FORMS': '1',
'test-INITIAL_FORMS': '0',
'test-MAX_NUM_FORMS': '',
'test-0-name': 'Random Place',
}
with self.assertNumQueries(1):
formset = Formset(data, prefix="test")
formset.save()
class CustomWidget(forms.widgets.TextInput):
pass
class UserSiteForm(forms.ModelForm):
class Meta:
model = UserSite
fields = "__all__"
widgets = {
'id': CustomWidget,
'data': CustomWidget,
}
localized_fields = ('data',)
class Callback:
def __init__(self):
self.log = []
def __call__(self, db_field, **kwargs):
self.log.append((db_field, kwargs))
return db_field.formfield(**kwargs)
class FormfieldCallbackTests(TestCase):
"""
Regression for #13095 and #17683: Using base forms with widgets
defined in Meta should not raise errors and BaseModelForm should respect
the specified pk widget.
"""
def test_inlineformset_factory_default(self):
Formset = inlineformset_factory(User, UserSite, form=UserSiteForm, fields="__all__")
form = Formset().forms[0]
self.assertIsInstance(form['id'].field.widget, CustomWidget)
self.assertIsInstance(form['data'].field.widget, CustomWidget)
self.assertFalse(form.fields['id'].localize)
self.assertTrue(form.fields['data'].localize)
def test_modelformset_factory_default(self):
Formset = modelformset_factory(UserSite, form=UserSiteForm)
form = Formset().forms[0]
self.assertIsInstance(form['id'].field.widget, CustomWidget)
self.assertIsInstance(form['data'].field.widget, CustomWidget)
self.assertFalse(form.fields['id'].localize)
self.assertTrue(form.fields['data'].localize)
def assertCallbackCalled(self, callback):
id_field, user_field, data_field = UserSite._meta.fields
expected_log = [
(id_field, {'widget': CustomWidget}),
(user_field, {}),
(data_field, {'widget': CustomWidget, 'localize': True}),
]
self.assertEqual(callback.log, expected_log)
def test_inlineformset_custom_callback(self):
callback = Callback()
inlineformset_factory(User, UserSite, form=UserSiteForm,
formfield_callback=callback, fields="__all__")
self.assertCallbackCalled(callback)
def test_modelformset_custom_callback(self):
callback = Callback()
modelformset_factory(UserSite, form=UserSiteForm, formfield_callback=callback)
self.assertCallbackCalled(callback)
class BaseCustomDeleteFormSet(BaseFormSet):
"""
A formset mix-in that lets a form decide if it's to be deleted.
Works for BaseFormSets. Also works for ModelFormSets with #14099 fixed.
form.should_delete() is called. The formset delete field is also suppressed.
"""
def add_fields(self, form, index):
super().add_fields(form, index)
self.can_delete = True
if DELETION_FIELD_NAME in form.fields:
del form.fields[DELETION_FIELD_NAME]
def _should_delete_form(self, form):
return hasattr(form, 'should_delete') and form.should_delete()
class FormfieldShouldDeleteFormTests(TestCase):
"""
Regression for #14099: BaseModelFormSet should use ModelFormSet method _should_delete_form
"""
class BaseCustomDeleteModelFormSet(BaseModelFormSet, BaseCustomDeleteFormSet):
""" Model FormSet with CustomDelete MixIn """
class CustomDeleteUserForm(forms.ModelForm):
""" A model form with a 'should_delete' method """
class Meta:
model = User
fields = "__all__"
def should_delete(self):
""" delete form if odd PK """
return self.instance.pk % 2 != 0
NormalFormset = modelformset_factory(User, form=CustomDeleteUserForm, can_delete=True)
DeleteFormset = modelformset_factory(User, form=CustomDeleteUserForm, formset=BaseCustomDeleteModelFormSet)
data = {
'form-TOTAL_FORMS': '4',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '4',
'form-0-username': 'John',
'form-0-serial': '1',
'form-1-username': 'Paul',
'form-1-serial': '2',
'form-2-username': 'George',
'form-2-serial': '3',
'form-3-username': 'Ringo',
'form-3-serial': '5',
}
delete_all_ids = {
'form-0-DELETE': '1',
'form-1-DELETE': '1',
'form-2-DELETE': '1',
'form-3-DELETE': '1',
}
def test_init_database(self):
""" Add test data to database via formset """
formset = self.NormalFormset(self.data)
self.assertTrue(formset.is_valid())
self.assertEqual(len(formset.save()), 4)
def test_no_delete(self):
""" Verify base formset doesn't modify database """
# reload database
self.test_init_database()
# pass standard data dict & see none updated
data = dict(self.data)
data['form-INITIAL_FORMS'] = 4
data.update({
'form-%d-id' % i: user.pk
for i, user in enumerate(User.objects.all())
})
formset = self.NormalFormset(data, queryset=User.objects.all())
self.assertTrue(formset.is_valid())
self.assertEqual(len(formset.save()), 0)
self.assertEqual(len(User.objects.all()), 4)
def test_all_delete(self):
""" Verify base formset honors DELETE field """
# reload database
self.test_init_database()
# create data dict with all fields marked for deletion
data = dict(self.data)
data['form-INITIAL_FORMS'] = 4
data.update({
'form-%d-id' % i: user.pk
for i, user in enumerate(User.objects.all())
})
data.update(self.delete_all_ids)
formset = self.NormalFormset(data, queryset=User.objects.all())
self.assertTrue(formset.is_valid())
self.assertEqual(len(formset.save()), 0)
self.assertEqual(len(User.objects.all()), 0)
def test_custom_delete(self):
""" Verify DeleteFormset ignores DELETE field and uses form method """
# reload database
self.test_init_database()
# Create formset with custom Delete function
# create data dict with all fields marked for deletion
data = dict(self.data)
data['form-INITIAL_FORMS'] = 4
data.update({
'form-%d-id' % i: user.pk
for i, user in enumerate(User.objects.all())
})
data.update(self.delete_all_ids)
formset = self.DeleteFormset(data, queryset=User.objects.all())
# verify two were deleted
self.assertTrue(formset.is_valid())
self.assertEqual(len(formset.save()), 0)
self.assertEqual(len(User.objects.all()), 2)
# verify no "odd" PKs left
odd_ids = [user.pk for user in User.objects.all() if user.pk % 2]
self.assertEqual(len(odd_ids), 0)
class RedeleteTests(TestCase):
def test_resubmit(self):
u = User.objects.create(username='foo', serial=1)
us = UserSite.objects.create(user=u, data=7)
formset_cls = inlineformset_factory(User, UserSite, fields="__all__")
data = {
'serial': '1',
'username': 'foo',
'usersite_set-TOTAL_FORMS': '1',
'usersite_set-INITIAL_FORMS': '1',
'usersite_set-MAX_NUM_FORMS': '1',
'usersite_set-0-id': str(us.pk),
'usersite_set-0-data': '7',
'usersite_set-0-user': 'foo',
'usersite_set-0-DELETE': '1'
}
formset = formset_cls(data, instance=u)
self.assertTrue(formset.is_valid())
formset.save()
self.assertEqual(UserSite.objects.count(), 0)
formset = formset_cls(data, instance=u)
# Even if the "us" object isn't in the DB any more, the form
# validates.
self.assertTrue(formset.is_valid())
formset.save()
self.assertEqual(UserSite.objects.count(), 0)
def test_delete_already_deleted(self):
u = User.objects.create(username='foo', serial=1)
us = UserSite.objects.create(user=u, data=7)
formset_cls = inlineformset_factory(User, UserSite, fields="__all__")
data = {
'serial': '1',
'username': 'foo',
'usersite_set-TOTAL_FORMS': '1',
'usersite_set-INITIAL_FORMS': '1',
'usersite_set-MAX_NUM_FORMS': '1',
'usersite_set-0-id': str(us.pk),
'usersite_set-0-data': '7',
'usersite_set-0-user': 'foo',
'usersite_set-0-DELETE': '1'
}
formset = formset_cls(data, instance=u)
us.delete()
self.assertTrue(formset.is_valid())
formset.save()
self.assertEqual(UserSite.objects.count(), 0)
|
|
import os
import os.path as op
import re
import warnings
import numpy as np
import pytest
from mne import read_evokeds, Epochs, create_info
from mne.io import read_raw_fif, RawArray
from mne.utils import (warn, set_log_level, set_log_file, filter_out_warnings,
verbose, _get_call_line, use_log_level, catch_logging,
logger, check)
from mne.utils._logging import _frame_info
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
fname_raw = op.join(base_dir, 'test_raw.fif')
fname_evoked = op.join(base_dir, 'test-ave.fif')
fname_log = op.join(base_dir, 'test-ave.log')
fname_log_2 = op.join(base_dir, 'test-ave-2.log')
@verbose
def _fun(verbose=None):
logger.debug('Test')
def test_frame_info(capsys, monkeypatch):
"""Test _frame_info."""
stack = _frame_info(100)
assert 2 < len(stack) < 100
this, pytest_line = stack[:2]
assert re.match('^test_logging:[1-9][0-9]$', this) is not None, this
assert 'pytest' in pytest_line
capsys.readouterr()
with use_log_level('debug', add_frames=4):
_fun()
out, _ = capsys.readouterr()
out = out.replace('\n', ' ')
assert re.match(
'.*pytest'
'.*test_logging:[2-9][0-9] '
'.*test_logging:[1-9][0-9] :.*Test', out) is not None, this
monkeypatch.setattr('inspect.currentframe', lambda: None)
assert _frame_info(1) == ['unknown']
def test_how_to_deal_with_warnings():
"""Test filter some messages out of warning records."""
with pytest.warns(UserWarning, match='bb') as w:
warnings.warn("aa warning", UserWarning)
warnings.warn("bb warning", UserWarning)
warnings.warn("bb warning", RuntimeWarning)
warnings.warn("aa warning", UserWarning)
filter_out_warnings(w, category=UserWarning, match='aa')
filter_out_warnings(w, category=RuntimeWarning)
assert len(w) == 1
def clean_lines(lines=[]):
"""Scrub filenames for checking logging output (in test_logging)."""
return [line if 'Reading ' not in line else 'Reading test file'
for line in lines]
def test_logging_options(tmp_path):
"""Test logging (to file)."""
with use_log_level(None): # just ensure it's set back
with pytest.raises(ValueError, match="Invalid value for the 'verbose"):
set_log_level('foo')
tempdir = str(tmp_path)
test_name = op.join(tempdir, 'test.log')
with open(fname_log, 'r') as old_log_file:
# [:-1] used to strip an extra "No baseline correction applied"
old_lines = clean_lines(old_log_file.readlines())
old_lines.pop(-1)
with open(fname_log_2, 'r') as old_log_file_2:
old_lines_2 = clean_lines(old_log_file_2.readlines())
old_lines_2.pop(14)
old_lines_2.pop(-1)
if op.isfile(test_name):
os.remove(test_name)
# test it one way (printing default off)
set_log_file(test_name)
set_log_level('WARNING')
# should NOT print
evoked = read_evokeds(fname_evoked, condition=1)
with open(test_name) as fid:
assert (fid.readlines() == [])
# should NOT print
evoked = read_evokeds(fname_evoked, condition=1, verbose=False)
with open(test_name) as fid:
assert (fid.readlines() == [])
# should NOT print
evoked = read_evokeds(fname_evoked, condition=1, verbose='WARNING')
with open(test_name) as fid:
assert (fid.readlines() == [])
# SHOULD print
evoked = read_evokeds(fname_evoked, condition=1, verbose=True)
with open(test_name, 'r') as new_log_file:
new_lines = clean_lines(new_log_file.readlines())
assert new_lines == old_lines
set_log_file(None) # Need to do this to close the old file
os.remove(test_name)
# now go the other way (printing default on)
set_log_file(test_name)
set_log_level('INFO')
# should NOT print
evoked = read_evokeds(fname_evoked, condition=1, verbose='WARNING')
with open(test_name) as fid:
assert (fid.readlines() == [])
# should NOT print
evoked = read_evokeds(fname_evoked, condition=1, verbose=False)
with open(test_name) as fid:
assert (fid.readlines() == [])
# SHOULD print
evoked = read_evokeds(fname_evoked, condition=1)
with open(test_name, 'r') as new_log_file:
new_lines = clean_lines(new_log_file.readlines())
assert new_lines == old_lines
# check to make sure appending works (and as default, raises a warning)
set_log_file(test_name, overwrite=False)
with pytest.warns(RuntimeWarning, match='appended to the file'):
set_log_file(test_name)
evoked = read_evokeds(fname_evoked, condition=1)
with open(test_name, 'r') as new_log_file:
new_lines = clean_lines(new_log_file.readlines())
assert new_lines == old_lines_2
# make sure overwriting works
set_log_file(test_name, overwrite=True)
# this line needs to be called to actually do some logging
evoked = read_evokeds(fname_evoked, condition=1)
del evoked
with open(test_name, 'r') as new_log_file:
new_lines = clean_lines(new_log_file.readlines())
assert new_lines == old_lines
with catch_logging() as log:
pass
assert log.getvalue() == ''
@pytest.mark.parametrize('verbose', (True, False))
def test_verbose_method(verbose):
"""Test for gh-8772."""
# raw
raw = read_raw_fif(fname_raw, verbose=verbose)
with catch_logging() as log:
raw.load_data(verbose=True)
log = log.getvalue()
assert 'Reading 0 ... 14399' in log
with catch_logging() as log:
raw.load_data(verbose=False)
log = log.getvalue()
assert log == ''
# epochs
events = np.array([[raw.first_samp + 200, 0, 1]], int)
epochs = Epochs(raw, events, verbose=verbose)
with catch_logging() as log:
epochs.drop_bad(verbose=True)
log = log.getvalue()
assert '0 bad epochs dropped' in log
epochs = Epochs(raw, events, verbose=verbose)
with catch_logging() as log:
epochs.drop_bad(verbose=False)
log = log.getvalue()
assert log == ''
def test_warn(capsys, tmp_path, monkeypatch):
"""Test the smart warn() function."""
with pytest.warns(RuntimeWarning, match='foo'):
warn('foo')
captured = capsys.readouterr()
assert captured.out == '' # gh-5592
assert captured.err == '' # this is because pytest.warns took it already
# test ignore_namespaces
bad_name = tmp_path / 'bad.fif'
raw = RawArray(np.zeros((1, 1)), create_info(1, 1000., 'eeg'))
with pytest.warns(RuntimeWarning, match='filename') as ws:
raw.save(bad_name)
assert len(ws) == 1
assert 'test_logging.py' in ws[0].filename # this file (it's in tests/)
def warn_wrap(msg):
warn(msg, ignore_namespaces=())
monkeypatch.setattr(check, 'warn', warn_wrap)
with pytest.warns(RuntimeWarning, match='filename') as ws:
raw.save(bad_name, overwrite=True)
assert len(ws) == 1
assert 'test_logging.py' not in ws[0].filename # this file
assert '_logging.py' in ws[0].filename # where `mne.utils.warn` lives
def test_get_call_line():
"""Test getting a call line."""
@verbose
def foo(verbose=None):
return _get_call_line()
for v in (None, True):
my_line = foo(verbose=v) # testing
assert my_line == 'my_line = foo(verbose=v) # testing'
def bar():
return _get_call_line()
my_line = bar() # testing more
assert my_line == 'my_line = bar() # testing more'
def test_verbose_strictness():
"""Test that the verbose decorator is strict about usability."""
@verbose
def bad_verbose():
pass
with pytest.raises(RuntimeError, match='does not accept'):
bad_verbose()
class Okay:
@verbose
def meth_1(self): # allowed because it should just use self.verbose
logger.info('meth_1')
@verbose
def meth_2(self, verbose=None):
logger.info('meth_2')
o = Okay()
with pytest.raises(RuntimeError, match=r'does not have self\.verbose'):
o.meth_1() # should raise, no verbose attr yet
o.verbose = False
with catch_logging() as log:
o.meth_1()
o.meth_2()
log = log.getvalue()
assert log == ''
with catch_logging() as log:
o.meth_2(verbose=True)
log = log.getvalue()
assert 'meth_2' in log
o.verbose = True
with catch_logging() as log:
o.meth_1()
o.meth_2()
log = log.getvalue()
assert 'meth_1' in log
assert 'meth_2' in log
with catch_logging() as log:
o.meth_2(verbose=False)
log = log.getvalue()
assert log == ''
|
|
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from synapse.api.errors import LimitExceededError
from synapse.util.async import sleep
import collections
import contextlib
import logging
logger = logging.getLogger(__name__)
class FederationRateLimiter(object):
def __init__(self, clock, window_size, sleep_limit, sleep_msec,
reject_limit, concurrent_requests):
"""
Args:
clock (Clock)
window_size (int): The window size in milliseconds.
sleep_limit (int): The number of requests received in the last
`window_size` milliseconds before we artificially start
delaying processing of requests.
sleep_msec (int): The number of milliseconds to delay processing
of incoming requests by.
reject_limit (int): The maximum number of requests that can be
queued for processing before we start rejecting requests with
a 429 Too Many Requests response.
concurrent_requests (int): The number of concurrent requests to
process.
"""
self.clock = clock
self.window_size = window_size
self.sleep_limit = sleep_limit
self.sleep_msec = sleep_msec
self.reject_limit = reject_limit
self.concurrent_requests = concurrent_requests
self.ratelimiters = {}
def ratelimit(self, host):
"""Used to ratelimit an incoming request from given host
Example usage:
with rate_limiter.ratelimit(origin) as wait_deferred:
yield wait_deferred
# Handle request ...
Args:
host (str): Origin of incoming request.
Returns:
_PerHostRatelimiter
"""
return self.ratelimiters.setdefault(
host,
_PerHostRatelimiter(
clock=self.clock,
window_size=self.window_size,
sleep_limit=self.sleep_limit,
sleep_msec=self.sleep_msec,
reject_limit=self.reject_limit,
concurrent_requests=self.concurrent_requests,
)
).ratelimit()
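# Illustrative sketch (not part of Synapse): constructing a limiter with the
# parameters documented above. The clock must provide time_msec(); all the
# numbers here are hypothetical.
def _example_federation_rate_limiter(clock):
    return FederationRateLimiter(
        clock=clock,
        window_size=1000,        # look at the last second of requests
        sleep_limit=10,          # start delaying after 10 requests per window
        sleep_msec=500,          # delay each such request by 500 ms
        reject_limit=50,         # reject with a 429 once 50 requests are queued
        concurrent_requests=3,   # process at most 3 requests at once
    )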
class _PerHostRatelimiter(object):
def __init__(self, clock, window_size, sleep_limit, sleep_msec,
reject_limit, concurrent_requests):
self.clock = clock
self.window_size = window_size
self.sleep_limit = sleep_limit
self.sleep_msec = sleep_msec
self.reject_limit = reject_limit
self.concurrent_requests = concurrent_requests
self.sleeping_requests = set()
self.ready_request_queue = collections.OrderedDict()
self.current_processing = set()
self.request_times = []
def is_empty(self):
time_now = self.clock.time_msec()
self.request_times[:] = [
r for r in self.request_times
if time_now - r < self.window_size
]
return not (
self.ready_request_queue
or self.sleeping_requests
or self.current_processing
or self.request_times
)
@contextlib.contextmanager
def ratelimit(self):
# `contextlib.contextmanager` takes a generator and turns it into a
# context manager. The generator should only yield once, with a value
# to be returned by the manager.
# Exceptions will be reraised at the yield.
request_id = object()
ret = self._on_enter(request_id)
try:
yield ret
finally:
self._on_exit(request_id)
def _on_enter(self, request_id):
time_now = self.clock.time_msec()
self.request_times[:] = [
r for r in self.request_times
if time_now - r < self.window_size
]
queue_size = len(self.ready_request_queue) + len(self.sleeping_requests)
if queue_size > self.reject_limit:
raise LimitExceededError(
retry_after_ms=int(
self.window_size / self.sleep_limit
),
)
self.request_times.append(time_now)
def queue_request():
if len(self.current_processing) > self.concurrent_requests:
logger.debug("Ratelimit [%s]: Queue req", id(request_id))
queue_defer = defer.Deferred()
self.ready_request_queue[request_id] = queue_defer
return queue_defer
else:
return defer.succeed(None)
logger.debug(
"Ratelimit [%s]: len(self.request_times)=%d",
id(request_id), len(self.request_times),
)
if len(self.request_times) > self.sleep_limit:
logger.debug(
"Ratelimit [%s]: sleeping req",
id(request_id),
)
ret_defer = sleep(self.sleep_msec/1000.0)
self.sleeping_requests.add(request_id)
def on_wait_finished(_):
logger.debug(
"Ratelimit [%s]: Finished sleeping",
id(request_id),
)
self.sleeping_requests.discard(request_id)
queue_defer = queue_request()
return queue_defer
ret_defer.addBoth(on_wait_finished)
else:
ret_defer = queue_request()
def on_start(r):
logger.debug(
"Ratelimit [%s]: Processing req",
id(request_id),
)
self.current_processing.add(request_id)
return r
def on_err(r):
self.current_processing.discard(request_id)
return r
def on_both(r):
# Ensure that we've properly cleaned up.
self.sleeping_requests.discard(request_id)
self.ready_request_queue.pop(request_id, None)
return r
ret_defer.addCallbacks(on_start, on_err)
ret_defer.addBoth(on_both)
return ret_defer
def _on_exit(self, request_id):
logger.debug(
"Ratelimit [%s]: Processed req",
id(request_id),
)
self.current_processing.discard(request_id)
try:
request_id, deferred = self.ready_request_queue.popitem()
self.current_processing.add(request_id)
deferred.callback(None)
except KeyError:
pass
|
|
import numpy as np
import argparse
import re
import collections
readid_re = re.compile('seq.(\d+)([ab]?)')
class ChromosomeAlnCounter:
"""Accumulates counts of aligments by chromosome."""
def __init__(self):
"""Create a new ChromosomeAlnCounter, with no arguments."""
self.mapping = {}
self.chromosomes = []
def add_aln(self, chromosome):
"""Increment the count for the given chromosome."""
if chromosome not in self.mapping:
self.mapping[chromosome] = 0
self.chromosomes.append(chromosome)
self.mapping[chromosome] += 1
def results(self):
"""Return the accumulated counts as a list of tuples.
Each tuple is of the format (chromosome, count), and gives the
count of alignments for a chromosome. There is one tuple for
each chromosome, and they are returned in the order that the
chromosomes were first seen.
"""
return [ (c, self.mapping[c]) for c in self.chromosomes ]
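# Illustrative sketch (hypothetical chromosome names): counts are returned in
# the order in which each chromosome was first seen.
def _example_chromosome_counts():
    counter = ChromosomeAlnCounter()
    for chrom in ('chr1', 'chr2', 'chr1'):
        counter.add_aln(chrom)
    return counter.results()  # [('chr1', 2), ('chr2', 1)]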
class AlignmentPart:
"""Once part of an alignment. May represent a forward, reverse, or
joined read."""
def __init__(self, starts, ends, sequence):
self.starts = starts
self.ends = ends
self.sequence = sequence
def __str__(self):
return str(self.__dict__)
def __repr__(self):
return 'AlignmentPart' + repr(self.__dict__)
class Alignment:
"""A RUM alignment.
An Alignment contains enough information to identify a read and
describe one mapping of the read to the genome. It contains the
read number, the chromosome and strand it mapped to, and then one
or two 'parts' (forward, reverse, and joined) which contain the
coordinates and the actual sequence. The valid combinations of
parts are:
* forward: Alignment for the forward read only
* reverse: Alignment for the reverse read only
* joined: An overlapping alignment for the forward and reverse
reads, which has been joined together.
* forward and reverse: Non-overlapping alignment for both the
forward and reverse read.
"""
def __init__(self, line=None, read_num=None, chromosome=None, strand=None,
forward=None, reverse=None, joined=None):
if (line is not None):
(readid, chromosome, loc_str, strand, sequence) = line.split('\t', 4)
m = readid_re.match(readid)
if m is None:
raise Exception("%s doesn't look like a read id" % readid)
starts = []
ends = []
for loc in loc_str.split(', '):
(fwd, rev) = loc.split('-')
starts.append(int(fwd))
ends.append(int(rev))
self.read_num = int(m.group(1))
self.chromosome = chromosome
self.strand = strand
ab = m.group(2)
part = AlignmentPart(starts, ends, sequence)
self.forward = None
self.reverse = None
self.joined = None
if ab == 'a': self.forward = part
elif ab == 'b': self.reverse = part
else: self.joined = part
else:
self.read_num = read_num
self.chromosome = chromosome
self.strand = strand
self.forward = forward
self.reverse = reverse
self.joined = joined
def parts(self):
parts = []
if self.forward is not None: parts.append(self.forward)
if self.reverse is not None: parts.append(self.reverse)
if self.joined is not None: parts.append(self.joined)
return parts
    def __maybe_write_part(self, out, part, direction):
        if part is None:
            return
        locs = ', '.join('{:d}-{:d}'.format(start, end)
                         for (start, end) in zip(part.starts, part.ends))
        out.write("seq.{:d}{:s}\t{:s}\t{:s}\t{:s}\t{:s}".format(
            self.read_num, direction, self.chromosome, locs, self.strand, part.sequence))
def write(self, out):
self.__maybe_write_part(out, self.forward, 'a')
self.__maybe_write_part(out, self.reverse, 'b')
self.__maybe_write_part(out, self.joined, '')
def __str__(self):
return str(self.__dict__)
def __repr__(self):
return repr(self.__dict__)
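# Illustrative sketch of parsing one RUM line with Alignment (the read id,
# coordinates and sequence below are hypothetical, not from a real file):
#
#     aln = Alignment(line="seq.7a\tchr1\t100-149, 200-249\t+\tACGT\n")
#     aln.read_num             # -> 7
#     aln.forward.starts       # -> [100, 200]
#     aln.forward.ends         # -> [149, 249]
#     aln.reverse, aln.joined  # -> (None, None); the 'a' suffix marks a forward read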
def read_coverage(cov_file):
"""Determine the total number of bases covered.
Reads in the given coverage file and computes the total number of
bases covered, returning that value as an int.
"""
header = cov_file.next()
footprint = 0
for line in cov_file:
(chromosome, start, end, cov) = line.split("\t")
start = int(start)
end = int(end)
footprint += end - start
return footprint
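# Illustrative note (hypothetical values): a coverage line such as
# "chr1\t100\t150\t3" contributes 150 - 100 = 50 bases to the footprint;
# the first (header) line of the file is skipped.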
def aln_iter(lines):
alns = (Alignment(line=line) for line in lines)
last = alns.next()
for aln in alns:
if (last is None):
last = aln
elif (last.forward is not None and
aln.reverse is not None and
last.read_num == aln.read_num and
last.chromosome == aln.chromosome and
last.strand == aln.strand):
yield Alignment(
read_num = last.read_num,
chromosome = last.chromosome,
strand = last.strand,
forward = last.forward,
reverse = aln.reverse)
last = None
else:
yield last
last = aln
if last is not None:
yield last
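# aln_iter merges a forward-only alignment with the immediately following
# reverse-only alignment into one Alignment when the read number, chromosome
# and strand all match; every other alignment is yielded unchanged. For
# example (hypothetical read ids), consecutive lines for seq.7a and seq.7b on
# the same chromosome and strand come out as a single Alignment with both the
# forward and reverse parts set.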
def unique_stats(rum_unique, n):
print "Reading RUM_Unique"
fwd_only = np.zeros(n + 1, dtype=bool)
rev_only = np.zeros(n + 1, dtype=bool)
joined = np.zeros(n + 1, dtype=bool)
unjoined = np.zeros(n + 1, dtype=bool)
chr_counts = ChromosomeAlnCounter()
counter = 0
for aln in aln_iter(rum_unique):
counter += 1
if (counter % 100000) == 0:
print " {:d}".format(counter)
chr_counts.add_aln(aln.chromosome)
i = aln.read_num
if aln.joined is not None:
joined[i] = True
else:
fwd = aln.forward is not None
rev = aln.reverse is not None
if fwd and rev:
unjoined[i] = True
elif fwd:
fwd_only[i] = True
elif rev:
rev_only[i] = True
stats = {
'fwd_only' : sum(fwd_only),
'rev_only' : sum(rev_only),
'joined' : sum(joined),
'unjoined' : sum(unjoined),
}
stats['consistent'] = stats['joined'] + stats['unjoined']
stats['fwd'] = stats['fwd_only'] + stats['consistent']
stats['rev'] = stats['rev_only'] + stats['consistent']
stats['any'] = stats['fwd_only'] + stats['rev_only'] + stats['consistent']
return (add_percents(stats, n), chr_counts.results())
def nu_stats(rum_nu, n):
fwd = np.zeros(n + 1, dtype=bool)
rev = np.zeros(n + 1, dtype=bool)
both = np.zeros(n + 1, dtype=bool)
chr_counts = ChromosomeAlnCounter()
counter = 0
for aln in aln_iter(rum_nu):
counter += 1
if (counter % 100000) == 0:
print " {:d}".format(counter)
chr_counts.add_aln(aln.chromosome)
i = aln.read_num
if aln.forward is not None:
fwd[i] = True
if aln.reverse is not None:
rev[i] = True
if (aln.joined is not None or
(aln.forward is not None and
aln.reverse is not None)):
both[i] = True
stats = {
'fwd' : sum(fwd & ~(rev | both)),
'rev' : sum(rev & ~(fwd | both)),
'consistent' : sum(both)
}
stats['any'] = sum(fwd | rev | both)
return (add_percents(stats, n), chr_counts.results())
def add_percents(stats, n):
result = {}
for k in stats:
result[k] = stats[k]
result['pct_' + k] = float(stats[k]) * 100.0 / float(n)
return result
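# Worked example (hypothetical numbers): add_percents({'fwd': 25}, 100)
# returns {'fwd': 25, 'pct_fwd': 25.0}.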
def get_cov_stats(cov_unique, cov_nu, genome_size):
cov_u = read_coverage(cov_unique)
cov_nu = read_coverage(cov_nu)
stats = add_percents({
'cov_u' : cov_u,
'cov_nu' : cov_nu
}, genome_size)
stats['genome_size'] = genome_size
return stats
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--rum-unique', required=True, type=file)
parser.add_argument('--rum-nu', required=True, type=file)
parser.add_argument('--cov-unique', required=True, type=file)
parser.add_argument('--cov-nu', required=True, type=file)
parser.add_argument('--max-seq', required=True, type=int)
parser.add_argument('--genome-size', required=True, type=int)
args = parser.parse_args()
(ustats, u_chr_counts) = unique_stats(args.rum_unique, args.max_seq)
print """
UNIQUE MAPPERS
--------------
Both forward and reverse mapped consistently: %(consistent)d (%(pct_consistent).2f%%)
  - do overlap: %(joined)d (%(pct_joined).2f%%)
  - don't overlap: %(unjoined)d (%(pct_unjoined).2f%%)
Number of forward mapped only: %(fwd_only)d
Number of reverse mapped only: %(rev_only)d
Number of forward total: %(fwd)d (%(pct_fwd).2f%%)
Number of reverse total: %(rev)d (%(pct_rev).2f%%)
At least one of forward or reverse mapped: %(any)d (%(pct_any).2f%%)
""" % ustats
(nustats, nu_chr_counts) = nu_stats(args.rum_nu, args.max_seq)
print """
NON-UNIQUE MAPPERS
------------------
Total number forward only ambiguous: %(fwd)d (%(pct_fwd).2f%%)
Total number reverse only ambiguous: %(rev)d (%(pct_rev).2f%%)
Total number consistent ambiguous: %(consistent)d (%(pct_consistent).2f%%)
""" % nustats
combined = {
'fwd' : ustats['fwd'] + nustats['fwd'] + nustats['consistent'],
'rev' : ustats['rev'] + nustats['rev'] + nustats['consistent'],
'consistent' : ustats['consistent'] + nustats['consistent'],
'any' : ( ustats['any'] +
nustats['fwd'] + nustats['rev'] + nustats['consistent'])
}
combined = add_percents(combined, args.max_seq)
print """
TOTAL
-----
Total number forward: %(fwd)d (%(pct_fwd).2f%%)
Total number reverse: %(rev)d (%(pct_rev).2f%%)
Total number consistent: %(consistent)d (%(pct_consistent).2f%%)
At least one of forward or reverse mapped: %(any)d (%(pct_any).2f%%)
""" % combined
cov = get_cov_stats(args.cov_unique, args.cov_nu, args.genome_size)
print """
Genome size: {genome_size:,d}
Number of bases covered by unique mappers: {cov_u:,d} ({pct_cov_u:.2f}%)
Number of bases covered by non-unique mappers: {cov_nu:,d} ({pct_cov_nu:.2f}%)
""".format(**cov)
print """
RUM_Unique reads per chromosome
-------------------------------"""
for (c, x) in u_chr_counts:
print '{:10s} {:10d}'.format(c, x)
print """
RUM_NU reads per chromosome
---------------------------"""
for (c, x) in nu_chr_counts:
print '{:10s} {:10d}'.format(c, x)
if __name__ == '__main__':
main()
|
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.cloud.logging_v2.proto import (
logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2,
)
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class ConfigServiceV2Stub(object):
"""Service for configuring sinks used to route log entries.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.ListSinks = channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/ListSinks",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksResponse.FromString,
)
self.GetSink = channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetSink",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetSinkRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString,
)
self.CreateSink = channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateSink",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateSinkRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString,
)
self.UpdateSink = channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateSink",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateSinkRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString,
)
self.DeleteSink = channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/DeleteSink",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteSinkRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.ListExclusions = channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/ListExclusions",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsResponse.FromString,
)
self.GetExclusion = channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetExclusion",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetExclusionRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString,
)
self.CreateExclusion = channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateExclusion",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateExclusionRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString,
)
self.UpdateExclusion = channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateExclusion",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateExclusionRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString,
)
self.DeleteExclusion = channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/DeleteExclusion",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
class ConfigServiceV2Servicer(object):
"""Service for configuring sinks used to route log entries.
"""
def ListSinks(self, request, context):
"""Lists sinks.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetSink(self, request, context):
"""Gets a sink.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CreateSink(self, request, context):
"""Creates a sink that exports specified log entries to a destination. The
export of newly-ingested log entries begins immediately, unless the sink's
`writer_identity` is not permitted to write to the destination. A sink can
export log entries only from the resource owning the sink.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateSink(self, request, context):
"""Updates a sink. This method replaces the following fields in the existing
sink with values from the new sink: `destination`, and `filter`.
The updated sink might also have a new `writer_identity`; see the
`unique_writer_identity` field.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteSink(self, request, context):
"""Deletes a sink. If the sink has a unique `writer_identity`, then that
service account is also deleted.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListExclusions(self, request, context):
"""Lists all the exclusions in a parent resource.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetExclusion(self, request, context):
"""Gets the description of an exclusion.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CreateExclusion(self, request, context):
"""Creates a new exclusion in a specified parent resource.
Only log entries belonging to that resource can be excluded.
You can have up to 10 exclusions in a resource.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateExclusion(self, request, context):
"""Changes one or more properties of an existing exclusion.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteExclusion(self, request, context):
"""Deletes an exclusion.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def add_ConfigServiceV2Servicer_to_server(servicer, server):
rpc_method_handlers = {
"ListSinks": grpc.unary_unary_rpc_method_handler(
servicer.ListSinks,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.FromString,
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksResponse.SerializeToString,
),
"GetSink": grpc.unary_unary_rpc_method_handler(
servicer.GetSink,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetSinkRequest.FromString,
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString,
),
"CreateSink": grpc.unary_unary_rpc_method_handler(
servicer.CreateSink,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateSinkRequest.FromString,
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString,
),
"UpdateSink": grpc.unary_unary_rpc_method_handler(
servicer.UpdateSink,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateSinkRequest.FromString,
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString,
),
"DeleteSink": grpc.unary_unary_rpc_method_handler(
servicer.DeleteSink,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteSinkRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
"ListExclusions": grpc.unary_unary_rpc_method_handler(
servicer.ListExclusions,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsRequest.FromString,
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsResponse.SerializeToString,
),
"GetExclusion": grpc.unary_unary_rpc_method_handler(
servicer.GetExclusion,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetExclusionRequest.FromString,
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString,
),
"CreateExclusion": grpc.unary_unary_rpc_method_handler(
servicer.CreateExclusion,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateExclusionRequest.FromString,
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString,
),
"UpdateExclusion": grpc.unary_unary_rpc_method_handler(
servicer.UpdateExclusion,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateExclusionRequest.FromString,
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString,
),
"DeleteExclusion": grpc.unary_unary_rpc_method_handler(
servicer.DeleteExclusion,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
"google.logging.v2.ConfigServiceV2", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
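# A minimal, illustrative sketch of serving this servicer (the executor size
# and port below are arbitrary assumptions, not part of the generated module):
#
#     from concurrent import futures
#     server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
#     add_ConfigServiceV2Servicer_to_server(ConfigServiceV2Servicer(), server)
#     server.add_insecure_port("[::]:50051")
#     server.start()
#     server.wait_for_termination()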
|
|
import os
import weakref
from ufoLib import UFOReader, UFOLibError
from defcon.objects.base import BaseObject
pngSignature = "\x89PNG\r\n\x1a\n"
class DataSet(BaseObject):
"""
This object manages all contents of the data directory in the font.
**This object posts the following notifications:**
===============
Name
===============
DataSet.Changed
===============
"""
changeNotificationName = "DataSet.Changed"
representationFactories = {}
def __init__(self, font=None):
self._font = None
if font is not None:
self._font = weakref.ref(font)
super(DataSet, self).__init__()
self.beginSelfNotificationObservation()
self._data = {}
self._scheduledForDeletion = {}
# --------------
# Parent Objects
# --------------
def getParent(self):
return self.font
def _get_font(self):
if self._font is not None:
return self._font()
return None
font = property(_get_font, doc="The :class:`Font` that this object belongs to.")
# ----------
# File Names
# ----------
def _get_fileNames(self):
return list(self._data.keys())
def _set_fileNames(self, fileNames):
assert not self._data
for fileName in fileNames:
self._data[fileName] = _dataDict()
fileNames = property(_get_fileNames, _set_fileNames, doc="A list of all file names. This should not be set externally.")
# -------------
# Dict Behavior
# -------------
def __getitem__(self, fileName):
if self._data[fileName]["data"] is None:
path = self.font.path
reader = UFOReader(path)
path = os.path.join("data", fileName)
data = reader.readBytesFromPath(path)
onDiskModTime = reader.getFileModificationTime(path)
self._data[fileName] = _dataDict(data=data, onDisk=True, onDiskModTime=onDiskModTime)
return self._data[fileName]["data"]
def __setitem__(self, fileName, data):
assert data is not None
onDisk = False
onDiskModTime = None
if fileName in self._scheduledForDeletion:
assert fileName not in self._data
self._data[fileName] = self._scheduledForDeletion.pop(fileName)
if fileName in self._data:
n = self[fileName] # force it to load so that the stamping is correct
onDisk = self._data[fileName]["onDisk"]
onDiskModTime = self._data[fileName]["onDiskModTime"]
del self._data[fileName] # now remove it
self._data[fileName] = _dataDict(data=data, dirty=True, onDisk=onDisk, onDiskModTime=onDiskModTime)
self.dirty = True
def __delitem__(self, fileName):
        n = self[fileName] # force it to load so that the stamping is correct
self._scheduledForDeletion[fileName] = dict(self._data.pop(fileName))
self.dirty = True
# ----
# Save
# ----
def getSaveProgressBarTickCount(self, formatVersion):
"""
Get the number of ticks that will be used by a progress bar
in the save method. This method should not be called externally.
Subclasses may override this method to implement custom saving behavior.
"""
return 0
def save(self, writer, saveAs=False, progressBar=None):
"""
Save data. This method should not be called externally.
Subclasses may override this method to implement custom saving behavior.
"""
if saveAs:
font = self.font
if font is not None and font.path is not None and os.path.exists(font.path):
reader = UFOReader(font.path)
readerDataDirectoryListing = reader.getDataDirectoryListing()
for fileName, data in list(self._data.items()):
path = os.path.join("data", fileName)
if data["data"] is not None or fileName not in readerDataDirectoryListing:
continue
writer.copyFromReader(reader, path, path)
for fileName in self._scheduledForDeletion:
try:
path = os.path.join("data", fileName)
writer.removeFileForPath(path)
except UFOLibError:
# this will be raised if the file doesn't exist.
                # instead of trying to maintain a list of what is
                # in the UFO vs. what is in memory, simply fail and
                # move on when something can't be deleted because
                # it isn't in the UFO.
pass
self._scheduledForDeletion.clear()
reader = UFOReader(writer.path)
for fileName, data in list(self._data.items()):
if not data["dirty"]:
continue
path = os.path.join("data", fileName)
writer.writeBytesToPath(path, data["data"])
data["dirty"] = False
data["onDisk"] = True
data["onDiskModTime"] = reader.getFileModificationTime(os.path.join("data", fileName))
self.dirty = False
# ---------------------
# External Edit Support
# ---------------------
def testForExternalChanges(self, reader):
"""
Test for external changes. This should not be called externally.
"""
filesOnDisk = reader.getDataDirectoryListing()
modified = []
added = []
deleted = []
for fileName in set(filesOnDisk) - set(self.fileNames):
            if fileName not in self._scheduledForDeletion:
added.append(fileName)
elif not self._scheduledForDeletion[fileName]["onDisk"]:
added.append(fileName)
elif self._scheduledForDeletion[fileName]["onDiskModTime"] != reader.getFileModificationTime(os.path.join("data", fileName)):
added.append(fileName)
for fileName, data in list(self._data.items()):
# file on disk and has been loaded
if fileName in filesOnDisk and data["data"] is not None:
path = os.path.join("data", fileName)
newModTime = reader.getFileModificationTime(path)
if newModTime != data["onDiskModTime"]:
newData = reader.readBytesFromPath(path)
if newData != data["data"]:
modified.append(fileName)
continue
# file removed
if fileName not in filesOnDisk and data["onDisk"]:
deleted.append(fileName)
continue
return modified, added, deleted
def reloadData(self, fileNames):
"""
Reload specified data. This should not be called externally.
"""
for fileName in fileNames:
self._data[fileName] = _dataDict()
data = self[fileName]
# ------------------------
# Notification Observation
# ------------------------
def endSelfNotificationObservation(self):
super(DataSet, self).endSelfNotificationObservation()
self._font = None
# -----------------------------
# Serialization/Deserialization
# -----------------------------
def getDataForSerialization(self, **kwargs):
simple_get = lambda key: self[key]
getters = []
for k in self.fileNames:
getters.append((k, simple_get))
return self._serialize(getters, **kwargs)
def setDataFromSerialization(self, data):
self._data = {}
self._scheduledForDeletion = {}
for k in data:
self[k] = data[k]
def _dataDict(data=None, dirty=False, onDisk=True, onDiskModTime=None):
return dict(data=data, dirty=dirty, onDisk=onDisk, onDiskModTime=onDiskModTime)
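# A minimal usage sketch (the file name and payload are hypothetical):
# DataSet behaves like a dict keyed by data-directory file names, loading
# bytes lazily from the UFO on first access.
#
#     font.data["com.example.tool/settings.txt"] = "payload"
#     font.data["com.example.tool/settings.txt"]      # -> "payload"
#     del font.data["com.example.tool/settings.txt"]  # removed at next save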
# -----
# Tests
# -----
def _testRead():
"""
>>> from defcon import Font
>>> from defcon.test.testTools import getTestFontPath
>>> path = getTestFontPath()
>>> font = Font(path)
>>> for fileName in sorted(font.data.fileNames):
... if True in [i.startswith(".") for i in fileName.split(os.sep)]:
... continue
... fileName
'com.typesupply.defcon.test.directory/file 1.txt'
'com.typesupply.defcon.test.directory/sub directory/file 2.txt'
'com.typesupply.defcon.test.file'
>>> font.data["com.typesupply.defcon.test.directory/file 1.txt"]
'This is file 1.'
>>> font.data["com.typesupply.defcon.test.directory/sub directory/file 2.txt"]
'This is file 2.'
>>> font.data["com.typesupply.defcon.test.file"]
'This is a top level test file.'
"""
def _testWrite():
"""
>>> from defcon.test.testTools import makeTestFontCopy, tearDownTestFontCopy
>>> from defcon import Font
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> font.data["com.typesupply.defcon.test.newdirectory/file.txt"] = "hello."
>>> del font.data["com.typesupply.defcon.test.directory/sub directory/file 2.txt"]
>>> font.save()
>>> p = os.path.join(path, "data", "com.typesupply.defcon.test.newdirectory/file.txt")
>>> os.path.exists(p)
True
>>> f = open(p, "rb")
>>> t = f.read()
>>> f.close()
>>> t
'hello.'
>>> p = os.path.join(path, "data", "com.typesupply.defcon.test.directory/sub directory/file 2.txt")
>>> os.path.exists(p)
False
>>> tearDownTestFontCopy()
"""
def _testSaveAs():
"""
>>> from defcon import Font
>>> from defcon.test.testTools import getTestFontPath, getTestFontCopyPath, tearDownTestFontCopy
>>> path = getTestFontPath()
>>> font = Font(path)
>>> saveAsPath = getTestFontCopyPath(path)
>>> font.save(saveAsPath)
>>> dataDirectory = os.path.join(saveAsPath, "data")
>>> os.path.exists(dataDirectory)
True
>>> os.path.exists(os.path.join(dataDirectory, "com.typesupply.defcon.test.directory/file 1.txt"))
True
>>> os.path.exists(os.path.join(dataDirectory, "com.typesupply.defcon.test.directory/sub directory/file 2.txt"))
True
>>> os.path.exists(os.path.join(dataDirectory, "com.typesupply.defcon.test.file"))
True
>>> tearDownTestFontCopy(saveAsPath)
"""
def _testExternalChanges():
"""
>>> from ufoLib import UFOReader
>>> from defcon.test.testTools import makeTestFontCopy, tearDownTestFontCopy
>>> from defcon import Font
# remove in memory and scan
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> del font.data["com.typesupply.defcon.test.file"]
>>> reader = UFOReader(path)
>>> font.data.testForExternalChanges(reader)
([], [], [])
>>> tearDownTestFontCopy()
# add in memory and scan
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> font.data["com.typesupply.defcon.test.file2"] = "blah"
>>> reader = UFOReader(path)
>>> font.data.testForExternalChanges(reader)
([], [], [])
>>> tearDownTestFontCopy()
# modify in memory and scan
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> font.data["com.typesupply.defcon.test.file"] = "blah"
>>> reader = UFOReader(path)
>>> font.data.testForExternalChanges(reader)
([], [], [])
>>> tearDownTestFontCopy()
# remove on disk and scan
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> image = font.data["com.typesupply.defcon.test.file"]
>>> os.remove(os.path.join(path, "data", "com.typesupply.defcon.test.file"))
>>> font.data.testForExternalChanges(reader)
([], [], ['com.typesupply.defcon.test.file'])
>>> tearDownTestFontCopy()
# add on disk and scan
>>> import shutil
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> source = os.path.join(path, "data", "com.typesupply.defcon.test.file")
>>> dest = os.path.join(path, "data", "com.typesupply.defcon.test.file2")
>>> shutil.copy(source, dest)
>>> font.data.testForExternalChanges(reader)
([], ['com.typesupply.defcon.test.file2'], [])
>>> tearDownTestFontCopy()
# modify on disk and scan
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> d = font.data["com.typesupply.defcon.test.file"]
>>> filePath = os.path.join(path, "data", "com.typesupply.defcon.test.file")
>>> f = open(filePath, "wb")
>>> f.write("blah")
>>> f.close()
>>> reader = UFOReader(path)
>>> font.data.testForExternalChanges(reader)
(['com.typesupply.defcon.test.file'], [], [])
>>> tearDownTestFontCopy()
"""
def _testReloadData():
"""
>>> from ufoLib import UFOReader
>>> from defcon.test.testTools import makeTestFontCopy, tearDownTestFontCopy
>>> from defcon import Font
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> d = font.data["com.typesupply.defcon.test.file"]
>>> filePath = os.path.join(path, "data", "com.typesupply.defcon.test.file")
>>> newData = "blah"
>>> f = open(filePath, "wb")
>>> f.write(newData)
>>> f.close()
>>> font.data.reloadData(["com.typesupply.defcon.test.file"])
>>> data = font.data["com.typesupply.defcon.test.file"]
>>> data == newData
True
>>> tearDownTestFontCopy()
"""
if __name__ == "__main__":
import doctest
doctest.testmod()
|