A fill-in-the-middle (FIM) code dataset sample. Columns: file_name (large_string, lengths 4–140), prefix (large_string, lengths 0–39k), suffix (large_string, lengths 0–36.1k), middle (large_string, lengths 0–29.4k), fim_type (large_string, 4 classes: random_line_split, identifier_name, conditional_block, identifier_body).
streaming.py

# -*- coding: utf-8 -*-
"""
/***************************************************************************
Client for streaming based WPS.
It exploits asynchronous capabilities of WPS and QGIS for visualizing
intermediate results from a WPS
-------------------
copyright : (C) 2012 by Germán Carrillo (GeoTux)
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import QColor, QMessageBox
from PyQt4.QtNetwork import QNetworkRequest, QNetworkAccessManager
from qgis.core import (QgsNetworkAccessManager, QgsVectorLayer, QgsRasterLayer,
QgsMapLayerRegistry, QgsFeature, QgsGeometry)
from qgis.gui import QgsRubberBand, QgsVertexMarker
from wpslib.processdescription import getFileExtension, isMimeTypeVector, isMimeTypeRaster
from wpslib.executionresult import decodeBase64
from functools import partial
import apicompat
import tempfile
import os, platform
import glob
class Streaming(QObject):
""" Class for keeping track of stream chunks and
providing methods for handling and visualizing them
"""
# Define SIGNALS/SLOTS
playlistHandled = pyqtSignal(dict)
urlReady = pyqtSignal(str, int, str)
dataReady = pyqtSignal(str, int)
def __init__(self, parent, iface, chunks, playlistUrl, mimeType, encoding):
super(Streaming, self).__init__()
self.DEBUG = True
# Variables from other classes
self.parent = parent # For GUI access
self.iface = iface
self.chunks = chunks
self.playlistUrl = playlistUrl
self.mimeType = mimeType
self.encoding = encoding
# Internal variables
self.__endTag = "#PLAYLIST-END"
self.__exceptionTag = "#EXCEPTION"
self.__exceptionUrl = ""
self.__exceptionFound = False
        self.__playlistFinished = False # Did the end tag appear?
self.__bytesInlastReply = 0 # To compare last and current reply sizes
self.__loadedChunks = 0 # For keeping track of # of loaded (to local vars) chunks
self.__deliveredChunks = 0 # For keeping track of # of loaded (to the map) chunks
self.__bFirstChunk = True
self.__features = {} # {0:[f0,f1,f2], 1:[f0,f1]}
self.__bGeomMulti = False # Is the geometry multi{point|line|polygon}
self.__geometryType = "" # Values: "Point","LineString","Polygon","Unknown", "NoGeometry"
self.__tmpGeometry = {} # For visualization purposes {chunkId1: rb1, chunkId2: rb2 }
self.__memoryLayer = None # The whole merged data
# For rasters only
self.__legend = self.iface.legendInterface()
self.__groupIndex = 0
self.__chunksDir = None
self.__virtualFile = "" # Virtual raster file path
if isMimeTypeRaster(self.mimeType, True) != None:
self.__chunksDir = tempfile.mkdtemp(prefix="tmpChunks")
# Other objects
self.timer = QTimer()
self.timer.setInterval(1 * 1000) # 1 second
self.QNAM4Playlist = QNetworkAccessManager()
self.QNAM4Chunks = QNetworkAccessManager()
self.QNAM4Exception = QNetworkAccessManager()
# SIGNAL/SLOT connections
self.playlistHandled.connect(self.fetchChunks)
self.urlReady.connect(self.fetchResult)
self.dataReady.connect(self.loadData)
self.timer.timeout.connect(partial(self.fetchPlaylist, self.playlistUrl))
self.QNAM4Playlist.finished.connect(self.handlePlaylist)
self.QNAM4Chunks.finished.connect(self.handleChunk)
self.QNAM4Exception.finished.connect(self.handleException)
#self.QNAM4Playlist = QgsNetworkAccessManager.instance()
#theReply2.error.connect(self.handleErrors)
# GUI
self.parent.progressBar.setRange(0,0)
self.parent.lblProcess.setText("Reading output playlist...")
def start(self):
""" Start fetching """
self.fetchPlaylist(self.playlistUrl) # First call
def stop(self):
""" Stop fetching """
self.timer.stop()
self.QNAM4Playlist.finished.disconnect(self.handlePlaylist)
self.QNAM4Chunks.finished.disconnect(self.handleChunk)
self.removeTempGeometry(self.__geometryType)
if self.DEBUG: print "Stop streaming!"
def validateCompletedStream(self):
""" Is the stream complete (Did the end tag appeared?) """
#return (self.__loadedChunks >= self.chunks and self.chunks != 0)
return self.__playlistFinished
def allChunksDelivered(self):
""" Are all chunks already loaded into the map? """
return ((self.__loadedChunks == self.__deliveredChunks and
self.__playlistFinished) or self.__exceptionFound)
def fetchPlaylist(self, playlistLink):
url = QUrl(playlistLink)
self.QNAM4Playlist.get(QNetworkRequest(url)) # SLOT: handlePlaylist
def handlePlaylist(self, reply):
""" Parse the chunk URLs and update the loadedChunks counter """
# Check if there is redirection
reDir = reply.attribute(QNetworkRequest.RedirectionTargetAttribute).toUrl()
if not reDir.isEmpty():
self.fetchPlaylist(reDir.toString())
return
# Parse URLs only if there is new data in the reply
if reply.bytesAvailable() > self.__bytesInlastReply:
if self.DEBUG: print " Parsing the playlist..."
startFrom = reply.bytesAvailable() - self.__bytesInlastReply # Delta in bytes
self.__bytesInlastReply = reply.bytesAvailable()
newURLs = self.parseURLs(reply, startFrom)
else:
if self.DEBUG: print " No new data in the playlist..."
newURLs = {}
# Store new URLs
if len(newURLs) > 0:
self.__loadedChunks += len(newURLs)
if self.chunks:
self.parent.progressBar.setRange(0,self.chunks)
if self.DEBUG: print str(self.__loadedChunks) + " chunks loaded" + ((" out of " + str(self.chunks)) if self.chunks else "")
# If not complete, make additional calls
if not self.validateCompletedStream():
if not self.timer.isActive():
self.timer.start()
if self.DEBUG: print "Timer started..."
else:
self.timer.stop()
self.QNAM4Playlist.finished.disconnect(self.handlePlaylist)
if self.DEBUG: print "Playlist finished!"
if self.allChunksDelivered():
self.finishLoading()
if self.__exceptionFound:
self.fetchException()
if len(newURLs) > 0:
self.playlistHandled.emit(newURLs) # SLOT: fetchChunks
def parseURLs(self, reply, startFrom):
""" Get a dict of new IDs:URLs from the current playlist (newURLs) """
newURLs = {} # {0:URL0, 1:URL1, ...}
count = 0
#Get the delta and start reading it
allData = reply.readAll()
allData = allData.right(startFrom) # Get rid of old data
response = QTextStream(allData, QIODevice.ReadOnly)
data = response.readLine()
# Parse
while (data):
data = str(data.split("\n")[0])
if data:
if "#" in data: # It's a playlist comment
if self.__endTag in data:
self.__playlistFinished = True
elif self.__exceptionTag in data:
if self.DEBUG: print "Exception found!"
self.__exceptionFound = True
self.__exceptionUrl = data.split(":",1)[1].strip()
else:
newURLs[count+self.__loadedChunks] = data
count += 1
data = response.readLine()
return newURLs
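    # For reference, the playlist consumed above is plain text with one chunk
    # URL per line plus "#"-prefixed control tags. A hypothetical example
    # (URLs are illustrative, not from the source):
    #   http://example.org/wps/output/chunk0.gml
    #   http://example.org/wps/output/chunk1.gml
    #   #PLAYLIST-END
    # and, if the process fails server-side:
    #   #EXCEPTION: http://example.org/wps/output/exception.xml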
def fetchChunks(self, newURLs):
""" Fetch each url """
for chunkId in newURLs:
self.urlReady.emit(self.encoding, chunkId, newURLs[chunkId]) # SLOT: fetchResult
def fetchResult(self, encoding, chunkId, fileLink):
""" Send the GET request """
url = QUrl(fileLink)
theReply2 = self.QNAM4Chunks.get(QNetworkRequest(url))
theReply2.setProperty("chunkId", pystring(chunkId))
theReply2.setProperty("encoding", pystring(encoding))
def handleErrors(self, error): # TODO connect it
if self.DEBUG: print "ERROR!!!", error
def fetchException(self):
""" Send the GET request for the exception """
url = QUrl(self.__exceptionUrl)
theReply3 = self.QNAM4Exception.get(QNetworkRequest(url))
def handleException(self, reply):
""" Display the exception """
# Check if there is redirection
reDir = reply.attribute(QNetworkRequest.RedirectionTargetAttribute).toUrl()
if not reDir.isEmpty():
self.__exceptionUrl = reDir.toString()
self.fetchException()
return
resultXML = reply.readAll().data()
self.parent.setStatusLabel('error')
self.parent.progressBar.setMinimum(0)
self.parent.progressBar.setMaximum(100)
self.parent.errorHandler(resultXML)
def handleChunk(self, reply):
""" Store the file received """
#reply.deleteLater() # Recommended way to delete the reply
chunkId = reply.property("chunkId").toInt()[0]
encoding = reply.property("encoding").toString()
# Check if there is redirection
reDir = reply.attribute(QNetworkRequest.RedirectionTargetAttribute).toUrl()
if not reDir.isEmpty():
self.urlReady.emit(encoding, chunkId, reDir.toString())
return
if self.DEBUG: print "GET chunk", chunkId
# Update progressBar
if self.chunks:
self.parent.progressBar.setValue(self.__deliveredChunks + 1)
self.parent.lblProcess.setText("Downloading chunks... ("+str(self.__deliveredChunks + 1)+"/"+str(self.chunks)+")")
# Get a unique temporary file name
tmpFile = tempfile.NamedTemporaryFile(prefix="base64",
suffix=getFileExtension(self.mimeType), dir=self.__chunksDir, delete=False )
# TODO: Check if the file name already exists!!!
# Write the data to the temporary file
outFile = QFile(tmpFile.name)
outFile.open(QIODevice.WriteOnly)
outFile.write(reply.readAll())
outFile.close()
# Decode?
if encoding == "base64":
resultFile = decodeBase64(tmpFile.name, self.mimeType, self.__chunksDir)
else:
resultFile = tmpFile.name
# Finally, load the data
if self.DEBUG: print "READY to be loaded (", resultFile, ", chunkId:", chunkId, ")"
self.dataReady.emit(resultFile, chunkId) # SLOT: loadData
def loadData(self, resultFile, chunkId):
""" Load data to the map """
if isMimeTypeVector(self.mimeType, True) != None:
# Memory layer:
geometryTypes = ["Point","LineString","Polygon","Unknown", "NoGeometry"]
vlayer = QgsVectorLayer(resultFile, "chunk", "ogr")
if self.__bFirstChunk:
self.__bFirstChunk = False
self.__geometryType = geometryTypes[vlayer.geometryType()]
self.__bGeomMulti = vlayer.wkbType() in [4,5,6,11,12,13]
self.__memoryLayer = QgsVectorLayer(self.__geometryType,"Streamed data","memory")
self.__memoryLayer.dataProvider().addAttributes(vlayer.pendingFields().values())
self.__memoryLayer.updateFieldMap()
provider = vlayer.dataProvider()
allAttrs = provider.attributeIndexes()
vlayer.select(allAttrs)
            # Visualize temporary geometries during the download process
            # Don't add temporary geometries if this is the last chunk
if self.DEBUG: print "Loaded chunkId:",chunkId
res = self.__memoryLayer.dataProvider().addFeatures( [feat for feat in vlayer] )
self.__deliveredChunks += 1
if not self.allChunksDelivered():
inFeat = QgsFeature()
inGeom = QgsGeometry()
self.createTempGeometry(chunkId, self.__geometryType)
while provider.nextFeature( inFeat ):
inGeom = inFeat.geometry()
featList = self.extractAsSingle(self.__geometryType, inGeom) if self.__bGeomMulti else [inGeom]
for geom in featList:
self.addTempGeometry(chunkId, self.__geometryType, geom)
else:
self.finishLoading()
# Raster data
elif isMimeTypeRaster(self.mimeType, True) != None:
# We can directly attach the new layer
if self.__bFirstChunk:
self.__bFirstChunk = False
self.__groupIndex = self.__legend.addGroup("Streamed-raster")
rLayer = QgsRasterLayer(resultFile, "raster_"+str(chunkId))
bLoaded = QgsMapLayerRegistry.instance().addMapLayer(rLayer)
self.stretchRaster(rLayer)
self.__legend.moveLayer(rLayer, self.__groupIndex + 1)
self.__deliveredChunks += 1
if self.allChunksDelivered():
self.finishLoading()
def finishLoading(self):
""" Finish the loading process, load the definite assembled layer """
if self.DEBUG: print "DONE!"
if not self.__bFirstChunk:
if isMimeTypeVector(self.mimeType, True) != None:
self.removeTempGeometry(self.__geometryType)
QgsMapLayerRegistry.instance().addMapLayer(self.__memoryLayer)
elif isMimeTypeRaster(self.mimeType, True) != None:
self.parent.lblProcess.setText("All tiles are loaded. Merging them...")
# Generate gdal virtual raster
# Code adapted from GdalTools (C) 2009 by L. Masini and G. Sucameli (Faunalia)
self.process = QProcess(self)
self.connect(self.process, SIGNAL("finished(int, QProcess::ExitStatus)"),
self.loadVirtualRaster)
#self.setProcessEnvironment(self.process) Required in Windows?
cmd = "gdalbuildvrt"
arguments = pystringlist()
if platform.system() == "Windows" and cmd[-3:] == ".py":
command = cmd[:-3] + ".bat"
else:
command = cmd
tmpFile = tempfile.NamedTemporaryFile(prefix="virtual",
suffix=".vrt")
self.__virtualFile = tmpFile.name
arguments.append(self.__virtualFile)
rasters = self.getRasterFiles(self.__chunksDir,
getFileExtension(self.mimeType))
for raster in rasters:
arguments.append(raster)
self.process.start(command, arguments, QIODevice.ReadOnly)
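                # The spawned process is equivalent to running, e.g. (hypothetical paths):
                #   gdalbuildvrt /tmp/virtualXXXX.vrt /tmp/tmpChunksYYYY/chunk0.tif /tmp/tmpChunksYYYY/chunk1.tif ...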
if not self.__exceptionFound:
self.parent.setStatusLabel('finished')
self.parent.progressBar.setRange(0,100)
self.parent.progressBar.setValue(100)
def createTempGeometry(self, chunkId, geometryType):
""" Create rubber bands for rapid visualization of geometries """
if geometryType == "Polygon":
self.__tmpGeometry[chunkId] = QgsRubberBand(self.iface.mapCanvas(), True)
self.__tmpGeometry[chunkId].setColor( QColor( 0,255,0,255 ) )
self.__tmpGeometry[chunkId].setWidth( 2 )
if self.DEBUG: print "rubberBand created"
elif geometryType == "LineString":
self.__tmpGeometry[chunkId] = QgsRubberBand(self.iface.mapCanvas(), False)
self.__tmpGeometry[chunkId].setColor( QColor( 255,121,48,255 ) )
self.__tmpGeometry[chunkId].setWidth( 3 )
elif geometryType == "Point":
# In the case of points, they will be added as vertex objects later
self.__tmpGeometry[chunkId] = []
def addTempGeometry(self, chunkId, geometryType, geometry):
""" Add geometries as rubber bands or vertex objects """
if geometryType == "Polygon" or geometryType == "LineString":
self.__tmpGeometry[chunkId].addGeometry(geometry, None)
elif geometryType == "Point":
vertex = QgsVertexMarker(self.iface.mapCanvas())
vertex.setCenter(geometry.asPoint())
vertex.setColor(QColor(0,255,0))
vertex.setIconSize(6)
vertex.setIconType(QgsVertexMarker.ICON_BOX) # or ICON_CROSS, ICON_X
vertex.setPenWidth(3)
self.__tmpGeometry[chunkId].append(vertex)
def removeTempGeometry(self, geometryType):
""" Remove rubber bands or vertex objects from the map """
if geometryType == "Polygon" or geometryType == "LineString":
for chunkId in self.__tmpGeometry.keys():
self.iface.mapCanvas().scene().removeItem(self.__tmpGeometry[chunkId])
del self.__tmpGeometry[chunkId]
elif geometryType == "Point":
for chunkId in self.__tmpGeometry.keys():
if len( self.__tmpGeometry[chunkId] ) > 0:
for vertex in self.__tmpGeometry[chunkId]:
self.iface.mapCanvas().scene().removeItem(vertex)
del vertex
def extractAsSingle(self, geometryType, geom):
""" Extract multi geometries as single ones.
Required because of a QGIS bug regarding multipolygons and rubber bands
"""
# Code adapted from QGIS fTools plugin, (C) 2008-2011 Carson Farmer
multi_geom = QgsGeometry()
temp_geom = []
if geometryType == "Point":
multi_geom = geom.asMultiPoint()
for i in multi_geom:
temp_geom.append( QgsGeometry().fromPoint ( i ) )
elif geometryType == "LineString":
multi_geom = geom.asMultiPolyline()
for i in multi_geom:
temp_geom.append( QgsGeometry().fromPolyline( i ) )
elif geometryType == "Polygon":
multi_geom = geom.asMultiPolygon()
for i in multi_geom:
temp_geom.append( QgsGeometry().fromPolygon( i ) )
return temp_geom
def loadVirtualRaster(self, exitCode, status):
""" Load a virtual raster to QGIS """
if exitCode == 0:
self.__legend.setGroupVisible( self.__groupIndex, False )
rLayer = QgsRasterLayer(self.__virtualFile, "virtual")
bLoaded = QgsMapLayerRegistry.instance().addMapLayer(rLayer)
self.stretchRaster(rLayer)
self.process.kill()
def stretchRaster(self, raster):
raster.setMinimumMaximumUsingLastExtent()
raster.setContrastEnhancementAlgorithm(1)
raster.triggerRepaint()
def setProcessEnvironment(self, process):
""" From GdalTools. Set environment variables for running gdalbuildvrt """
envvar_list = {
"PATH" : self.getGdalBinPath(),
"PYTHONPATH" : self.getGdalPymodPath()
}
if self.DEBUG: print envvar_list
sep = os.pathsep
for name, val in envvar_list.iteritems():
if val == None or val == "":
continue
envval = os.getenv(name)
if envval == None or envval == "":
envval = str(val)
elif not pystring( envval ).split( sep ).contains( val, Qt.CaseInsensitive ):
envval += "%s%s" % (sep, str(val))
else:
envval = None
if envval != None:
os.putenv( name, envval )
if False: # not needed because os.putenv() has already updated the environment for new child processes
env = QProcess.systemEnvironment()
if env.contains( QRegExp( "^%s=(.*)" % name, Qt.CaseInsensitive ) ):
                    env.replaceInStrings( QRegExp( "^%s=(.*)" % name, Qt.CaseInsensitive ), "%s=\\1%s%s" % (name, sep, str(val)) )
else:
env << "%s=%s" % (name, val)
process.setEnvironment( env )
def getRasterFiles(self, dir, extension):
rasters = pystringlist()
for name in glob.glob(dir + '/*' + extension):
rasters.append(name)
return rasters
def getGdalBinPath(self):
""" Retrieves GDAL binaries location """
settings = QSettings()
return settings.value( "/GdalTools/gdalPath", pystring( "" ) ).toString()
def getGdalPymodPath(self):
""" Retrieves GDAL python modules location """
settings = QSettings()
        return settings.value( "/GdalTools/gdalPymodPath", pystring( "" ) ).toString()
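A minimal sketch of how Streaming would be driven from the hosting WPS client. The caller is hypothetical and not part of the source: `dlg` is assumed to be the plugin dialog exposing progressBar, lblProcess, setStatusLabel and errorHandler, `iface` is the QGIS interface object, and the playlist URL and MIME type are illustrative.

streamer = Streaming(dlg, iface, 10,
                     "http://example.org/wps/playlist.txt",
                     "text/xml; subtype=gml/2.1.2", "utf-8")
streamer.start() # Poll the playlist every second and load chunks as they arrive
# ... later, e.g. from a Cancel button:
streamer.stop()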
FTS3Placement.py

from DIRAC import S_ERROR, S_OK, gLogger
from DIRAC.DataManagementSystem.private.FTSAbstractPlacement import FTSAbstractPlacement, FTSRoute
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getFTS3Servers
from DIRAC.ResourceStatusSystem.Client.ResourceStatus import ResourceStatus
import random
class FTS3Placement( FTSAbstractPlacement ):
"""
This class manages all the FTS strategies, routes and what not
"""
__serverPolicy = "Random"
__nextServerID = 0
__serverList = None
__maxAttempts = 0
def __init__( self, csPath = None, ftsHistoryViews = None ):
"""
Call the init of the parent, and initialize the list of FTS3 servers
"""
self.log = gLogger.getSubLogger( "FTS3Placement" )
super( FTS3Placement, self ).__init__( csPath = csPath, ftsHistoryViews = ftsHistoryViews )
srvList = getFTS3Servers()
if not srvList['OK']:
self.log.error( srvList['Message'] )
self.__serverList = srvList.get( 'Value', [] )
    self.__maxAttempts = len( self.__serverList )
self.rssClient = ResourceStatus()
def getReplicationTree( self, sourceSEs, targetSEs, size, strategy = None ):
""" For multiple source to multiple destination, find the optimal replication
strategy.
:param sourceSEs : list of source SE
:param targetSEs : list of destination SE
:param size : size of the File
:param strategy : which strategy to use
    :returns S_OK(dict) < route name : { dict with keys Ancestor, SourceSE, TargetSE, Strategy } >
For the time being, we are waiting for FTS3 to provide advisory mechanisms. So we just use
simple techniques
"""
# We will use a single random source
sourceSE = random.choice( sourceSEs )
tree = {}
for targetSE in targetSEs:
tree["%s#%s" % ( sourceSE, targetSE )] = { "Ancestor" : False, "SourceSE" : sourceSE,
"TargetSE" : targetSE, "Strategy" : "FTS3Simple" }
return S_OK( tree )
def refresh( self, ftsHistoryViews ):
"""
    Refresh, whatever that means... recalculate whatever you need,
    fetch the latest conf and what not.
"""
return super( FTS3Placement, self ).refresh( ftsHistoryViews = ftsHistoryViews )
def __failoverServerPolicy(self, attempt = 0):
"""
    Always returns the server at a given position (normally the first one)
:param attempt: position of the server in the list
"""
if attempt >= len( self.__serverList ):
raise Exception( "FTS3Placement.__failoverServerPolicy: attempt to reach non existing server index" )
return self.__serverList[attempt]
def __sequenceServerPolicy( self ):
"""
    Every time this policy is called, return the next server in the list
"""
fts3server = self.__serverList[self.__nextServerID]
self.__nextServerID = ( self.__nextServerID + 1 ) % len( self.__serverList )
return fts3server
def __randomServerPolicy(self):
"""
return a random server from the list
"""
return random.choice( self.__serverList )
def __chooseFTS3Server( self ):
"""
Choose the appropriate FTS3 server depending on the policy
"""
fts3Server = None
attempt = 0
    # FIXME : need to get real value from RSS
ftsServerStatus = True
    while not fts3Server and attempt < self.__maxAttempts:
if self.__serverPolicy == 'Random':
fts3Server = self.__randomServerPolicy()
elif self.__serverPolicy == 'Sequence':
fts3Server = self.__sequenceServerPolicy()
elif self.__serverPolicy == 'Failover':
fts3Server = self.__failoverServerPolicy( attempt = attempt )
else:
self.log.error( 'Unknown server policy %s. Using Random instead' % self.__serverPolicy )
fts3Server = self.__randomServerPolicy()
if not ftsServerStatus:
self.log.warn( 'FTS server %s is not in good shape. Choose another one' % fts3Server )
fts3Server = None
attempt += 1
# FIXME : I need to get the FTS server status from RSS
# ftsStatusFromRss = rss.ftsStatusOrSomethingLikeThat
if fts3Server:
return S_OK( fts3Server )
    return S_ERROR( "Could not find an FTS3 server (max attempts reached)" )
def findRoute( self, sourceSE, targetSE ):
""" Find the appropriate route from point A to B
:param sourceSE : source SE
:param targetSE : destination SE
:returns S_OK(FTSRoute)
"""
fts3server = self.__chooseFTS3Server()
if not fts3server['OK']:
return fts3server
fts3server = fts3server['Value']
route = FTSRoute( sourceSE, targetSE, fts3server )
return S_OK( route )
def isRouteValid( self, route ):
"""
    FIXME: until RSS is ready, check the status manually
In FTS3, all routes are valid a priori.
If a route was not valid for some reason, then FTS would know it
    thanks to the blacklist sent by RSS, and would deal with it itself.
    :param route : FTSRoute
:returns S_OK or S_ERROR(reason)
"""
rAccess = self.rssClient.getStorageElementStatus( route.sourceSE, "ReadAccess" )
self.log.debug( "se read %s %s" % ( route.sourceSE, rAccess ) )
if not rAccess["OK"]:
self.log.error( rAccess["Message"] )
return rAccess
if rAccess["Value"][route.sourceSE]["ReadAccess"] not in ( "Active", "Degraded" ):
return S_ERROR( "Source SE is not readable" )
wAccess = self.rssClient.getStorageElementStatus( route.targetSE, "WriteAccess" )
self.log.debug( "se write %s %s" % ( route.targetSE, wAccess ) )
if not wAccess["OK"]:
self.log.error( wAccess["Message"] )
return wAccess
if wAccess["Value"][route.targetSE]["WriteAccess"] not in ( "Active", "Degraded" ):
return S_ERROR( "Target SE is not writable" )
    return S_OK()
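A minimal usage sketch, assuming an initialized DIRAC client environment; 'SOURCE-SE' and 'TARGET-SE' are purely illustrative storage element names, not from the source.

placement = FTS3Placement()
res = placement.findRoute( "SOURCE-SE", "TARGET-SE" )
if res['OK']:
  route = res['Value']
  check = placement.isRouteValid( route )
  if not check['OK']:
    gLogger.error( "Route rejected: %s" % check['Message'] )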
location.rs

/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use std::{convert::TryInto, path::PathBuf};
use common::{Location, SourceLocationKey};
use lsp_types::Url;
use crate::lsp_runtime_error::{LSPRuntimeError, LSPRuntimeResult};
pub fn to_lsp_location_of_graphql_literal(
location: Location,
root_dir: &PathBuf,
) -> LSPRuntimeResult<lsp_types::Location> {
Ok(to_contents_and_lsp_location_of_graphql_literal(location, root_dir)?.1)
}
pub fn to_contents_and_lsp_location_of_graphql_literal(
location: Location,
root_dir: &PathBuf,
) -> LSPRuntimeResult<(String, lsp_types::Location)> {
match location.source_location() {
SourceLocationKey::Embedded { path, index } => {
let path_to_fragment = root_dir.join(PathBuf::from(path.lookup()));
let uri = get_uri(&path_to_fragment)?;
let (file_contents, range) =
read_file_and_get_range(&path_to_fragment, index.try_into().unwrap())?;
Ok((file_contents, lsp_types::Location { uri, range }))
}
SourceLocationKey::Standalone { path } => {
let path_to_fragment = root_dir.join(PathBuf::from(path.lookup()));
let uri = get_uri(&path_to_fragment)?;
let (file_contents, range) = read_file_and_get_range(&path_to_fragment, 0)?;
Ok((file_contents, lsp_types::Location { uri, range }))
}
SourceLocationKey::Generated => Err(LSPRuntimeError::UnexpectedError(
"Cannot get location of a generated artifact".to_string(), | }
fn read_file_and_get_range(
path_to_fragment: &PathBuf,
index: usize,
) -> LSPRuntimeResult<(String, lsp_types::Range)> {
let file = std::fs::read(path_to_fragment)
.map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))?;
let file_contents =
std::str::from_utf8(&file).map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))?;
let response = extract_graphql::parse_chunks(file_contents);
let source = response.get(index).ok_or_else(|| {
LSPRuntimeError::UnexpectedError(format!(
"File {:?} does not contain enough graphql literals: {} needed; {} found",
path_to_fragment,
index,
response.len()
))
})?;
let lines = source.text.lines().enumerate();
let (line_count, last_line) = lines.last().ok_or_else(|| {
LSPRuntimeError::UnexpectedError(format!(
"Encountered empty graphql literal in {:?} (literal {})",
path_to_fragment, index
))
})?;
Ok((
source.text.to_string(),
lsp_types::Range {
start: lsp_types::Position {
line: source.line_index as u64,
character: source.column_index as u64,
},
end: lsp_types::Position {
line: (source.line_index + line_count) as u64,
character: last_line.len() as u64,
},
},
))
}
fn get_uri(path: &PathBuf) -> LSPRuntimeResult<Url> {
Url::parse(&format!(
"file://{}",
path.to_str()
.ok_or_else(|| LSPRuntimeError::UnexpectedError(format!(
"Could not cast path {:?} as string",
path
)))?
))
.map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))
} | )),
} | random_line_split |
location.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use std::{convert::TryInto, path::PathBuf};
use common::{Location, SourceLocationKey};
use lsp_types::Url;
use crate::lsp_runtime_error::{LSPRuntimeError, LSPRuntimeResult};
pub fn to_lsp_location_of_graphql_literal(
location: Location,
root_dir: &PathBuf,
) -> LSPRuntimeResult<lsp_types::Location> {
Ok(to_contents_and_lsp_location_of_graphql_literal(location, root_dir)?.1)
}
pub fn | (
location: Location,
root_dir: &PathBuf,
) -> LSPRuntimeResult<(String, lsp_types::Location)> {
match location.source_location() {
SourceLocationKey::Embedded { path, index } => {
let path_to_fragment = root_dir.join(PathBuf::from(path.lookup()));
let uri = get_uri(&path_to_fragment)?;
let (file_contents, range) =
read_file_and_get_range(&path_to_fragment, index.try_into().unwrap())?;
Ok((file_contents, lsp_types::Location { uri, range }))
}
SourceLocationKey::Standalone { path } => {
let path_to_fragment = root_dir.join(PathBuf::from(path.lookup()));
let uri = get_uri(&path_to_fragment)?;
let (file_contents, range) = read_file_and_get_range(&path_to_fragment, 0)?;
Ok((file_contents, lsp_types::Location { uri, range }))
}
SourceLocationKey::Generated => Err(LSPRuntimeError::UnexpectedError(
"Cannot get location of a generated artifact".to_string(),
)),
}
}
fn read_file_and_get_range(
path_to_fragment: &PathBuf,
index: usize,
) -> LSPRuntimeResult<(String, lsp_types::Range)> {
let file = std::fs::read(path_to_fragment)
.map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))?;
let file_contents =
std::str::from_utf8(&file).map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))?;
let response = extract_graphql::parse_chunks(file_contents);
let source = response.get(index).ok_or_else(|| {
LSPRuntimeError::UnexpectedError(format!(
"File {:?} does not contain enough graphql literals: {} needed; {} found",
path_to_fragment,
index,
response.len()
))
})?;
let lines = source.text.lines().enumerate();
let (line_count, last_line) = lines.last().ok_or_else(|| {
LSPRuntimeError::UnexpectedError(format!(
"Encountered empty graphql literal in {:?} (literal {})",
path_to_fragment, index
))
})?;
Ok((
source.text.to_string(),
lsp_types::Range {
start: lsp_types::Position {
line: source.line_index as u64,
character: source.column_index as u64,
},
end: lsp_types::Position {
line: (source.line_index + line_count) as u64,
character: last_line.len() as u64,
},
},
))
}
fn get_uri(path: &PathBuf) -> LSPRuntimeResult<Url> {
Url::parse(&format!(
"file://{}",
path.to_str()
.ok_or_else(|| LSPRuntimeError::UnexpectedError(format!(
"Could not cast path {:?} as string",
path
)))?
))
.map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))
}
| to_contents_and_lsp_location_of_graphql_literal | identifier_name |
location.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use std::{convert::TryInto, path::PathBuf};
use common::{Location, SourceLocationKey};
use lsp_types::Url;
use crate::lsp_runtime_error::{LSPRuntimeError, LSPRuntimeResult};
pub fn to_lsp_location_of_graphql_literal(
location: Location,
root_dir: &PathBuf,
) -> LSPRuntimeResult<lsp_types::Location> {
Ok(to_contents_and_lsp_location_of_graphql_literal(location, root_dir)?.1)
}
pub fn to_contents_and_lsp_location_of_graphql_literal(
location: Location,
root_dir: &PathBuf,
) -> LSPRuntimeResult<(String, lsp_types::Location)> {
match location.source_location() {
SourceLocationKey::Embedded { path, index } => |
SourceLocationKey::Standalone { path } => {
let path_to_fragment = root_dir.join(PathBuf::from(path.lookup()));
let uri = get_uri(&path_to_fragment)?;
let (file_contents, range) = read_file_and_get_range(&path_to_fragment, 0)?;
Ok((file_contents, lsp_types::Location { uri, range }))
}
SourceLocationKey::Generated => Err(LSPRuntimeError::UnexpectedError(
"Cannot get location of a generated artifact".to_string(),
)),
}
}
fn read_file_and_get_range(
path_to_fragment: &PathBuf,
index: usize,
) -> LSPRuntimeResult<(String, lsp_types::Range)> {
let file = std::fs::read(path_to_fragment)
.map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))?;
let file_contents =
std::str::from_utf8(&file).map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))?;
let response = extract_graphql::parse_chunks(file_contents);
let source = response.get(index).ok_or_else(|| {
LSPRuntimeError::UnexpectedError(format!(
"File {:?} does not contain enough graphql literals: {} needed; {} found",
path_to_fragment,
index,
response.len()
))
})?;
let lines = source.text.lines().enumerate();
let (line_count, last_line) = lines.last().ok_or_else(|| {
LSPRuntimeError::UnexpectedError(format!(
"Encountered empty graphql literal in {:?} (literal {})",
path_to_fragment, index
))
})?;
Ok((
source.text.to_string(),
lsp_types::Range {
start: lsp_types::Position {
line: source.line_index as u64,
character: source.column_index as u64,
},
end: lsp_types::Position {
line: (source.line_index + line_count) as u64,
character: last_line.len() as u64,
},
},
))
}
fn get_uri(path: &PathBuf) -> LSPRuntimeResult<Url> {
Url::parse(&format!(
"file://{}",
path.to_str()
.ok_or_else(|| LSPRuntimeError::UnexpectedError(format!(
"Could not cast path {:?} as string",
path
)))?
))
.map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))
}
| {
let path_to_fragment = root_dir.join(PathBuf::from(path.lookup()));
let uri = get_uri(&path_to_fragment)?;
let (file_contents, range) =
read_file_and_get_range(&path_to_fragment, index.try_into().unwrap())?;
Ok((file_contents, lsp_types::Location { uri, range }))
} | conditional_block |
location.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use std::{convert::TryInto, path::PathBuf};
use common::{Location, SourceLocationKey};
use lsp_types::Url;
use crate::lsp_runtime_error::{LSPRuntimeError, LSPRuntimeResult};
pub fn to_lsp_location_of_graphql_literal(
location: Location,
root_dir: &PathBuf,
) -> LSPRuntimeResult<lsp_types::Location> {
Ok(to_contents_and_lsp_location_of_graphql_literal(location, root_dir)?.1)
}
pub fn to_contents_and_lsp_location_of_graphql_literal(
location: Location,
root_dir: &PathBuf,
) -> LSPRuntimeResult<(String, lsp_types::Location)> {
match location.source_location() {
SourceLocationKey::Embedded { path, index } => {
let path_to_fragment = root_dir.join(PathBuf::from(path.lookup()));
let uri = get_uri(&path_to_fragment)?;
let (file_contents, range) =
read_file_and_get_range(&path_to_fragment, index.try_into().unwrap())?;
Ok((file_contents, lsp_types::Location { uri, range }))
}
SourceLocationKey::Standalone { path } => {
let path_to_fragment = root_dir.join(PathBuf::from(path.lookup()));
let uri = get_uri(&path_to_fragment)?;
let (file_contents, range) = read_file_and_get_range(&path_to_fragment, 0)?;
Ok((file_contents, lsp_types::Location { uri, range }))
}
SourceLocationKey::Generated => Err(LSPRuntimeError::UnexpectedError(
"Cannot get location of a generated artifact".to_string(),
)),
}
}
fn read_file_and_get_range(
path_to_fragment: &PathBuf,
index: usize,
) -> LSPRuntimeResult<(String, lsp_types::Range)> |
fn get_uri(path: &PathBuf) -> LSPRuntimeResult<Url> {
Url::parse(&format!(
"file://{}",
path.to_str()
.ok_or_else(|| LSPRuntimeError::UnexpectedError(format!(
"Could not cast path {:?} as string",
path
)))?
))
.map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))
}
| {
let file = std::fs::read(path_to_fragment)
.map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))?;
let file_contents =
std::str::from_utf8(&file).map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))?;
let response = extract_graphql::parse_chunks(file_contents);
let source = response.get(index).ok_or_else(|| {
LSPRuntimeError::UnexpectedError(format!(
"File {:?} does not contain enough graphql literals: {} needed; {} found",
path_to_fragment,
index,
response.len()
))
})?;
let lines = source.text.lines().enumerate();
let (line_count, last_line) = lines.last().ok_or_else(|| {
LSPRuntimeError::UnexpectedError(format!(
"Encountered empty graphql literal in {:?} (literal {})",
path_to_fragment, index
))
})?;
Ok((
source.text.to_string(),
lsp_types::Range {
start: lsp_types::Position {
line: source.line_index as u64,
character: source.column_index as u64,
},
end: lsp_types::Position {
line: (source.line_index + line_count) as u64,
character: last_line.len() as u64,
},
},
))
} | identifier_body |
historystore.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::ops::Deref;
use std::path::PathBuf;
use anyhow::Result;
use edenapi_types::HistoryEntry;
use types::Key;
use types::NodeInfo;
use crate::localstore::LocalStore;
use crate::types::StoreKey;
pub trait HgIdHistoryStore: LocalStore + Send + Sync {
fn get_node_info(&self, key: &Key) -> Result<Option<NodeInfo>>;
fn refresh(&self) -> Result<()>;
}
pub trait HgIdMutableHistoryStore: HgIdHistoryStore + Send + Sync {
fn add(&self, key: &Key, info: &NodeInfo) -> Result<()>;
fn flush(&self) -> Result<Option<Vec<PathBuf>>>;
fn add_entry(&self, entry: &HistoryEntry) -> Result<()> {
self.add(&entry.key, &entry.nodeinfo)
}
}
/// The `RemoteHistoryStore` trait indicates that data can be fetched over the network. Care must be
/// taken to avoid serially fetching data and instead data should be fetched in bulk via the
/// `prefetch` API.
pub trait RemoteHistoryStore: HgIdHistoryStore + Send + Sync {
/// Attempt to bring the data corresponding to the passed in keys to a local store.
///
/// When implemented on a pure remote store, like the `EdenApi`, the method will always fetch
/// everything that was asked. On a higher level store, such as the `MetadataStore`, this will
/// avoid fetching data that is already present locally.
fn prefetch(&self, keys: &[StoreKey]) -> Result<()>;
}
/// Implement `HgIdHistoryStore` for all types that can be `Deref` into a `HgIdHistoryStore`.
impl<T: HgIdHistoryStore + ?Sized, U: Deref<Target = T> + Send + Sync> HgIdHistoryStore for U {
fn get_node_info(&self, key: &Key) -> Result<Option<NodeInfo>> {
T::get_node_info(self, key)
}
fn | (&self) -> Result<()> {
T::refresh(self)
}
}
impl<T: HgIdMutableHistoryStore + ?Sized, U: Deref<Target = T> + Send + Sync>
HgIdMutableHistoryStore for U
{
fn add(&self, key: &Key, info: &NodeInfo) -> Result<()> {
T::add(self, key, info)
}
fn flush(&self) -> Result<Option<Vec<PathBuf>>> {
T::flush(self)
}
}
impl<T: RemoteHistoryStore + ?Sized, U: Deref<Target = T> + Send + Sync> RemoteHistoryStore for U {
fn prefetch(&self, keys: &[StoreKey]) -> Result<()> {
T::prefetch(self, keys)
}
}
| refresh | identifier_name |
historystore.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::ops::Deref;
use std::path::PathBuf;
use anyhow::Result;
use edenapi_types::HistoryEntry;
use types::Key;
use types::NodeInfo;
use crate::localstore::LocalStore;
use crate::types::StoreKey;
pub trait HgIdHistoryStore: LocalStore + Send + Sync {
fn get_node_info(&self, key: &Key) -> Result<Option<NodeInfo>>;
fn refresh(&self) -> Result<()>;
}
pub trait HgIdMutableHistoryStore: HgIdHistoryStore + Send + Sync {
fn add(&self, key: &Key, info: &NodeInfo) -> Result<()>;
fn flush(&self) -> Result<Option<Vec<PathBuf>>>;
fn add_entry(&self, entry: &HistoryEntry) -> Result<()> {
self.add(&entry.key, &entry.nodeinfo)
}
}
/// The `RemoteHistoryStore` trait indicates that data can be fetched over the network. Care must be
/// taken to avoid serially fetching data and instead data should be fetched in bulk via the
/// `prefetch` API.
pub trait RemoteHistoryStore: HgIdHistoryStore + Send + Sync {
/// Attempt to bring the data corresponding to the passed in keys to a local store.
///
/// When implemented on a pure remote store, like the `EdenApi`, the method will always fetch
/// everything that was asked. On a higher level store, such as the `MetadataStore`, this will
/// avoid fetching data that is already present locally.
fn prefetch(&self, keys: &[StoreKey]) -> Result<()>;
}
/// Implement `HgIdHistoryStore` for all types that can be `Deref` into a `HgIdHistoryStore`.
impl<T: HgIdHistoryStore + ?Sized, U: Deref<Target = T> + Send + Sync> HgIdHistoryStore for U {
fn get_node_info(&self, key: &Key) -> Result<Option<NodeInfo>> {
T::get_node_info(self, key)
}
fn refresh(&self) -> Result<()> |
}
impl<T: HgIdMutableHistoryStore + ?Sized, U: Deref<Target = T> + Send + Sync>
HgIdMutableHistoryStore for U
{
fn add(&self, key: &Key, info: &NodeInfo) -> Result<()> {
T::add(self, key, info)
}
fn flush(&self) -> Result<Option<Vec<PathBuf>>> {
T::flush(self)
}
}
impl<T: RemoteHistoryStore + ?Sized, U: Deref<Target = T> + Send + Sync> RemoteHistoryStore for U {
fn prefetch(&self, keys: &[StoreKey]) -> Result<()> {
T::prefetch(self, keys)
}
}
| {
T::refresh(self)
} | identifier_body |
historystore.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::ops::Deref;
use std::path::PathBuf;
use anyhow::Result;
use edenapi_types::HistoryEntry;
use types::Key;
use types::NodeInfo;
use crate::localstore::LocalStore;
use crate::types::StoreKey;
pub trait HgIdHistoryStore: LocalStore + Send + Sync {
fn get_node_info(&self, key: &Key) -> Result<Option<NodeInfo>>;
fn refresh(&self) -> Result<()>;
}
pub trait HgIdMutableHistoryStore: HgIdHistoryStore + Send + Sync {
fn add(&self, key: &Key, info: &NodeInfo) -> Result<()>;
fn flush(&self) -> Result<Option<Vec<PathBuf>>>;
fn add_entry(&self, entry: &HistoryEntry) -> Result<()> {
self.add(&entry.key, &entry.nodeinfo)
}
}
/// The `RemoteHistoryStore` trait indicates that data can be fetched over the network. Care must be
/// taken to avoid serially fetching data and instead data should be fetched in bulk via the
/// `prefetch` API.
pub trait RemoteHistoryStore: HgIdHistoryStore + Send + Sync {
/// Attempt to bring the data corresponding to the passed in keys to a local store.
///
/// When implemented on a pure remote store, like the `EdenApi`, the method will always fetch
/// everything that was asked. On a higher level store, such as the `MetadataStore`, this will
/// avoid fetching data that is already present locally.
fn prefetch(&self, keys: &[StoreKey]) -> Result<()>;
}
/// Implement `HgIdHistoryStore` for all types that can be `Deref` into a `HgIdHistoryStore`.
impl<T: HgIdHistoryStore + ?Sized, U: Deref<Target = T> + Send + Sync> HgIdHistoryStore for U {
fn get_node_info(&self, key: &Key) -> Result<Option<NodeInfo>> {
T::get_node_info(self, key)
}
fn refresh(&self) -> Result<()> {
T::refresh(self)
}
}
impl<T: HgIdMutableHistoryStore + ?Sized, U: Deref<Target = T> + Send + Sync>
HgIdMutableHistoryStore for U
{
fn add(&self, key: &Key, info: &NodeInfo) -> Result<()> {
T::add(self, key, info)
} | }
impl<T: RemoteHistoryStore + ?Sized, U: Deref<Target = T> + Send + Sync> RemoteHistoryStore for U {
fn prefetch(&self, keys: &[StoreKey]) -> Result<()> {
T::prefetch(self, keys)
}
} |
fn flush(&self) -> Result<Option<Vec<PathBuf>>> {
T::flush(self)
} | random_line_split |
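The doc comment on RemoteHistoryStore above stresses fetching in bulk through prefetch rather than serially per key. A sketch of that access pattern, written in Python only to keep the illustration compact; the remote/local objects and the fetch_many call are hypothetical stand-ins, not part of the Rust API:

class BulkHistoryStore:
    def __init__(self, remote, local):
        self.remote = remote  # hypothetical client with one bulk call
        self.local = local    # plain dict acting as the local store

    def prefetch(self, keys):
        # One round trip for all missing keys, instead of len(keys) calls.
        missing = [k for k in keys if k not in self.local]
        if missing:
            self.local.update(self.remote.fetch_many(missing))

    def get_node_info(self, key):
        # Mirrors Result<Option<NodeInfo>>: None means "not found".
        return self.local.get(key)

# Prefetch the whole batch once, then per-key reads hit the local store:
# store.prefetch(keys); infos = [store.get_node_info(k) for k in keys]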
option.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::option::*;
use core::kinds::marker;
use core::mem;
#[test]
fn test_get_ptr() {
unsafe {
let x = box 0i;
let addr_x: *const int = mem::transmute(&*x);
let opt = Some(x);
let y = opt.unwrap();
let addr_y: *const int = mem::transmute(&*y);
assert_eq!(addr_x, addr_y);
}
}
#[test]
fn test_get_str() {
let x = "test".to_string();
let addr_x = x.as_slice().as_ptr();
let opt = Some(x);
let y = opt.unwrap();
let addr_y = y.as_slice().as_ptr();
assert_eq!(addr_x, addr_y);
}
#[test]
fn test_get_resource() {
use std::rc::Rc;
use core::cell::RefCell;
struct R {
i: Rc<RefCell<int>>,
}
#[unsafe_destructor]
impl Drop for R {
fn drop(&mut self) {
let ii = &*self.i;
let i = *ii.borrow();
*ii.borrow_mut() = i + 1;
}
}
fn r(i: Rc<RefCell<int>>) -> R {
R {
i: i
}
}
let i = Rc::new(RefCell::new(0i));
{
let x = r(i.clone());
let opt = Some(x);
let _y = opt.unwrap();
}
assert_eq!(*i.borrow(), 1);
}
#[test]
fn test_option_dance() {
let x = Some(());
let mut y = Some(5i);
let mut y2 = 0;
for _x in x.iter() {
y2 = y.take().unwrap();
}
assert_eq!(y2, 5);
assert!(y.is_none());
}
#[test] #[should_fail]
fn test_option_too_much_dance() {
let mut y = Some(marker::NoCopy);
let _y2 = y.take().unwrap();
let _y3 = y.take().unwrap();
}
#[test]
fn test_and() {
let x: Option<int> = Some(1i);
assert_eq!(x.and(Some(2i)), Some(2));
assert_eq!(x.and(None::<int>), None);
let x: Option<int> = None;
assert_eq!(x.and(Some(2i)), None);
assert_eq!(x.and(None::<int>), None);
}
#[test]
fn test_and_then() {
let x: Option<int> = Some(1);
assert_eq!(x.and_then(|x| Some(x + 1)), Some(2));
assert_eq!(x.and_then(|_| None::<int>), None);
let x: Option<int> = None;
assert_eq!(x.and_then(|x| Some(x + 1)), None);
assert_eq!(x.and_then(|_| None::<int>), None);
}
#[test]
fn test_or() {
let x: Option<int> = Some(1);
assert_eq!(x.or(Some(2)), Some(1));
assert_eq!(x.or(None), Some(1));
let x: Option<int> = None;
assert_eq!(x.or(Some(2)), Some(2));
assert_eq!(x.or(None), None);
}
#[test]
fn test_or_else() {
let x: Option<int> = Some(1);
assert_eq!(x.or_else(|| Some(2)), Some(1));
assert_eq!(x.or_else(|| None), Some(1));
let x: Option<int> = None;
assert_eq!(x.or_else(|| Some(2)), Some(2));
assert_eq!(x.or_else(|| None), None);
}
#[test]
fn test_unwrap() {
assert_eq!(Some(1i).unwrap(), 1);
let s = Some("hello".to_string()).unwrap();
assert_eq!(s.as_slice(), "hello");
}
#[test]
#[should_fail]
fn test_unwrap_panic1() {
let x: Option<int> = None;
x.unwrap();
}
#[test]
#[should_fail]
fn test_unwrap_panic2() {
let x: Option<String> = None;
x.unwrap();
}
#[test]
fn test_unwrap_or() {
let x: Option<int> = Some(1);
assert_eq!(x.unwrap_or(2), 1);
let x: Option<int> = None;
assert_eq!(x.unwrap_or(2), 2);
}
#[test]
fn test_unwrap_or_else() {
let x: Option<int> = Some(1);
assert_eq!(x.unwrap_or_else(|| 2), 1);
let x: Option<int> = None;
assert_eq!(x.unwrap_or_else(|| 2), 2);
}
#[test]
fn test_iter() {
let val = 5i;
let x = Some(val);
let mut it = x.iter();
assert_eq!(it.size_hint(), (1, Some(1)));
assert_eq!(it.next(), Some(&val));
assert_eq!(it.size_hint(), (0, Some(0)));
assert!(it.next().is_none());
}
#[test]
fn test_mut_iter() {
let val = 5i;
let new_val = 11i;
let mut x = Some(val);
{
let mut it = x.iter_mut();
assert_eq!(it.size_hint(), (1, Some(1)));
match it.next() {
Some(interior) => {
assert_eq!(*interior, val);
*interior = new_val;
}
None => assert!(false),
}
assert_eq!(it.size_hint(), (0, Some(0)));
assert!(it.next().is_none());
}
assert_eq!(x, Some(new_val));
}
#[test]
fn test_ord() {
let small = Some(1.0f64);
let big = Some(5.0f64);
let nan = Some(0.0f64/0.0);
assert!(!(nan < big));
assert!(!(nan > big));
assert!(small < big);
assert!(None < big);
assert!(big > None);
}
#[test]
fn test_collect() {
let v: Option<Vec<int>> = range(0i, 0).map(|_| Some(0i)).collect();
assert!(v == Some(vec![]));
let v: Option<Vec<int>> = range(0i, 3).map(|x| Some(x)).collect();
assert!(v == Some(vec![0, 1, 2]));
let v: Option<Vec<int>> = range(0i, 3).map(|x| {
if x > 1 | else { Some(x) }
}).collect();
assert!(v == None);
// test that it does not take more elements than it needs
let mut functions = [|| Some(()), || None, || panic!()];
let v: Option<Vec<()>> = functions.iter_mut().map(|f| (*f)()).collect();
assert!(v == None);
}
| { None } | conditional_block |
option.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::option::*;
use core::kinds::marker;
use core::mem;
#[test]
fn test_get_ptr() {
unsafe {
let x = box 0i;
let addr_x: *const int = mem::transmute(&*x);
let opt = Some(x);
let y = opt.unwrap();
let addr_y: *const int = mem::transmute(&*y);
assert_eq!(addr_x, addr_y);
}
}
#[test]
fn test_get_str() {
let x = "test".to_string();
let addr_x = x.as_slice().as_ptr();
let opt = Some(x);
let y = opt.unwrap();
let addr_y = y.as_slice().as_ptr();
assert_eq!(addr_x, addr_y);
}
#[test]
fn test_get_resource() {
use std::rc::Rc;
use core::cell::RefCell;
struct R {
i: Rc<RefCell<int>>,
}
#[unsafe_destructor]
impl Drop for R {
fn drop(&mut self) {
let ii = &*self.i;
let i = *ii.borrow();
*ii.borrow_mut() = i + 1;
}
}
fn r(i: Rc<RefCell<int>>) -> R {
R {
i: i
}
} |
let i = Rc::new(RefCell::new(0i));
{
let x = r(i.clone());
let opt = Some(x);
let _y = opt.unwrap();
}
assert_eq!(*i.borrow(), 1);
}
#[test]
fn test_option_dance() {
let x = Some(());
let mut y = Some(5i);
let mut y2 = 0;
for _x in x.iter() {
y2 = y.take().unwrap();
}
assert_eq!(y2, 5);
assert!(y.is_none());
}
#[test] #[should_fail]
fn test_option_too_much_dance() {
let mut y = Some(marker::NoCopy);
let _y2 = y.take().unwrap();
let _y3 = y.take().unwrap();
}
#[test]
fn test_and() {
let x: Option<int> = Some(1i);
assert_eq!(x.and(Some(2i)), Some(2));
assert_eq!(x.and(None::<int>), None);
let x: Option<int> = None;
assert_eq!(x.and(Some(2i)), None);
assert_eq!(x.and(None::<int>), None);
}
#[test]
fn test_and_then() {
let x: Option<int> = Some(1);
assert_eq!(x.and_then(|x| Some(x + 1)), Some(2));
assert_eq!(x.and_then(|_| None::<int>), None);
let x: Option<int> = None;
assert_eq!(x.and_then(|x| Some(x + 1)), None);
assert_eq!(x.and_then(|_| None::<int>), None);
}
#[test]
fn test_or() {
let x: Option<int> = Some(1);
assert_eq!(x.or(Some(2)), Some(1));
assert_eq!(x.or(None), Some(1));
let x: Option<int> = None;
assert_eq!(x.or(Some(2)), Some(2));
assert_eq!(x.or(None), None);
}
#[test]
fn test_or_else() {
let x: Option<int> = Some(1);
assert_eq!(x.or_else(|| Some(2)), Some(1));
assert_eq!(x.or_else(|| None), Some(1));
let x: Option<int> = None;
assert_eq!(x.or_else(|| Some(2)), Some(2));
assert_eq!(x.or_else(|| None), None);
}
#[test]
fn test_unwrap() {
assert_eq!(Some(1i).unwrap(), 1);
let s = Some("hello".to_string()).unwrap();
assert_eq!(s.as_slice(), "hello");
}
#[test]
#[should_fail]
fn test_unwrap_panic1() {
let x: Option<int> = None;
x.unwrap();
}
#[test]
#[should_fail]
fn test_unwrap_panic2() {
let x: Option<String> = None;
x.unwrap();
}
#[test]
fn test_unwrap_or() {
let x: Option<int> = Some(1);
assert_eq!(x.unwrap_or(2), 1);
let x: Option<int> = None;
assert_eq!(x.unwrap_or(2), 2);
}
#[test]
fn test_unwrap_or_else() {
let x: Option<int> = Some(1);
assert_eq!(x.unwrap_or_else(|| 2), 1);
let x: Option<int> = None;
assert_eq!(x.unwrap_or_else(|| 2), 2);
}
#[test]
fn test_iter() {
let val = 5i;
let x = Some(val);
let mut it = x.iter();
assert_eq!(it.size_hint(), (1, Some(1)));
assert_eq!(it.next(), Some(&val));
assert_eq!(it.size_hint(), (0, Some(0)));
assert!(it.next().is_none());
}
#[test]
fn test_mut_iter() {
let val = 5i;
let new_val = 11i;
let mut x = Some(val);
{
let mut it = x.iter_mut();
assert_eq!(it.size_hint(), (1, Some(1)));
match it.next() {
Some(interior) => {
assert_eq!(*interior, val);
*interior = new_val;
}
None => assert!(false),
}
assert_eq!(it.size_hint(), (0, Some(0)));
assert!(it.next().is_none());
}
assert_eq!(x, Some(new_val));
}
#[test]
fn test_ord() {
let small = Some(1.0f64);
let big = Some(5.0f64);
let nan = Some(0.0f64/0.0);
assert!(!(nan < big));
assert!(!(nan > big));
assert!(small < big);
assert!(None < big);
assert!(big > None);
}
#[test]
fn test_collect() {
let v: Option<Vec<int>> = range(0i, 0).map(|_| Some(0i)).collect();
assert!(v == Some(vec![]));
let v: Option<Vec<int>> = range(0i, 3).map(|x| Some(x)).collect();
assert!(v == Some(vec![0, 1, 2]));
let v: Option<Vec<int>> = range(0i, 3).map(|x| {
if x > 1 { None } else { Some(x) }
}).collect();
assert!(v == None);
// test that it does not take more elements than it needs
let mut functions = [|| Some(()), || None, || panic!()];
let v: Option<Vec<()>> = functions.iter_mut().map(|f| (*f)()).collect();
assert!(v == None);
} | random_line_split |
|
option.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::option::*;
use core::kinds::marker;
use core::mem;
#[test]
fn test_get_ptr() {
unsafe {
let x = box 0i;
let addr_x: *const int = mem::transmute(&*x);
let opt = Some(x);
let y = opt.unwrap();
let addr_y: *const int = mem::transmute(&*y);
assert_eq!(addr_x, addr_y);
}
}
#[test]
fn test_get_str() {
let x = "test".to_string();
let addr_x = x.as_slice().as_ptr();
let opt = Some(x);
let y = opt.unwrap();
let addr_y = y.as_slice().as_ptr();
assert_eq!(addr_x, addr_y);
}
#[test]
fn test_get_resource() {
use std::rc::Rc;
use core::cell::RefCell;
struct R {
i: Rc<RefCell<int>>,
}
#[unsafe_destructor]
impl Drop for R {
fn drop(&mut self) {
let ii = &*self.i;
let i = *ii.borrow();
*ii.borrow_mut() = i + 1;
}
}
fn r(i: Rc<RefCell<int>>) -> R {
R {
i: i
}
}
let i = Rc::new(RefCell::new(0i));
{
let x = r(i.clone());
let opt = Some(x);
let _y = opt.unwrap();
}
assert_eq!(*i.borrow(), 1);
}
#[test]
fn test_option_dance() {
let x = Some(());
let mut y = Some(5i);
let mut y2 = 0;
for _x in x.iter() {
y2 = y.take().unwrap();
}
assert_eq!(y2, 5);
assert!(y.is_none());
}
#[test] #[should_fail]
fn test_option_too_much_dance() {
let mut y = Some(marker::NoCopy);
let _y2 = y.take().unwrap();
let _y3 = y.take().unwrap();
}
#[test]
fn test_and() {
let x: Option<int> = Some(1i);
assert_eq!(x.and(Some(2i)), Some(2));
assert_eq!(x.and(None::<int>), None);
let x: Option<int> = None;
assert_eq!(x.and(Some(2i)), None);
assert_eq!(x.and(None::<int>), None);
}
#[test]
fn test_and_then() {
let x: Option<int> = Some(1);
assert_eq!(x.and_then(|x| Some(x + 1)), Some(2));
assert_eq!(x.and_then(|_| None::<int>), None);
let x: Option<int> = None;
assert_eq!(x.and_then(|x| Some(x + 1)), None);
assert_eq!(x.and_then(|_| None::<int>), None);
}
#[test]
fn test_or() {
let x: Option<int> = Some(1);
assert_eq!(x.or(Some(2)), Some(1));
assert_eq!(x.or(None), Some(1));
let x: Option<int> = None;
assert_eq!(x.or(Some(2)), Some(2));
assert_eq!(x.or(None), None);
}
#[test]
fn test_or_else() {
let x: Option<int> = Some(1);
assert_eq!(x.or_else(|| Some(2)), Some(1));
assert_eq!(x.or_else(|| None), Some(1));
let x: Option<int> = None;
assert_eq!(x.or_else(|| Some(2)), Some(2));
assert_eq!(x.or_else(|| None), None);
}
#[test]
fn test_unwrap() {
assert_eq!(Some(1i).unwrap(), 1);
let s = Some("hello".to_string()).unwrap();
assert_eq!(s.as_slice(), "hello");
}
#[test]
#[should_fail]
fn test_unwrap_panic1() {
let x: Option<int> = None;
x.unwrap();
}
#[test]
#[should_fail]
fn test_unwrap_panic2() {
let x: Option<String> = None;
x.unwrap();
}
#[test]
fn test_unwrap_or() {
let x: Option<int> = Some(1);
assert_eq!(x.unwrap_or(2), 1);
let x: Option<int> = None;
assert_eq!(x.unwrap_or(2), 2);
}
#[test]
fn | () {
let x: Option<int> = Some(1);
assert_eq!(x.unwrap_or_else(|| 2), 1);
let x: Option<int> = None;
assert_eq!(x.unwrap_or_else(|| 2), 2);
}
#[test]
fn test_iter() {
let val = 5i;
let x = Some(val);
let mut it = x.iter();
assert_eq!(it.size_hint(), (1, Some(1)));
assert_eq!(it.next(), Some(&val));
assert_eq!(it.size_hint(), (0, Some(0)));
assert!(it.next().is_none());
}
#[test]
fn test_mut_iter() {
let val = 5i;
let new_val = 11i;
let mut x = Some(val);
{
let mut it = x.iter_mut();
assert_eq!(it.size_hint(), (1, Some(1)));
match it.next() {
Some(interior) => {
assert_eq!(*interior, val);
*interior = new_val;
}
None => assert!(false),
}
assert_eq!(it.size_hint(), (0, Some(0)));
assert!(it.next().is_none());
}
assert_eq!(x, Some(new_val));
}
#[test]
fn test_ord() {
let small = Some(1.0f64);
let big = Some(5.0f64);
let nan = Some(0.0f64/0.0);
assert!(!(nan < big));
assert!(!(nan > big));
assert!(small < big);
assert!(None < big);
assert!(big > None);
}
#[test]
fn test_collect() {
let v: Option<Vec<int>> = range(0i, 0).map(|_| Some(0i)).collect();
assert!(v == Some(vec![]));
let v: Option<Vec<int>> = range(0i, 3).map(|x| Some(x)).collect();
assert!(v == Some(vec![0, 1, 2]));
let v: Option<Vec<int>> = range(0i, 3).map(|x| {
if x > 1 { None } else { Some(x) }
}).collect();
assert!(v == None);
// test that it does not take more elements than it needs
let mut functions = [|| Some(()), || None, || panic!()];
let v: Option<Vec<()>> = functions.iter_mut().map(|f| (*f)()).collect();
assert!(v == None);
}
| test_unwrap_or_else | identifier_name |
option.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::option::*;
use core::kinds::marker;
use core::mem;
#[test]
fn test_get_ptr() {
unsafe {
let x = box 0i;
let addr_x: *const int = mem::transmute(&*x);
let opt = Some(x);
let y = opt.unwrap();
let addr_y: *const int = mem::transmute(&*y);
assert_eq!(addr_x, addr_y);
}
}
#[test]
fn test_get_str() {
let x = "test".to_string();
let addr_x = x.as_slice().as_ptr();
let opt = Some(x);
let y = opt.unwrap();
let addr_y = y.as_slice().as_ptr();
assert_eq!(addr_x, addr_y);
}
#[test]
fn test_get_resource() {
use std::rc::Rc;
use core::cell::RefCell;
struct R {
i: Rc<RefCell<int>>,
}
#[unsafe_destructor]
impl Drop for R {
fn drop(&mut self) {
let ii = &*self.i;
let i = *ii.borrow();
*ii.borrow_mut() = i + 1;
}
}
fn r(i: Rc<RefCell<int>>) -> R {
R {
i: i
}
}
let i = Rc::new(RefCell::new(0i));
{
let x = r(i.clone());
let opt = Some(x);
let _y = opt.unwrap();
}
assert_eq!(*i.borrow(), 1);
}
#[test]
fn test_option_dance() {
let x = Some(());
let mut y = Some(5i);
let mut y2 = 0;
for _x in x.iter() {
y2 = y.take().unwrap();
}
assert_eq!(y2, 5);
assert!(y.is_none());
}
#[test] #[should_fail]
fn test_option_too_much_dance() {
let mut y = Some(marker::NoCopy);
let _y2 = y.take().unwrap();
let _y3 = y.take().unwrap();
}
#[test]
fn test_and() {
let x: Option<int> = Some(1i);
assert_eq!(x.and(Some(2i)), Some(2));
assert_eq!(x.and(None::<int>), None);
let x: Option<int> = None;
assert_eq!(x.and(Some(2i)), None);
assert_eq!(x.and(None::<int>), None);
}
#[test]
fn test_and_then() {
let x: Option<int> = Some(1);
assert_eq!(x.and_then(|x| Some(x + 1)), Some(2));
assert_eq!(x.and_then(|_| None::<int>), None);
let x: Option<int> = None;
assert_eq!(x.and_then(|x| Some(x + 1)), None);
assert_eq!(x.and_then(|_| None::<int>), None);
}
#[test]
fn test_or() {
let x: Option<int> = Some(1);
assert_eq!(x.or(Some(2)), Some(1));
assert_eq!(x.or(None), Some(1));
let x: Option<int> = None;
assert_eq!(x.or(Some(2)), Some(2));
assert_eq!(x.or(None), None);
}
#[test]
fn test_or_else() {
let x: Option<int> = Some(1);
assert_eq!(x.or_else(|| Some(2)), Some(1));
assert_eq!(x.or_else(|| None), Some(1));
let x: Option<int> = None;
assert_eq!(x.or_else(|| Some(2)), Some(2));
assert_eq!(x.or_else(|| None), None);
}
#[test]
fn test_unwrap() {
assert_eq!(Some(1i).unwrap(), 1);
let s = Some("hello".to_string()).unwrap();
assert_eq!(s.as_slice(), "hello");
}
#[test]
#[should_fail]
fn test_unwrap_panic1() {
let x: Option<int> = None;
x.unwrap();
}
#[test]
#[should_fail]
fn test_unwrap_panic2() {
let x: Option<String> = None;
x.unwrap();
}
#[test]
fn test_unwrap_or() |
#[test]
fn test_unwrap_or_else() {
let x: Option<int> = Some(1);
assert_eq!(x.unwrap_or_else(|| 2), 1);
let x: Option<int> = None;
assert_eq!(x.unwrap_or_else(|| 2), 2);
}
#[test]
fn test_iter() {
let val = 5i;
let x = Some(val);
let mut it = x.iter();
assert_eq!(it.size_hint(), (1, Some(1)));
assert_eq!(it.next(), Some(&val));
assert_eq!(it.size_hint(), (0, Some(0)));
assert!(it.next().is_none());
}
#[test]
fn test_mut_iter() {
let val = 5i;
let new_val = 11i;
let mut x = Some(val);
{
let mut it = x.iter_mut();
assert_eq!(it.size_hint(), (1, Some(1)));
match it.next() {
Some(interior) => {
assert_eq!(*interior, val);
*interior = new_val;
}
None => assert!(false),
}
assert_eq!(it.size_hint(), (0, Some(0)));
assert!(it.next().is_none());
}
assert_eq!(x, Some(new_val));
}
#[test]
fn test_ord() {
let small = Some(1.0f64);
let big = Some(5.0f64);
let nan = Some(0.0f64/0.0);
assert!(!(nan < big));
assert!(!(nan > big));
assert!(small < big);
assert!(None < big);
assert!(big > None);
}
#[test]
fn test_collect() {
let v: Option<Vec<int>> = range(0i, 0).map(|_| Some(0i)).collect();
assert!(v == Some(vec![]));
let v: Option<Vec<int>> = range(0i, 3).map(|x| Some(x)).collect();
assert!(v == Some(vec![0, 1, 2]));
let v: Option<Vec<int>> = range(0i, 3).map(|x| {
if x > 1 { None } else { Some(x) }
}).collect();
assert!(v == None);
// test that it does not take more elements than it needs
let mut functions = [|| Some(()), || None, || panic!()];
let v: Option<Vec<()>> = functions.iter_mut().map(|f| (*f)()).collect();
assert!(v == None);
}
| {
let x: Option<int> = Some(1);
assert_eq!(x.unwrap_or(2), 1);
let x: Option<int> = None;
assert_eq!(x.unwrap_or(2), 2);
} | identifier_body |
app-routing.module.ts | import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { BlogComponent } from './blog.component';
import { HeroComponent } from './hero.component'; |
const appRoutes: Routes = [
{ path: '', redirectTo: '/hero', pathMatch: 'full' },
{ path: 'blog', component: BlogComponent },
{ path: 'hero', component: HeroComponent },
{ path: 'about', component: AboutComponent},
{ path: 'pricing', component: PricingComponent},
{ path: 'contact-us', component: ContactUsComponent},
{ path: 'post/:id', component: BlogPostFullComponent }
]
@NgModule({
imports: [RouterModule.forRoot(appRoutes)],
exports: [RouterModule]
})
export class AppRoutingModule { } | import { AboutComponent } from './about.component';
import { PricingComponent } from './pricing.component';
import { ContactUsComponent } from './contact-us.component';
import { BlogPostFullComponent } from './blog-post-full.component'; | random_line_split |
app-routing.module.ts | import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { BlogComponent } from './blog.component';
import { HeroComponent } from './hero.component';
import { AboutComponent } from './about.component';
import { PricingComponent } from './pricing.component';
import { ContactUsComponent } from './contact-us.component';
import { BlogPostFullComponent } from './blog-post-full.component';
const appRoutes: Routes = [
{ path: '', redirectTo: '/hero', pathMatch: 'full' },
{ path: 'blog', component: BlogComponent },
{ path: 'hero', component: HeroComponent },
{ path: 'about', component: AboutComponent},
{ path: 'pricing', component: PricingComponent},
{ path: 'contact-us', component: ContactUsComponent},
{ path: 'post/:id', component: BlogPostFullComponent }
]
@NgModule({
imports: [RouterModule.forRoot(appRoutes)],
exports: [RouterModule]
})
export class | { }
| AppRoutingModule | identifier_name |
websocketGate.js | import React, { Component } from 'react';
import { bindActionCreators } from 'redux';
import { connect } from 'react-redux';
import { getAllOpenRoomsIds } from './../../actions/roomsActionCreators';
import { getMessagesByRooms } from './../../actions/messagesActionCreators';
import { getGlobalSolvedUnsolvedStatistics } from './../../actions/statsActionCreators';
import { websocketManager } from './../../services/websocketManager';
import { authManager } from './../../services/authManager';
import { WS_COMMANDS } from './../../consts/commands';
import { WS } from './../../serversConfig';
const mapStateToProps = (state) => {
return {
role: state.context.role,
rooms: state.rooms
}
};
const mapDispatchToProps = (dispatch) => {
return bindActionCreators({
getAllOpenRoomsIds,
getMessagesByRooms,
getGlobalSolvedUnsolvedStatistics
}, dispatch);
};
class WebsocketGate extends Component {
constructor() {
super();
this.state = { isOpened: false };
}
componentDidMount() {
this.initGate(WS.SERVER_ADDRESS);
}
initGate(addr) {
websocketManager.initNewConnection(
addr,
this.onOpen.bind(this),
this.onMessage.bind(this),
this.onClose.bind(this))
}
onOpen(e) |
onMessage(e) {
console.log(e);
const data = e.data;
if (data.split(":")[0] === WS_COMMANDS.NEW_MESSAGE) {
this.onNewMessageAction(data);
}
if (data.split(":")[0] === WS_COMMANDS.NEW_ROOM) {
this.onNewRoomAction(data);
}
}
onNewMessageAction(data) {
this.props.getMessagesByRooms(this.props.rooms);
}
onNewRoomAction(data) {
if (authManager.isEmployee(this.props.role)) {
this.props.getAllOpenRoomsIds();
this.props.getGlobalSolvedUnsolvedStatistics();
}
}
onClose(e) {
console.log(e);
}
render() {
const { children } = this.props;
return (
<div>
{children}
<div className='connection-indicator'>
{!this.state.isOpened &&
<div>
<span className="label label-warning">Connecting...</span>
</div>}
{this.state.isOpened &&
<div>
<span className="label label-success">Connected</span>
</div>
}
</div>
</div>
);
}
}
export default connect(
mapStateToProps,
mapDispatchToProps
)(WebsocketGate);
| {
console.log(e);
this.setState({ isOpened: true });
} | identifier_body |
websocketGate.js | import React, { Component } from 'react';
import { bindActionCreators } from 'redux';
import { connect } from 'react-redux';
import { getAllOpenRoomsIds } from './../../actions/roomsActionCreators';
import { getMessagesByRooms } from './../../actions/messagesActionCreators';
import { getGlobalSolvedUnsolvedStatistics } from './../../actions/statsActionCreators';
import { websocketManager } from './../../services/websocketManager';
import { authManager } from './../../services/authManager';
import { WS_COMMANDS } from './../../consts/commands';
import { WS } from './../../serversConfig';
const mapStateToProps = (state) => {
return {
role: state.context.role,
rooms: state.rooms
}
};
const mapDispatchToProps = (dispatch) => {
return bindActionCreators({
getAllOpenRoomsIds,
getMessagesByRooms,
getGlobalSolvedUnsolvedStatistics
}, dispatch);
};
class WebsocketGate extends Component {
constructor() {
super();
this.state = { isOpened: false };
}
componentDidMount() {
this.initGate(WS.SERVER_ADDRESS);
}
initGate(addr) {
websocketManager.initNewConnection(
addr,
this.onOpen.bind(this),
this.onMessage.bind(this),
this.onClose.bind(this))
}
| (e) {
console.log(e);
this.setState({ isOpened: true });
}
onMessage(e) {
console.log(e);
const data = e.data;
if (data.split(":")[0] === WS_COMMANDS.NEW_MESSAGE) {
this.onNewMessageAction(data);
}
if (data.split(":")[0] === WS_COMMANDS.NEW_ROOM) {
this.onNewRoomAction(data);
}
}
onNewMessageAction(data) {
this.props.getMessagesByRooms(this.props.rooms);
}
onNewRoomAction(data) {
if (authManager.isEmployee(this.props.role)) {
this.props.getAllOpenRoomsIds();
this.props.getGlobalSolvedUnsolvedStatistics();
}
}
onClose(e) {
console.log(e);
}
render() {
const { children } = this.props;
return (
<div>
{children}
<div className='connection-indicator'>
{!this.state.isOpened &&
<div>
<span className="label label-warning">Connecting...</span>
</div>}
{this.state.isOpened &&
<div>
<span className="label label-success">Connected</span>
</div>
}
</div>
</div>
);
}
}
export default connect(
mapStateToProps,
mapDispatchToProps
)(WebsocketGate);
| onOpen | identifier_name |
websocketGate.js | import React, { Component } from 'react';
import { bindActionCreators } from 'redux';
import { connect } from 'react-redux';
import { getAllOpenRoomsIds } from './../../actions/roomsActionCreators';
import { getMessagesByRooms } from './../../actions/messagesActionCreators';
import { getGlobalSolvedUnsolvedStatistics } from './../../actions/statsActionCreators';
import { websocketManager } from './../../services/websocketManager';
import { authManager } from './../../services/authManager';
import { WS_COMMANDS } from './../../consts/commands';
import { WS } from './../../serversConfig';
const mapStateToProps = (state) => {
return {
role: state.context.role,
rooms: state.rooms
}
};
const mapDispatchToProps = (dispatch) => {
return bindActionCreators({
getAllOpenRoomsIds,
getMessagesByRooms,
getGlobalSolvedUnsolvedStatistics
}, dispatch);
};
class WebsocketGate extends Component {
constructor() {
super();
this.state = { isOpened: false };
}
componentDidMount() {
this.initGate(WS.SERVER_ADDRESS);
}
initGate(addr) {
websocketManager.initNewConnection(
addr,
this.onOpen.bind(this),
this.onMessage.bind(this),
this.onClose.bind(this))
}
onOpen(e) {
console.log(e);
this.setState({ isOpened: true });
}
onMessage(e) {
console.log(e);
const data = e.data;
if (data.split(":")[0] === WS_COMMANDS.NEW_MESSAGE) {
this.onNewMessageAction(data);
}
if (data.split(":")[0] === WS_COMMANDS.NEW_ROOM) {
this.onNewRoomAction(data);
}
}
onNewMessageAction(data) { | onNewRoomAction(data) {
if (authManager.isEmployee(this.props.role)) {
this.props.getAllOpenRoomsIds();
this.props.getGlobalSolvedUnsolvedStatistics();
}
}
onClose(e) {
console.log(e);
}
render() {
const { children } = this.props;
return (
<div>
{children}
<div className='connection-indicator'>
{!this.state.isOpened &&
<div>
<span className="label label-warning">Connecting...</span>
</div>}
{this.state.isOpened &&
<div>
<span className="label label-success">Connected</span>
</div>
}
</div>
</div>
);
}
}
export default connect(
mapStateToProps,
mapDispatchToProps
)(WebsocketGate); | this.props.getMessagesByRooms(this.props.rooms);
} | random_line_split |
websocketGate.js | import React, { Component } from 'react';
import { bindActionCreators } from 'redux';
import { connect } from 'react-redux';
import { getAllOpenRoomsIds } from './../../actions/roomsActionCreators';
import { getMessagesByRooms } from './../../actions/messagesActionCreators';
import { getGlobalSolvedUnsolvedStatistics } from './../../actions/statsActionCreators';
import { websocketManager } from './../../services/websocketManager';
import { authManager } from './../../services/authManager';
import { WS_COMMANDS } from './../../consts/commands';
import { WS } from './../../serversConfig';
const mapStateToProps = (state) => {
return {
role: state.context.role,
rooms: state.rooms
}
};
const mapDispatchToProps = (dispatch) => {
return bindActionCreators({
getAllOpenRoomsIds,
getMessagesByRooms,
getGlobalSolvedUnsolvedStatistics
}, dispatch);
};
class WebsocketGate extends Component {
constructor() {
super();
this.state = { isOpened: false };
}
componentDidMount() {
this.initGate(WS.SERVER_ADDRESS);
}
initGate(addr) {
websocketManager.initNewConnection(
addr,
this.onOpen.bind(this),
this.onMessage.bind(this),
this.onClose.bind(this))
}
onOpen(e) {
console.log(e);
this.setState({ isOpened: true });
}
onMessage(e) {
console.log(e);
const data = e.data;
if (data.split(":")[0] === WS_COMMANDS.NEW_MESSAGE) {
this.onNewMessageAction(data);
}
if (data.split(":")[0] === WS_COMMANDS.NEW_ROOM) |
}
onNewMessageAction(data) {
this.props.getMessagesByRooms(this.props.rooms);
}
onNewRoomAction(data) {
if (authManager.isEmployee(this.props.role)) {
this.props.getAllOpenRoomsIds();
this.props.getGlobalSolvedUnsolvedStatistics();
}
}
onClose(e) {
console.log(e);
}
render() {
const { children } = this.props;
return (
<div>
{children}
<div className='connection-indicator'>
{!this.state.isOpened &&
<div>
<span className="label label-warning">Connecting...</span>
</div>}
{this.state.isOpened &&
<div>
<span className="label label-success">Connected</span>
</div>
}
</div>
</div>
);
}
}
export default connect(
mapStateToProps,
mapDispatchToProps
)(WebsocketGate);
| {
this.onNewRoomAction(data);
} | conditional_block |
__init__.py | from enum import IntEnum
__all__ = ['HTTPStatus']
class HTTPStatus(IntEnum):
"""HTTP status codes and reason phrases
Status codes from the following RFCs are all observed:
* RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616
* RFC 6585: Additional HTTP Status Codes
* RFC 3229: Delta encoding in HTTP
* RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518
* RFC 5842: Binding Extensions to WebDAV
* RFC 7238: Permanent Redirect
* RFC 2295: Transparent Content Negotiation in HTTP
* RFC 2774: An HTTP Extension Framework
* RFC 7725: An HTTP Status Code to Report Legal Obstacles
* RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2)
* RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0)
* RFC 8297: An HTTP Status Code for Indicating Hints
* RFC 8470: Using Early Data in HTTP
"""
def __new__(cls, value, phrase, description=''):
obj = int.__new__(cls, value)
obj._value_ = value
obj.phrase = phrase
obj.description = description
return obj
# informational
CONTINUE = 100, 'Continue', 'Request received, please continue'
SWITCHING_PROTOCOLS = (101, 'Switching Protocols',
'Switching to new protocol; obey Upgrade header')
PROCESSING = 102, 'Processing'
EARLY_HINTS = 103, 'Early Hints'
# success
OK = 200, 'OK', 'Request fulfilled, document follows'
CREATED = 201, 'Created', 'Document created, URL follows'
ACCEPTED = (202, 'Accepted',
'Request accepted, processing continues off-line')
NON_AUTHORITATIVE_INFORMATION = (203,
'Non-Authoritative Information', 'Request fulfilled from cache')
NO_CONTENT = 204, 'No Content', 'Request fulfilled, nothing follows'
RESET_CONTENT = 205, 'Reset Content', 'Clear input form for further input'
PARTIAL_CONTENT = 206, 'Partial Content', 'Partial content follows'
MULTI_STATUS = 207, 'Multi-Status'
ALREADY_REPORTED = 208, 'Already Reported'
IM_USED = 226, 'IM Used'
# redirection
MULTIPLE_CHOICES = (300, 'Multiple Choices',
'Object has several resources -- see URI list')
MOVED_PERMANENTLY = (301, 'Moved Permanently',
'Object moved permanently -- see URI list')
FOUND = 302, 'Found', 'Object moved temporarily -- see URI list'
SEE_OTHER = 303, 'See Other', 'Object moved -- see Method and URL list'
NOT_MODIFIED = (304, 'Not Modified',
'Document has not changed since given time')
USE_PROXY = (305, 'Use Proxy',
'You must use proxy specified in Location to access this resource')
TEMPORARY_REDIRECT = (307, 'Temporary Redirect',
'Object moved temporarily -- see URI list')
PERMANENT_REDIRECT = (308, 'Permanent Redirect',
'Object moved permanently -- see URI list')
# client error
BAD_REQUEST = (400, 'Bad Request',
'Bad request syntax or unsupported method')
UNAUTHORIZED = (401, 'Unauthorized',
'No permission -- see authorization schemes')
PAYMENT_REQUIRED = (402, 'Payment Required',
'No payment -- see charging schemes')
FORBIDDEN = (403, 'Forbidden',
'Request forbidden -- authorization will not help')
NOT_FOUND = (404, 'Not Found',
'Nothing matches the given URI')
METHOD_NOT_ALLOWED = (405, 'Method Not Allowed',
'Specified method is invalid for this resource')
NOT_ACCEPTABLE = (406, 'Not Acceptable',
'URI not available in preferred format')
PROXY_AUTHENTICATION_REQUIRED = (407,
'Proxy Authentication Required',
'You must authenticate with this proxy before proceeding')
REQUEST_TIMEOUT = (408, 'Request Timeout',
'Request timed out; try again later')
CONFLICT = 409, 'Conflict', 'Request conflict'
GONE = (410, 'Gone',
'URI no longer exists and has been permanently removed')
LENGTH_REQUIRED = (411, 'Length Required',
'Client must specify Content-Length')
PRECONDITION_FAILED = (412, 'Precondition Failed',
'Precondition in headers is false')
REQUEST_ENTITY_TOO_LARGE = (413, 'Request Entity Too Large',
'Entity is too large')
REQUEST_URI_TOO_LONG = (414, 'Request-URI Too Long',
'URI is too long')
UNSUPPORTED_MEDIA_TYPE = (415, 'Unsupported Media Type',
'Entity body in unsupported format')
REQUESTED_RANGE_NOT_SATISFIABLE = (416,
'Requested Range Not Satisfiable',
'Cannot satisfy request range')
EXPECTATION_FAILED = (417, 'Expectation Failed',
'Expect condition could not be satisfied')
IM_A_TEAPOT = (418, 'I\'m a Teapot',
'Server refuses to brew coffee because it is a teapot.')
MISDIRECTED_REQUEST = (421, 'Misdirected Request',
'Server is not able to produce a response')
UNPROCESSABLE_ENTITY = 422, 'Unprocessable Entity'
LOCKED = 423, 'Locked'
FAILED_DEPENDENCY = 424, 'Failed Dependency'
TOO_EARLY = 425, 'Too Early'
UPGRADE_REQUIRED = 426, 'Upgrade Required'
PRECONDITION_REQUIRED = (428, 'Precondition Required',
'The origin server requires the request to be conditional')
TOO_MANY_REQUESTS = (429, 'Too Many Requests',
'The user has sent too many requests in '
'a given amount of time ("rate limiting")') | UNAVAILABLE_FOR_LEGAL_REASONS = (451,
'Unavailable For Legal Reasons',
'The server is denying access to the '
'resource as a consequence of a legal demand')
# server errors
INTERNAL_SERVER_ERROR = (500, 'Internal Server Error',
'Server got itself in trouble')
NOT_IMPLEMENTED = (501, 'Not Implemented',
'Server does not support this operation')
BAD_GATEWAY = (502, 'Bad Gateway',
'Invalid responses from another server/proxy')
SERVICE_UNAVAILABLE = (503, 'Service Unavailable',
'The server cannot process the request due to a high load')
GATEWAY_TIMEOUT = (504, 'Gateway Timeout',
'The gateway server did not receive a timely response')
HTTP_VERSION_NOT_SUPPORTED = (505, 'HTTP Version Not Supported',
'Cannot fulfill request')
VARIANT_ALSO_NEGOTIATES = 506, 'Variant Also Negotiates'
INSUFFICIENT_STORAGE = 507, 'Insufficient Storage'
LOOP_DETECTED = 508, 'Loop Detected'
NOT_EXTENDED = 510, 'Not Extended'
NETWORK_AUTHENTICATION_REQUIRED = (511,
'Network Authentication Required',
'The client needs to authenticate to gain network access') | REQUEST_HEADER_FIELDS_TOO_LARGE = (431,
'Request Header Fields Too Large',
'The server is unwilling to process the request because its header '
'fields are too large') | random_line_split |
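Each member above is built by __new__ with a value, a phrase, and an optional description, so it compares as an int while still exposing both strings. This is observable directly on the stdlib http.HTTPStatus that this file defines:

from http import HTTPStatus

status = HTTPStatus.NOT_FOUND
assert status == 404                      # IntEnum members compare as ints
assert status.phrase == 'Not Found'
assert status.description == 'Nothing matches the given URI'
assert HTTPStatus(301) is HTTPStatus.MOVED_PERMANENTLY  # lookup by value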
__init__.py | from enum import IntEnum
__all__ = ['HTTPStatus']
class HTTPStatus(IntEnum):
| """HTTP status codes and reason phrases
Status codes from the following RFCs are all observed:
* RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616
* RFC 6585: Additional HTTP Status Codes
* RFC 3229: Delta encoding in HTTP
* RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518
* RFC 5842: Binding Extensions to WebDAV
* RFC 7238: Permanent Redirect
* RFC 2295: Transparent Content Negotiation in HTTP
* RFC 2774: An HTTP Extension Framework
* RFC 7725: An HTTP Status Code to Report Legal Obstacles
* RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2)
* RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0)
* RFC 8297: An HTTP Status Code for Indicating Hints
* RFC 8470: Using Early Data in HTTP
"""
def __new__(cls, value, phrase, description=''):
obj = int.__new__(cls, value)
obj._value_ = value
obj.phrase = phrase
obj.description = description
return obj
# informational
CONTINUE = 100, 'Continue', 'Request received, please continue'
SWITCHING_PROTOCOLS = (101, 'Switching Protocols',
'Switching to new protocol; obey Upgrade header')
PROCESSING = 102, 'Processing'
EARLY_HINTS = 103, 'Early Hints'
# success
OK = 200, 'OK', 'Request fulfilled, document follows'
CREATED = 201, 'Created', 'Document created, URL follows'
ACCEPTED = (202, 'Accepted',
'Request accepted, processing continues off-line')
NON_AUTHORITATIVE_INFORMATION = (203,
'Non-Authoritative Information', 'Request fulfilled from cache')
NO_CONTENT = 204, 'No Content', 'Request fulfilled, nothing follows'
RESET_CONTENT = 205, 'Reset Content', 'Clear input form for further input'
PARTIAL_CONTENT = 206, 'Partial Content', 'Partial content follows'
MULTI_STATUS = 207, 'Multi-Status'
ALREADY_REPORTED = 208, 'Already Reported'
IM_USED = 226, 'IM Used'
# redirection
MULTIPLE_CHOICES = (300, 'Multiple Choices',
'Object has several resources -- see URI list')
MOVED_PERMANENTLY = (301, 'Moved Permanently',
'Object moved permanently -- see URI list')
FOUND = 302, 'Found', 'Object moved temporarily -- see URI list'
SEE_OTHER = 303, 'See Other', 'Object moved -- see Method and URL list'
NOT_MODIFIED = (304, 'Not Modified',
'Document has not changed since given time')
USE_PROXY = (305, 'Use Proxy',
'You must use proxy specified in Location to access this resource')
TEMPORARY_REDIRECT = (307, 'Temporary Redirect',
'Object moved temporarily -- see URI list')
PERMANENT_REDIRECT = (308, 'Permanent Redirect',
'Object moved permanently -- see URI list')
# client error
BAD_REQUEST = (400, 'Bad Request',
'Bad request syntax or unsupported method')
UNAUTHORIZED = (401, 'Unauthorized',
'No permission -- see authorization schemes')
PAYMENT_REQUIRED = (402, 'Payment Required',
'No payment -- see charging schemes')
FORBIDDEN = (403, 'Forbidden',
'Request forbidden -- authorization will not help')
NOT_FOUND = (404, 'Not Found',
'Nothing matches the given URI')
METHOD_NOT_ALLOWED = (405, 'Method Not Allowed',
'Specified method is invalid for this resource')
NOT_ACCEPTABLE = (406, 'Not Acceptable',
'URI not available in preferred format')
PROXY_AUTHENTICATION_REQUIRED = (407,
'Proxy Authentication Required',
'You must authenticate with this proxy before proceeding')
REQUEST_TIMEOUT = (408, 'Request Timeout',
'Request timed out; try again later')
CONFLICT = 409, 'Conflict', 'Request conflict'
GONE = (410, 'Gone',
'URI no longer exists and has been permanently removed')
LENGTH_REQUIRED = (411, 'Length Required',
'Client must specify Content-Length')
PRECONDITION_FAILED = (412, 'Precondition Failed',
'Precondition in headers is false')
REQUEST_ENTITY_TOO_LARGE = (413, 'Request Entity Too Large',
'Entity is too large')
REQUEST_URI_TOO_LONG = (414, 'Request-URI Too Long',
'URI is too long')
UNSUPPORTED_MEDIA_TYPE = (415, 'Unsupported Media Type',
'Entity body in unsupported format')
REQUESTED_RANGE_NOT_SATISFIABLE = (416,
'Requested Range Not Satisfiable',
'Cannot satisfy request range')
EXPECTATION_FAILED = (417, 'Expectation Failed',
'Expect condition could not be satisfied')
IM_A_TEAPOT = (418, 'I\'m a Teapot',
'Server refuses to brew coffee because it is a teapot.')
MISDIRECTED_REQUEST = (421, 'Misdirected Request',
'Server is not able to produce a response')
UNPROCESSABLE_ENTITY = 422, 'Unprocessable Entity'
LOCKED = 423, 'Locked'
FAILED_DEPENDENCY = 424, 'Failed Dependency'
TOO_EARLY = 425, 'Too Early'
UPGRADE_REQUIRED = 426, 'Upgrade Required'
PRECONDITION_REQUIRED = (428, 'Precondition Required',
'The origin server requires the request to be conditional')
TOO_MANY_REQUESTS = (429, 'Too Many Requests',
'The user has sent too many requests in '
'a given amount of time ("rate limiting")')
REQUEST_HEADER_FIELDS_TOO_LARGE = (431,
'Request Header Fields Too Large',
'The server is unwilling to process the request because its header '
'fields are too large')
UNAVAILABLE_FOR_LEGAL_REASONS = (451,
'Unavailable For Legal Reasons',
'The server is denying access to the '
'resource as a consequence of a legal demand')
# server errors
INTERNAL_SERVER_ERROR = (500, 'Internal Server Error',
'Server got itself in trouble')
NOT_IMPLEMENTED = (501, 'Not Implemented',
'Server does not support this operation')
BAD_GATEWAY = (502, 'Bad Gateway',
'Invalid responses from another server/proxy')
SERVICE_UNAVAILABLE = (503, 'Service Unavailable',
'The server cannot process the request due to a high load')
GATEWAY_TIMEOUT = (504, 'Gateway Timeout',
'The gateway server did not receive a timely response')
HTTP_VERSION_NOT_SUPPORTED = (505, 'HTTP Version Not Supported',
'Cannot fulfill request')
VARIANT_ALSO_NEGOTIATES = 506, 'Variant Also Negotiates'
INSUFFICIENT_STORAGE = 507, 'Insufficient Storage'
LOOP_DETECTED = 508, 'Loop Detected'
NOT_EXTENDED = 510, 'Not Extended'
NETWORK_AUTHENTICATION_REQUIRED = (511,
'Network Authentication Required',
'The client needs to authenticate to gain network access') | identifier_body |
|
__init__.py | from enum import IntEnum
__all__ = ['HTTPStatus']
class | (IntEnum):
"""HTTP status codes and reason phrases
Status codes from the following RFCs are all observed:
* RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616
* RFC 6585: Additional HTTP Status Codes
* RFC 3229: Delta encoding in HTTP
* RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518
* RFC 5842: Binding Extensions to WebDAV
* RFC 7238: Permanent Redirect
* RFC 2295: Transparent Content Negotiation in HTTP
* RFC 2774: An HTTP Extension Framework
* RFC 7725: An HTTP Status Code to Report Legal Obstacles
* RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2)
* RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0)
* RFC 8297: An HTTP Status Code for Indicating Hints
* RFC 8470: Using Early Data in HTTP
"""
def __new__(cls, value, phrase, description=''):
obj = int.__new__(cls, value)
obj._value_ = value
obj.phrase = phrase
obj.description = description
return obj
# informational
CONTINUE = 100, 'Continue', 'Request received, please continue'
SWITCHING_PROTOCOLS = (101, 'Switching Protocols',
'Switching to new protocol; obey Upgrade header')
PROCESSING = 102, 'Processing'
EARLY_HINTS = 103, 'Early Hints'
# success
OK = 200, 'OK', 'Request fulfilled, document follows'
CREATED = 201, 'Created', 'Document created, URL follows'
ACCEPTED = (202, 'Accepted',
'Request accepted, processing continues off-line')
NON_AUTHORITATIVE_INFORMATION = (203,
'Non-Authoritative Information', 'Request fulfilled from cache')
NO_CONTENT = 204, 'No Content', 'Request fulfilled, nothing follows'
RESET_CONTENT = 205, 'Reset Content', 'Clear input form for further input'
PARTIAL_CONTENT = 206, 'Partial Content', 'Partial content follows'
MULTI_STATUS = 207, 'Multi-Status'
ALREADY_REPORTED = 208, 'Already Reported'
IM_USED = 226, 'IM Used'
# redirection
MULTIPLE_CHOICES = (300, 'Multiple Choices',
'Object has several resources -- see URI list')
MOVED_PERMANENTLY = (301, 'Moved Permanently',
'Object moved permanently -- see URI list')
FOUND = 302, 'Found', 'Object moved temporarily -- see URI list'
SEE_OTHER = 303, 'See Other', 'Object moved -- see Method and URL list'
NOT_MODIFIED = (304, 'Not Modified',
'Document has not changed since given time')
USE_PROXY = (305, 'Use Proxy',
'You must use proxy specified in Location to access this resource')
TEMPORARY_REDIRECT = (307, 'Temporary Redirect',
'Object moved temporarily -- see URI list')
PERMANENT_REDIRECT = (308, 'Permanent Redirect',
'Object moved permanently -- see URI list')
# client error
BAD_REQUEST = (400, 'Bad Request',
'Bad request syntax or unsupported method')
UNAUTHORIZED = (401, 'Unauthorized',
'No permission -- see authorization schemes')
PAYMENT_REQUIRED = (402, 'Payment Required',
'No payment -- see charging schemes')
FORBIDDEN = (403, 'Forbidden',
'Request forbidden -- authorization will not help')
NOT_FOUND = (404, 'Not Found',
'Nothing matches the given URI')
METHOD_NOT_ALLOWED = (405, 'Method Not Allowed',
'Specified method is invalid for this resource')
NOT_ACCEPTABLE = (406, 'Not Acceptable',
'URI not available in preferred format')
PROXY_AUTHENTICATION_REQUIRED = (407,
'Proxy Authentication Required',
'You must authenticate with this proxy before proceeding')
REQUEST_TIMEOUT = (408, 'Request Timeout',
'Request timed out; try again later')
CONFLICT = 409, 'Conflict', 'Request conflict'
GONE = (410, 'Gone',
'URI no longer exists and has been permanently removed')
LENGTH_REQUIRED = (411, 'Length Required',
'Client must specify Content-Length')
PRECONDITION_FAILED = (412, 'Precondition Failed',
'Precondition in headers is false')
REQUEST_ENTITY_TOO_LARGE = (413, 'Request Entity Too Large',
'Entity is too large')
REQUEST_URI_TOO_LONG = (414, 'Request-URI Too Long',
'URI is too long')
UNSUPPORTED_MEDIA_TYPE = (415, 'Unsupported Media Type',
'Entity body in unsupported format')
REQUESTED_RANGE_NOT_SATISFIABLE = (416,
'Requested Range Not Satisfiable',
'Cannot satisfy request range')
EXPECTATION_FAILED = (417, 'Expectation Failed',
'Expect condition could not be satisfied')
IM_A_TEAPOT = (418, 'I\'m a Teapot',
'Server refuses to brew coffee because it is a teapot.')
MISDIRECTED_REQUEST = (421, 'Misdirected Request',
'Server is not able to produce a response')
UNPROCESSABLE_ENTITY = 422, 'Unprocessable Entity'
LOCKED = 423, 'Locked'
FAILED_DEPENDENCY = 424, 'Failed Dependency'
TOO_EARLY = 425, 'Too Early'
UPGRADE_REQUIRED = 426, 'Upgrade Required'
PRECONDITION_REQUIRED = (428, 'Precondition Required',
'The origin server requires the request to be conditional')
TOO_MANY_REQUESTS = (429, 'Too Many Requests',
'The user has sent too many requests in '
'a given amount of time ("rate limiting")')
REQUEST_HEADER_FIELDS_TOO_LARGE = (431,
'Request Header Fields Too Large',
'The server is unwilling to process the request because its header '
'fields are too large')
UNAVAILABLE_FOR_LEGAL_REASONS = (451,
'Unavailable For Legal Reasons',
'The server is denying access to the '
'resource as a consequence of a legal demand')
# server errors
INTERNAL_SERVER_ERROR = (500, 'Internal Server Error',
'Server got itself in trouble')
NOT_IMPLEMENTED = (501, 'Not Implemented',
'Server does not support this operation')
BAD_GATEWAY = (502, 'Bad Gateway',
'Invalid responses from another server/proxy')
SERVICE_UNAVAILABLE = (503, 'Service Unavailable',
'The server cannot process the request due to a high load')
GATEWAY_TIMEOUT = (504, 'Gateway Timeout',
'The gateway server did not receive a timely response')
HTTP_VERSION_NOT_SUPPORTED = (505, 'HTTP Version Not Supported',
'Cannot fulfill request')
VARIANT_ALSO_NEGOTIATES = 506, 'Variant Also Negotiates'
INSUFFICIENT_STORAGE = 507, 'Insufficient Storage'
LOOP_DETECTED = 508, 'Loop Detected'
NOT_EXTENDED = 510, 'Not Extended'
NETWORK_AUTHENTICATION_REQUIRED = (511,
'Network Authentication Required',
'The client needs to authenticate to gain network access')
| HTTPStatus | identifier_name |
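A minimal usage sketch of the enum above, assuming Python 3.9+ (where the stdlib http.HTTPStatus carries the same members, including EARLY_HINTS, TOO_EARLY and IM_A_TEAPOT): __new__ unpacks each tuple into the integer value plus phrase and description attributes, so members compare as plain ints, resolve by value, and default description to the empty string.
# Sketch only, assuming Python 3.9+ stdlib http.HTTPStatus matches the class above.
from http import HTTPStatus
assert HTTPStatus.NOT_FOUND == 404                       # IntEnum compares as int
assert HTTPStatus.NOT_FOUND.phrase == 'Not Found'
assert HTTPStatus(308) is HTTPStatus.PERMANENT_REDIRECT  # lookup by value
assert HTTPStatus.PROCESSING.description == ''           # two-item entries default to ''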
purefa_volume.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018, Simon Dodsley ([email protected])
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: purefa_volume
version_added: '2.4'
short_description: Manage volumes on Pure Storage FlashArrays
description:
- Create, delete or extend the capacity of a volume on Pure Storage FlashArray.
author:
- Simon Dodsley (@sdodsley)
options:
name:
description:
- The name of the volume.
required: true
target:
description:
- The name of the target volume, if copying.
state:
description:
- Define whether the volume should exist or not.
default: present
choices: [ absent, present ]
eradicate:
description:
- Define whether to eradicate the volume on delete or leave in trash.
type: bool
default: 'no'
overwrite:
description:
- Define whether to overwrite a target volume if it already exists.
type: bool
default: 'no'
size:
description:
- Volume size in M, G, T or P units.
extends_documentation_fragment:
- purestorage.fa
'''
EXAMPLES = r'''
- name: Create new volume named foo
purefa_volume:
name: foo
size: 1T
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Extend the size of an existing volume named foo
purefa_volume:
name: foo
size: 2T
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Delete and eradicate volume named foo
purefa_volume:
name: foo
eradicate: yes
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: absent
- name: Create clone of volume bar named foo
purefa_volume:
name: foo
target: bar
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Overwrite volume bar with volume foo
purefa_volume:
name: foo
target: bar
overwrite: yes
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
'''
RETURN = r'''
'''
try:
from purestorage import purestorage
HAS_PURESTORAGE = True
except ImportError:
HAS_PURESTORAGE = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pure import get_system, purefa_argument_spec
def | (size):
"""Given a human-readable byte string (e.g. 2G, 30M),
return the number of bytes. Will return 0 if the argument has
an unexpected form.
"""
bytes = size[:-1]
unit = size[-1]
if bytes.isdigit():
bytes = int(bytes)
if unit == 'P':
bytes *= 1125899906842624
elif unit == 'T':
bytes *= 1099511627776
elif unit == 'G':
bytes *= 1073741824
elif unit == 'M':
bytes *= 1048576
else:
bytes = 0
else:
bytes = 0
return bytes
def get_volume(module, array):
"""Return Volume or None"""
try:
return array.get_volume(module.params['name'])
except:
return None
def get_target(module, array):
"""Return Volume or None"""
try:
return array.get_volume(module.params['target'])
except:
return None
def create_volume(module, array):
"""Create Volume"""
size = module.params['size']
changed = True
if not module.check_mode:
try:
array.create_volume(module.params['name'], size)
except:
changed = False
module.exit_json(changed=changed)
def copy_from_volume(module, array):
"""Create Volume Clone"""
changed = False
tgt = get_target(module, array)
if tgt is None:
changed = True
if not module.check_mode:
array.copy_volume(module.params['name'],
module.params['target'])
elif tgt is not None and module.params['overwrite']:
changed = True
if not module.check_mode:
array.copy_volume(module.params['name'],
module.params['target'],
overwrite=module.params['overwrite'])
module.exit_json(changed=changed)
def update_volume(module, array):
"""Update Volume"""
changed = True
vol = array.get_volume(module.params['name'])
if human_to_bytes(module.params['size']) > vol['size']:
if not module.check_mode:
array.extend_volume(module.params['name'], module.params['size'])
else:
changed = False
module.exit_json(changed=changed)
def delete_volume(module, array):
""" Delete Volume"""
changed = True
if not module.check_mode:
try:
array.destroy_volume(module.params['name'])
if module.params['eradicate']:
try:
array.eradicate_volume(module.params['name'])
except:
changed = False
except:
changed = False
module.exit_json(changed=changed)
def main():
argument_spec = purefa_argument_spec()
argument_spec.update(dict(
name=dict(type='str', required=True),
target=dict(type='str'),
overwrite=dict(type='bool', default=False),
eradicate=dict(type='bool', default=False),
state=dict(type='str', default='present', choices=['absent', 'present']),
size=dict(type='str'),
))
mutually_exclusive = [['size', 'target']]
module = AnsibleModule(argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
if not HAS_PURESTORAGE:
module.fail_json(msg='purestorage sdk is required for this module in volume')
size = module.params['size']
state = module.params['state']
array = get_system(module)
volume = get_volume(module, array)
target = get_target(module, array)
if state == 'present' and not volume and size:
create_volume(module, array)
elif state == 'present' and volume and size:
update_volume(module, array)
elif state == 'present' and volume and target:
copy_from_volume(module, array)
elif state == 'present' and volume and not target:
copy_from_volume(module, array)
elif state == 'absent' and volume:
delete_volume(module, array)
elif state == 'present' and (not volume or not size):
module.exit_json(changed=False)
elif state == 'absent' and not volume:
module.exit_json(changed=False)
if __name__ == '__main__':
main()
| human_to_bytes | identifier_name |
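A few spot checks of the size parser above (the identifier this row masks resolves to human_to_bytes): the multipliers are binary, and anything with a non-numeric size or an unrecognized unit falls through to 0. Sketch only, assuming the function above is in scope.
# Sketch, assuming human_to_bytes from the row above is defined.
assert human_to_bytes('1T') == 1099511627776    # 1024 ** 4
assert human_to_bytes('30M') == 30 * 1048576    # 30 * 1024 ** 2
assert human_to_bytes('2X') == 0                # unrecognized unit -> 0
assert human_to_bytes('abcG') == 0              # non-numeric size -> 0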
purefa_volume.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018, Simon Dodsley ([email protected])
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: purefa_volume
version_added: '2.4'
short_description: Manage volumes on Pure Storage FlashArrays
description:
- Create, delete or extend the capacity of a volume on Pure Storage FlashArray.
author:
- Simon Dodsley (@sdodsley)
options:
name:
description:
- The name of the volume.
required: true
target:
description:
- The name of the target volume, if copying.
state:
description:
- Define whether the volume should exist or not.
default: present
choices: [ absent, present ]
eradicate:
description:
- Define whether to eradicate the volume on delete or leave in trash.
type: bool
default: 'no'
overwrite:
description:
- Define whether to overwrite a target volume if it already exists.
type: bool
default: 'no'
size:
description:
- Volume size in M, G, T or P units.
extends_documentation_fragment:
- purestorage.fa
'''
EXAMPLES = r'''
- name: Create new volume named foo
purefa_volume:
name: foo
size: 1T
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Extend the size of an existing volume named foo
purefa_volume:
name: foo
size: 2T
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Delete and eradicate volume named foo
purefa_volume:
name: foo
eradicate: yes
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: absent
- name: Create clone of volume bar named foo
purefa_volume:
name: foo
target: bar
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Overwrite volume bar with volume foo
purefa_volume:
name: foo
target: bar
overwrite: yes
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
'''
RETURN = r'''
'''
try:
from purestorage import purestorage
HAS_PURESTORAGE = True
except ImportError:
HAS_PURESTORAGE = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pure import get_system, purefa_argument_spec
def human_to_bytes(size):
"""Given a human-readable byte string (e.g. 2G, 30M),
return the number of bytes. Will return 0 if the argument has
an unexpected form.
"""
bytes = size[:-1]
unit = size[-1]
if bytes.isdigit():
bytes = int(bytes)
if unit == 'P':
bytes *= 1125899906842624
elif unit == 'T':
bytes *= 1099511627776
elif unit == 'G':
bytes *= 1073741824
elif unit == 'M':
bytes *= 1048576
else:
bytes = 0
else:
bytes = 0
return bytes
def get_volume(module, array):
"""Return Volume or None"""
try:
return array.get_volume(module.params['name'])
except:
return None
def get_target(module, array):
"""Return Volume or None"""
try:
return array.get_volume(module.params['target'])
except:
return None
def create_volume(module, array):
"""Create Volume"""
size = module.params['size']
changed = True
if not module.check_mode:
try:
array.create_volume(module.params['name'], size)
except:
changed = False
module.exit_json(changed=changed)
def copy_from_volume(module, array):
"""Create Volume Clone"""
changed = False
tgt = get_target(module, array)
if tgt is None:
changed = True
if not module.check_mode:
array.copy_volume(module.params['name'],
module.params['target'])
elif tgt is not None and module.params['overwrite']:
changed = True
if not module.check_mode:
array.copy_volume(module.params['name'],
module.params['target'],
overwrite=module.params['overwrite'])
module.exit_json(changed=changed)
def update_volume(module, array):
"""Update Volume"""
changed = True
vol = array.get_volume(module.params['name'])
if human_to_bytes(module.params['size']) > vol['size']:
if not module.check_mode:
array.extend_volume(module.params['name'], module.params['size'])
else:
changed = False
module.exit_json(changed=changed)
def delete_volume(module, array):
""" Delete Volume"""
changed = True
if not module.check_mode:
try:
array.destroy_volume(module.params['name'])
if module.params['eradicate']:
try:
array.eradicate_volume(module.params['name'])
except:
changed = False
except:
changed = False
module.exit_json(changed=changed)
def main():
argument_spec = purefa_argument_spec()
argument_spec.update(dict(
name=dict(type='str', required=True),
target=dict(type='str'),
overwrite=dict(type='bool', default=False),
eradicate=dict(type='bool', default=False),
state=dict(type='str', default='present', choices=['absent', 'present']),
size=dict(type='str'),
))
mutually_exclusive = [['size', 'target']]
module = AnsibleModule(argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
if not HAS_PURESTORAGE:
module.fail_json(msg='purestorage sdk is required for this module in volume')
size = module.params['size']
state = module.params['state']
array = get_system(module)
volume = get_volume(module, array)
target = get_target(module, array)
if state == 'present' and not volume and size:
create_volume(module, array)
elif state == 'present' and volume and size:
update_volume(module, array)
elif state == 'present' and volume and target:
copy_from_volume(module, array)
elif state == 'present' and volume and not target:
copy_from_volume(module, array)
elif state == 'absent' and volume:
delete_volume(module, array)
elif state == 'present' and (not volume or not size):
module.exit_json(changed=False)
elif state == 'absent' and not volume:
|
if __name__ == '__main__':
main()
| module.exit_json(changed=False) | conditional_block |
purefa_volume.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018, Simon Dodsley ([email protected])
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: purefa_volume
version_added: '2.4'
short_description: Manage volumes on Pure Storage FlashArrays
description:
- Create, delete or extend the capacity of a volume on Pure Storage FlashArray.
author:
- Simon Dodsley (@sdodsley)
options:
name:
description:
- The name of the volume.
required: true
target:
description:
- The name of the target volume, if copying.
state:
description:
- Define whether the volume should exist or not.
default: present
choices: [ absent, present ]
eradicate:
description:
- Define whether to eradicate the volume on delete or leave in trash.
type: bool
default: 'no'
overwrite:
description:
- Define whether to overwrite a target volume if it already exists.
type: bool
default: 'no'
size:
description:
- Volume size in M, G, T or P units.
extends_documentation_fragment:
- purestorage.fa
'''
EXAMPLES = r'''
- name: Create new volume named foo
purefa_volume:
name: foo
size: 1T
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Extend the size of an existing volume named foo
purefa_volume:
name: foo
size: 2T
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Delete and eradicate volume named foo
purefa_volume:
name: foo
eradicate: yes
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: absent
- name: Create clone of volume bar named foo
purefa_volume:
name: foo
target: bar
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Overwrite volume bar with volume foo
purefa_volume:
name: foo
target: bar
overwrite: yes
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
'''
RETURN = r'''
'''
try:
from purestorage import purestorage
HAS_PURESTORAGE = True
except ImportError:
HAS_PURESTORAGE = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pure import get_system, purefa_argument_spec
def human_to_bytes(size):
"""Given a human-readable byte string (e.g. 2G, 30M),
return the number of bytes. Will return 0 if the argument has
an unexpected form.
"""
bytes = size[:-1]
unit = size[-1]
if bytes.isdigit():
bytes = int(bytes)
if unit == 'P':
bytes *= 1125899906842624
elif unit == 'T':
bytes *= 1099511627776
elif unit == 'G':
bytes *= 1073741824
elif unit == 'M':
bytes *= 1048576
else:
bytes = 0
else:
bytes = 0
return bytes
def get_volume(module, array):
"""Return Volume or None"""
try:
return array.get_volume(module.params['name'])
except:
return None
def get_target(module, array):
"""Return Volume or None"""
try:
return array.get_volume(module.params['target'])
except:
return None
def create_volume(module, array):
"""Create Volume"""
size = module.params['size']
changed = True
if not module.check_mode:
try:
array.create_volume(module.params['name'], size)
except:
changed = False
module.exit_json(changed=changed)
def copy_from_volume(module, array):
"""Create Volume Clone"""
changed = False
tgt = get_target(module, array)
if tgt is None:
changed = True
if not module.check_mode:
array.copy_volume(module.params['name'],
module.params['target'])
elif tgt is not None and module.params['overwrite']:
changed = True
if not module.check_mode:
array.copy_volume(module.params['name'],
module.params['target'],
overwrite=module.params['overwrite'])
module.exit_json(changed=changed)
def update_volume(module, array):
|
def delete_volume(module, array):
""" Delete Volume"""
changed = True
if not module.check_mode:
try:
array.destroy_volume(module.params['name'])
if module.params['eradicate']:
try:
array.eradicate_volume(module.params['name'])
except:
changed = False
except:
changed = False
module.exit_json(changed=changed)
def main():
argument_spec = purefa_argument_spec()
argument_spec.update(dict(
name=dict(type='str', required=True),
target=dict(type='str'),
overwrite=dict(type='bool', default=False),
eradicate=dict(type='bool', default=False),
state=dict(type='str', default='present', choices=['absent', 'present']),
size=dict(type='str'),
))
mutually_exclusive = [['size', 'target']]
module = AnsibleModule(argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
if not HAS_PURESTORAGE:
module.fail_json(msg='purestorage sdk is required for this module in volume')
size = module.params['size']
state = module.params['state']
array = get_system(module)
volume = get_volume(module, array)
target = get_target(module, array)
if state == 'present' and not volume and size:
create_volume(module, array)
elif state == 'present' and volume and size:
update_volume(module, array)
elif state == 'present' and volume and target:
copy_from_volume(module, array)
elif state == 'present' and volume and not target:
copy_from_volume(module, array)
elif state == 'absent' and volume:
delete_volume(module, array)
elif state == 'present' and (not volume or not size):
module.exit_json(changed=False)
elif state == 'absent' and not volume:
module.exit_json(changed=False)
if __name__ == '__main__':
main()
| """Update Volume"""
changed = True
vol = array.get_volume(module.params['name'])
if human_to_bytes(module.params['size']) > vol['size']:
if not module.check_mode:
array.extend_volume(module.params['name'], module.params['size'])
else:
changed = False
module.exit_json(changed=changed) | identifier_body |
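The body revealed above is extend-only: extend_volume fires only when the requested size strictly exceeds the current one, and a smaller request reports changed=False rather than shrinking the volume. A self-contained sketch of that comparison (would_extend and TIB are hypothetical stand-ins, not part of the module or the purestorage SDK):
# Sketch of update_volume's extend-only rule; names below are hypothetical.
TIB = 1099511627776  # the 'T' multiplier used by human_to_bytes

def would_extend(current_size_bytes, requested_bytes):
    # mirrors: human_to_bytes(module.params['size']) > vol['size']
    return requested_bytes > current_size_bytes

assert would_extend(1 * TIB, 2 * TIB) is True    # grow -> extend_volume
assert would_extend(2 * TIB, 1 * TIB) is False   # shrink request -> changed=False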
purefa_volume.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018, Simon Dodsley ([email protected])
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: purefa_volume
version_added: '2.4'
short_description: Manage volumes on Pure Storage FlashArrays
description:
- Create, delete or extend the capacity of a volume on Pure Storage FlashArray.
author:
- Simon Dodsley (@sdodsley)
options:
name:
description:
- The name of the volume.
required: true
target:
description:
- The name of the target volume, if copying.
state:
description:
- Define whether the volume should exist or not.
default: present
choices: [ absent, present ]
eradicate:
description:
- Define whether to eradicate the volume on delete or leave in trash.
type: bool
default: 'no'
overwrite:
description:
- Define whether to overwrite a target volume if it already exists.
type: bool
default: 'no'
size:
description:
- Volume size in M, G, T or P units.
extends_documentation_fragment:
- purestorage.fa
'''
EXAMPLES = r'''
- name: Create new volume named foo
purefa_volume:
name: foo
size: 1T
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Extend the size of an existing volume named foo
purefa_volume:
name: foo
size: 2T
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Delete and eradicate volume named foo
purefa_volume:
name: foo
eradicate: yes
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: absent
- name: Create clone of volume bar named foo
purefa_volume:
name: foo
target: bar
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Overwrite volume bar with volume foo
purefa_volume:
name: foo
target: bar
overwrite: yes
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
'''
RETURN = r'''
'''
try:
from purestorage import purestorage
HAS_PURESTORAGE = True
except ImportError:
HAS_PURESTORAGE = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pure import get_system, purefa_argument_spec
def human_to_bytes(size):
"""Given a human-readable byte string (e.g. 2G, 30M),
return the number of bytes. Will return 0 if the argument has
an unexpected form.
"""
bytes = size[:-1]
unit = size[-1]
if bytes.isdigit():
bytes = int(bytes)
if unit == 'P':
bytes *= 1125899906842624
elif unit == 'T':
bytes *= 1099511627776
elif unit == 'G':
bytes *= 1073741824
elif unit == 'M':
bytes *= 1048576
else:
bytes = 0
else:
bytes = 0
return bytes
def get_volume(module, array):
"""Return Volume or None"""
try:
return array.get_volume(module.params['name'])
except:
return None
def get_target(module, array):
"""Return Volume or None"""
try:
return array.get_volume(module.params['target'])
except:
return None
def create_volume(module, array):
"""Create Volume"""
size = module.params['size']
changed = True
if not module.check_mode:
try:
array.create_volume(module.params['name'], size)
except:
changed = False
module.exit_json(changed=changed)
def copy_from_volume(module, array):
"""Create Volume Clone""" | if tgt is None:
changed = True
if not module.check_mode:
array.copy_volume(module.params['name'],
module.params['target'])
elif tgt is not None and module.params['overwrite']:
changed = True
if not module.check_mode:
array.copy_volume(module.params['name'],
module.params['target'],
overwrite=module.params['overwrite'])
module.exit_json(changed=changed)
def update_volume(module, array):
"""Update Volume"""
changed = True
vol = array.get_volume(module.params['name'])
if human_to_bytes(module.params['size']) > vol['size']:
if not module.check_mode:
array.extend_volume(module.params['name'], module.params['size'])
else:
changed = False
module.exit_json(changed=changed)
def delete_volume(module, array):
""" Delete Volume"""
changed = True
if not module.check_mode:
try:
array.destroy_volume(module.params['name'])
if module.params['eradicate']:
try:
array.eradicate_volume(module.params['name'])
except:
changed = False
except:
changed = False
module.exit_json(changed=changed)
def main():
argument_spec = purefa_argument_spec()
argument_spec.update(dict(
name=dict(type='str', required=True),
target=dict(type='str'),
overwrite=dict(type='bool', default=False),
eradicate=dict(type='bool', default=False),
state=dict(type='str', default='present', choices=['absent', 'present']),
size=dict(type='str'),
))
mutually_exclusive = [['size', 'target']]
module = AnsibleModule(argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
if not HAS_PURESTORAGE:
module.fail_json(msg='purestorage sdk is required for this module in volume')
size = module.params['size']
state = module.params['state']
array = get_system(module)
volume = get_volume(module, array)
target = get_target(module, array)
if state == 'present' and not volume and size:
create_volume(module, array)
elif state == 'present' and volume and size:
update_volume(module, array)
elif state == 'present' and volume and target:
copy_from_volume(module, array)
elif state == 'present' and volume and not target:
copy_from_volume(module, array)
elif state == 'absent' and volume:
delete_volume(module, array)
elif state == 'present' and (not volume or not size):
module.exit_json(changed=False)
elif state == 'absent' and not volume:
module.exit_json(changed=False)
if __name__ == '__main__':
main() | changed = False
tgt = get_target(module, array)
| random_line_split |
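copy_from_volume above has three outcomes: clone when the target volume does not yet exist, overwrite when it exists and overwrite=True, and a changed=False no-op otherwise. A pure-logic sketch of that decision table (copy_outcome is a hypothetical name; no SDK calls):
# Sketch of copy_from_volume's decision table; copy_outcome is hypothetical.
def copy_outcome(target_exists, overwrite):
    if not target_exists:
        return 'clone'        # the tgt is None branch
    if overwrite:
        return 'overwrite'    # tgt exists and overwrite requested
    return 'no-op'            # falls through with changed still False

assert copy_outcome(False, False) == 'clone'
assert copy_outcome(True, True) == 'overwrite'
assert copy_outcome(True, False) == 'no-op'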
font_icon.rs | use crate::{
proc_macros::IntoRenderObject,
render_object::*,
utils::{Brush, Point, Rectangle},
};
/// The `FontIconRenderObject` holds the font icons inside | impl RenderObject for FontIconRenderObject {
fn render_self(&self, ctx: &mut Context, global_position: &Point) {
let (bounds, icon, icon_brush, icon_font, icon_size) = {
let widget = ctx.widget();
(
*widget.get::<Rectangle>("bounds"),
widget.clone::<String>("icon"),
widget.get::<Brush>("icon_brush").clone(),
widget.get::<String>("icon_font").clone(),
*widget.get::<f64>("icon_size"),
)
};
if bounds.width() == 0.0
|| bounds.height() == 0.0
|| icon_brush.is_transparent()
|| icon_size == 0.0
|| icon.is_empty()
{
return;
}
if !icon.is_empty() {
ctx.render_context_2_d().begin_path();
ctx.render_context_2_d().set_font_family(icon_font);
ctx.render_context_2_d().set_font_size(icon_size);
ctx.render_context_2_d().set_fill_style(icon_brush);
ctx.render_context_2_d().fill_text(
&icon,
global_position.x() + bounds.x(),
global_position.y() + bounds.y(),
);
ctx.render_context_2_d().close_path();
}
}
} | /// a render object.
#[derive(Debug, IntoRenderObject)]
pub struct FontIconRenderObject;
| random_line_split |
font_icon.rs | use crate::{
proc_macros::IntoRenderObject,
render_object::*,
utils::{Brush, Point, Rectangle},
};
/// The `FontIconRenderObject` holds the font icons inside
/// a render object.
#[derive(Debug, IntoRenderObject)]
pub struct FontIconRenderObject;
impl RenderObject for FontIconRenderObject {
fn render_self(&self, ctx: &mut Context, global_position: &Point) |
}
| {
let (bounds, icon, icon_brush, icon_font, icon_size) = {
let widget = ctx.widget();
(
*widget.get::<Rectangle>("bounds"),
widget.clone::<String>("icon"),
widget.get::<Brush>("icon_brush").clone(),
widget.get::<String>("icon_font").clone(),
*widget.get::<f64>("icon_size"),
)
};
if bounds.width() == 0.0
|| bounds.height() == 0.0
|| icon_brush.is_transparent()
|| icon_size == 0.0
|| icon.is_empty()
{
return;
}
if !icon.is_empty() {
ctx.render_context_2_d().begin_path();
ctx.render_context_2_d().set_font_family(icon_font);
ctx.render_context_2_d().set_font_size(icon_size);
ctx.render_context_2_d().set_fill_style(icon_brush);
ctx.render_context_2_d().fill_text(
&icon,
global_position.x() + bounds.x(),
global_position.y() + bounds.y(),
);
ctx.render_context_2_d().close_path();
}
} | identifier_body |
font_icon.rs | use crate::{
proc_macros::IntoRenderObject,
render_object::*,
utils::{Brush, Point, Rectangle},
};
/// The `FontIconRenderObject` holds the font icons inside
/// a render object.
#[derive(Debug, IntoRenderObject)]
pub struct FontIconRenderObject;
impl RenderObject for FontIconRenderObject {
fn render_self(&self, ctx: &mut Context, global_position: &Point) {
let (bounds, icon, icon_brush, icon_font, icon_size) = {
let widget = ctx.widget();
(
*widget.get::<Rectangle>("bounds"),
widget.clone::<String>("icon"),
widget.get::<Brush>("icon_brush").clone(),
widget.get::<String>("icon_font").clone(),
*widget.get::<f64>("icon_size"),
)
};
if bounds.width() == 0.0
|| bounds.height() == 0.0
|| icon_brush.is_transparent()
|| icon_size == 0.0
|| icon.is_empty()
|
if !icon.is_empty() {
ctx.render_context_2_d().begin_path();
ctx.render_context_2_d().set_font_family(icon_font);
ctx.render_context_2_d().set_font_size(icon_size);
ctx.render_context_2_d().set_fill_style(icon_brush);
ctx.render_context_2_d().fill_text(
&icon,
global_position.x() + bounds.x(),
global_position.y() + bounds.y(),
);
ctx.render_context_2_d().close_path();
}
}
}
| {
return;
} | conditional_block |
font_icon.rs | use crate::{
proc_macros::IntoRenderObject,
render_object::*,
utils::{Brush, Point, Rectangle},
};
/// The `FontIconRenderObject` holds the font icons inside
/// a render object.
#[derive(Debug, IntoRenderObject)]
pub struct | ;
impl RenderObject for FontIconRenderObject {
fn render_self(&self, ctx: &mut Context, global_position: &Point) {
let (bounds, icon, icon_brush, icon_font, icon_size) = {
let widget = ctx.widget();
(
*widget.get::<Rectangle>("bounds"),
widget.clone::<String>("icon"),
widget.get::<Brush>("icon_brush").clone(),
widget.get::<String>("icon_font").clone(),
*widget.get::<f64>("icon_size"),
)
};
if bounds.width() == 0.0
|| bounds.height() == 0.0
|| icon_brush.is_transparent()
|| icon_size == 0.0
|| icon.is_empty()
{
return;
}
if !icon.is_empty() {
ctx.render_context_2_d().begin_path();
ctx.render_context_2_d().set_font_family(icon_font);
ctx.render_context_2_d().set_font_size(icon_size);
ctx.render_context_2_d().set_fill_style(icon_brush);
ctx.render_context_2_d().fill_text(
&icon,
global_position.x() + bounds.x(),
global_position.y() + bounds.y(),
);
ctx.render_context_2_d().close_path();
}
}
}
| FontIconRenderObject | identifier_name |
tasks.py | import hashlib
import logging
import os
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.db import transaction
from PIL import Image
from olympia import amo
from olympia.addons.models import (
Addon, attach_tags, attach_translations, AppSupport, CompatOverride,
IncompatibleVersions, Persona, Preview)
from olympia.addons.indexers import AddonIndexer
from olympia.amo.celery import task
from olympia.amo.decorators import set_modified_on, write
from olympia.amo.helpers import user_media_path
from olympia.amo.storage_utils import rm_stored_dir
from olympia.amo.utils import cache_ns_key, ImageCheck, LocalFileStorage
from olympia.editors.models import RereviewQueueTheme
from olympia.lib.es.utils import index_objects
from olympia.versions.models import Version
# pulling tasks from cron
from . import cron # noqa
log = logging.getLogger('z.task')
@task
@write
def version_changed(addon_id, **kw):
update_last_updated(addon_id)
update_appsupport([addon_id])
def update_last_updated(addon_id):
queries = Addon._last_updated_queries()
try:
addon = Addon.objects.get(pk=addon_id)
except Addon.DoesNotExist:
log.info('[1@None] Updating last updated for %s failed, no addon found'
% addon_id)
return
log.info('[1@None] Updating last updated for %s.' % addon_id)
if addon.is_persona():
q = 'personas'
elif addon.status == amo.STATUS_PUBLIC:
q = 'public'
else:
q = 'exp'
qs = queries[q].filter(pk=addon_id).using('default')
res = qs.values_list('id', 'last_updated')
if res:
pk, t = res[0]
Addon.objects.filter(pk=pk).update(last_updated=t)
@write
def update_appsupport(ids):
log.info("[%s@None] Updating appsupport for %s." % (len(ids), ids))
addons = Addon.objects.no_cache().filter(id__in=ids).no_transforms()
support = []
for addon in addons:
for app, appver in addon.compatible_apps.items():
if appver is None:
# Fake support for all version ranges.
min_, max_ = 0, 999999999999999999
else:
min_, max_ = appver.min.version_int, appver.max.version_int
support.append(AppSupport(addon=addon, app=app.id,
min=min_, max=max_))
if not support:
return
with transaction.atomic():
AppSupport.objects.filter(addon__id__in=ids).delete()
AppSupport.objects.bulk_create(support)
# All our updates were sql, so invalidate manually.
Addon.objects.invalidate(*addons)
@task
def delete_preview_files(id, **kw):
log.info('[1@None] Removing preview with id of %s.' % id)
p = Preview(id=id)
for f in (p.thumbnail_path, p.image_path):
try:
storage.delete(f)
except Exception, e:
log.error('Error deleting preview file (%s): %s' % (f, e))
@task(acks_late=True)
def index_addons(ids, **kw):
log.info('Indexing addons %s-%s. [%s]' % (ids[0], ids[-1], len(ids)))
transforms = (attach_tags, attach_translations)
index_objects(ids, Addon, AddonIndexer.extract_document,
kw.pop('index', None), transforms, Addon.unfiltered)
@task
def unindex_addons(ids, **kw):
for addon in ids:
log.info('Removing addon [%s] from search index.' % addon)
Addon.unindex(addon)
@task
def delete_persona_image(dst, **kw):
log.info('[1@None] Deleting persona image: %s.' % dst)
if not dst.startswith(user_media_path('addons')):
log.error("Someone tried deleting something they shouldn't: %s" % dst)
return
try:
storage.delete(dst)
except Exception, e:
log.error('Error deleting persona image: %s' % e)
@set_modified_on
def create_persona_preview_images(src, full_dst, **kw):
"""
Creates a 680x100 thumbnail used for the Persona preview and
a 32x32 thumbnail used for search suggestions/detail pages.
"""
log.info('[1@None] Resizing persona images: %s' % full_dst)
preview, full = amo.PERSONA_IMAGE_SIZES['header']
preview_w, preview_h = preview
orig_w, orig_h = full
with storage.open(src) as fp:
i_orig = i = Image.open(fp)
# Crop image from the right.
i = i.crop((orig_w - (preview_w * 2), 0, orig_w, orig_h))
# Resize preview.
i = i.resize(preview, Image.ANTIALIAS)
i.load()
with storage.open(full_dst[0], 'wb') as fp:
i.save(fp, 'png')
_, icon_size = amo.PERSONA_IMAGE_SIZES['icon']
icon_w, icon_h = icon_size
# Resize icon.
i = i_orig
i.load()
i = i.crop((orig_w - (preview_h * 2), 0, orig_w, orig_h))
i = i.resize(icon_size, Image.ANTIALIAS)
i.load()
with storage.open(full_dst[1], 'wb') as fp:
i.save(fp, 'png')
return True
@set_modified_on
def save_persona_image(src, full_dst, **kw):
"""Creates a PNG of a Persona header/footer image."""
log.info('[1@None] Saving persona image: %s' % full_dst)
img = ImageCheck(storage.open(src))
if not img.is_image():
log.error('Not an image: %s' % src, exc_info=True)
return
with storage.open(src, 'rb') as fp:
i = Image.open(fp)
with storage.open(full_dst, 'wb') as fp:
i.save(fp, 'png')
return True
@task
def update_incompatible_appversions(data, **kw):
"""Updates the incompatible_versions table for this version."""
log.info('Updating incompatible_versions for %s versions.' % len(data))
addon_ids = set()
for version_id in data:
# This is here to handle both post_save and post_delete hooks.
IncompatibleVersions.objects.filter(version=version_id).delete()
try:
version = Version.objects.get(pk=version_id)
except Version.DoesNotExist:
log.info('Version ID [%d] not found. Incompatible versions were '
'cleared.' % version_id)
return
addon_ids.add(version.addon_id)
try:
compat = CompatOverride.objects.get(addon=version.addon)
except CompatOverride.DoesNotExist:
log.info('Compat override for addon with version ID [%d] not '
'found. Incompatible versions were cleared.' % version_id)
return
app_ranges = []
ranges = compat.collapsed_ranges()
for range in ranges:
if range.min == '0' and range.max == '*':
# Wildcard range, add all app ranges
app_ranges.extend(range.apps)
else:
# Since we can't rely on add-on version numbers, get the min
# and max ID values and find versions whose ID is within those
# ranges, being careful with wildcards.
min_id = max_id = None
if range.min == '0':
versions = (Version.objects.filter(addon=version.addon_id)
.order_by('id')
.values_list('id', flat=True)[:1])
if versions:
min_id = versions[0]
else:
try:
min_id = Version.objects.get(addon=version.addon_id,
version=range.min).id
except Version.DoesNotExist:
pass
if range.max == '*':
versions = (Version.objects.filter(addon=version.addon_id)
.order_by('-id')
.values_list('id', flat=True)[:1])
if versions:
max_id = versions[0]
else:
try:
max_id = Version.objects.get(addon=version.addon_id,
version=range.max).id
except Version.DoesNotExist:
pass
if min_id and max_id:
if min_id <= version.id <= max_id:
|
for app_range in app_ranges:
IncompatibleVersions.objects.create(version=version,
app=app_range.app.id,
min_app_version=app_range.min,
max_app_version=app_range.max)
log.info('Added incompatible version for version ID [%d]: '
'app:%d, %s -> %s' % (version_id, app_range.app.id,
app_range.min, app_range.max))
# Increment namespace cache of compat versions.
for addon_id in addon_ids:
cache_ns_key('d2c-versions:%s' % addon_id, increment=True)
def make_checksum(header_path, footer_path):
ls = LocalFileStorage()
footer = footer_path and ls._open(footer_path).read() or ''
raw_checksum = ls._open(header_path).read() + footer
return hashlib.sha224(raw_checksum).hexdigest()
def theme_checksum(theme, **kw):
theme.checksum = make_checksum(theme.header_path, theme.footer_path)
dupe_personas = Persona.objects.filter(checksum=theme.checksum)
if dupe_personas.exists():
theme.dupe_persona = dupe_personas[0]
theme.save()
def rereviewqueuetheme_checksum(rqt, **kw):
"""Check for possible duplicate theme images."""
dupe_personas = Persona.objects.filter(
checksum=make_checksum(rqt.header_path or rqt.theme.header_path,
rqt.footer_path or rqt.theme.footer_path))
if dupe_personas.exists():
rqt.dupe_persona = dupe_personas[0]
rqt.save()
@task
@write
def save_theme(header, footer, addon, **kw):
"""Save theme image and calculates checksum after theme save."""
dst_root = os.path.join(user_media_path('addons'), str(addon.id))
header = os.path.join(settings.TMP_PATH, 'persona_header', header)
header_dst = os.path.join(dst_root, 'header.png')
if footer:
footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer)
footer_dst = os.path.join(dst_root, 'footer.png')
try:
save_persona_image(src=header, full_dst=header_dst)
if footer:
save_persona_image(src=footer, full_dst=footer_dst)
create_persona_preview_images(
src=header, full_dst=[os.path.join(dst_root, 'preview.png'),
os.path.join(dst_root, 'icon.png')],
set_modified_on=[addon])
theme_checksum(addon.persona)
except IOError:
addon.delete()
raise
@task
@write
def save_theme_reupload(header, footer, addon, **kw):
header_dst = None
footer_dst = None
dst_root = os.path.join(user_media_path('addons'), str(addon.id))
try:
if header:
header = os.path.join(settings.TMP_PATH, 'persona_header', header)
header_dst = os.path.join(dst_root, 'pending_header.png')
save_persona_image(src=header, full_dst=header_dst)
if footer:
footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer)
footer_dst = os.path.join(dst_root, 'pending_footer.png')
save_persona_image(src=footer, full_dst=footer_dst)
except IOError as e:
log.error(str(e))
raise
if header_dst or footer_dst:
theme = addon.persona
header = 'pending_header.png' if header_dst else theme.header
# Theme footer is optional, but can't be None.
footer = theme.footer or ''
if footer_dst:
footer = 'pending_footer.png'
# Store pending header and/or footer file paths for review.
RereviewQueueTheme.objects.filter(theme=theme).delete()
rqt = RereviewQueueTheme(theme=theme, header=header, footer=footer)
rereviewqueuetheme_checksum(rqt=rqt)
rqt.save()
@task
@write
def calc_checksum(theme_id, **kw):
"""For migration 596."""
lfs = LocalFileStorage()
theme = Persona.objects.get(id=theme_id)
header = theme.header_path
footer = theme.footer_path
# Delete invalid themes that are not images (e.g. PDF, EXE).
try:
Image.open(header)
Image.open(footer)
except IOError:
log.info('Deleting invalid theme [%s] (header: %s) (footer: %s)' %
(theme.addon.id, header, footer))
theme.addon.delete()
theme.delete()
rm_stored_dir(header.replace('header.png', ''), storage=lfs)
return
# Calculate checksum and save.
try:
theme.checksum = make_checksum(header, footer)
theme.save()
except IOError as e:
log.error(str(e))
| app_ranges.extend(range.apps) | conditional_block |
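make_checksum above concatenates the raw header and footer bytes and takes a SHA-224 digest, so identical image pairs collide on the checksum; that collision is exactly what the dupe_persona lookups rely on. A standalone sketch with plain byte strings in place of LocalFileStorage (checksum_pair is a hypothetical name):
# Sketch of the make_checksum idea; checksum_pair is hypothetical, no storage layer.
import hashlib

def checksum_pair(header_bytes, footer_bytes=b''):
    # footer is optional in the original (footer_path may be empty)
    return hashlib.sha224(header_bytes + footer_bytes).hexdigest()

a = checksum_pair(b'header-png', b'footer-png')
b = checksum_pair(b'header-png', b'footer-png')
assert a == b and len(a) == 56   # SHA-224 hex digests are 56 chars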
tasks.py | import hashlib
import logging
import os
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.db import transaction
from PIL import Image
from olympia import amo
from olympia.addons.models import (
Addon, attach_tags, attach_translations, AppSupport, CompatOverride,
IncompatibleVersions, Persona, Preview)
from olympia.addons.indexers import AddonIndexer
from olympia.amo.celery import task
from olympia.amo.decorators import set_modified_on, write
from olympia.amo.helpers import user_media_path
from olympia.amo.storage_utils import rm_stored_dir
from olympia.amo.utils import cache_ns_key, ImageCheck, LocalFileStorage
from olympia.editors.models import RereviewQueueTheme
from olympia.lib.es.utils import index_objects
from olympia.versions.models import Version
# pulling tasks from cron
from . import cron # noqa
log = logging.getLogger('z.task')
@task
@write
def version_changed(addon_id, **kw):
update_last_updated(addon_id)
update_appsupport([addon_id])
def update_last_updated(addon_id):
queries = Addon._last_updated_queries()
try:
addon = Addon.objects.get(pk=addon_id)
except Addon.DoesNotExist:
log.info('[1@None] Updating last updated for %s failed, no addon found'
% addon_id)
return
log.info('[1@None] Updating last updated for %s.' % addon_id)
if addon.is_persona():
q = 'personas'
elif addon.status == amo.STATUS_PUBLIC:
q = 'public'
else:
q = 'exp'
qs = queries[q].filter(pk=addon_id).using('default')
res = qs.values_list('id', 'last_updated')
if res:
pk, t = res[0]
Addon.objects.filter(pk=pk).update(last_updated=t)
@write
def update_appsupport(ids):
log.info("[%s@None] Updating appsupport for %s." % (len(ids), ids))
addons = Addon.objects.no_cache().filter(id__in=ids).no_transforms()
support = []
for addon in addons:
for app, appver in addon.compatible_apps.items():
if appver is None:
# Fake support for all version ranges.
min_, max_ = 0, 999999999999999999
else:
min_, max_ = appver.min.version_int, appver.max.version_int
support.append(AppSupport(addon=addon, app=app.id,
min=min_, max=max_))
if not support:
return
with transaction.atomic():
AppSupport.objects.filter(addon__id__in=ids).delete()
AppSupport.objects.bulk_create(support)
# All our updates were sql, so invalidate manually.
Addon.objects.invalidate(*addons)
@task
def delete_preview_files(id, **kw):
log.info('[1@None] Removing preview with id of %s.' % id)
p = Preview(id=id)
for f in (p.thumbnail_path, p.image_path):
try:
storage.delete(f)
except Exception, e:
log.error('Error deleting preview file (%s): %s' % (f, e))
@task(acks_late=True)
def index_addons(ids, **kw):
log.info('Indexing addons %s-%s. [%s]' % (ids[0], ids[-1], len(ids)))
transforms = (attach_tags, attach_translations)
index_objects(ids, Addon, AddonIndexer.extract_document,
kw.pop('index', None), transforms, Addon.unfiltered)
@task
def unindex_addons(ids, **kw):
for addon in ids:
log.info('Removing addon [%s] from search index.' % addon)
Addon.unindex(addon)
@task
def delete_persona_image(dst, **kw):
log.info('[1@None] Deleting persona image: %s.' % dst)
if not dst.startswith(user_media_path('addons')):
log.error("Someone tried deleting something they shouldn't: %s" % dst)
return
try:
storage.delete(dst)
except Exception, e:
log.error('Error deleting persona image: %s' % e)
@set_modified_on
def create_persona_preview_images(src, full_dst, **kw):
"""
Creates a 680x100 thumbnail used for the Persona preview and
a 32x32 thumbnail used for search suggestions/detail pages.
"""
log.info('[1@None] Resizing persona images: %s' % full_dst)
preview, full = amo.PERSONA_IMAGE_SIZES['header']
preview_w, preview_h = preview
orig_w, orig_h = full
with storage.open(src) as fp:
i_orig = i = Image.open(fp)
# Crop image from the right.
i = i.crop((orig_w - (preview_w * 2), 0, orig_w, orig_h))
# Resize preview.
i = i.resize(preview, Image.ANTIALIAS)
i.load()
with storage.open(full_dst[0], 'wb') as fp:
i.save(fp, 'png')
_, icon_size = amo.PERSONA_IMAGE_SIZES['icon']
icon_w, icon_h = icon_size
# Resize icon.
i = i_orig
i.load()
i = i.crop((orig_w - (preview_h * 2), 0, orig_w, orig_h))
i = i.resize(icon_size, Image.ANTIALIAS)
i.load()
with storage.open(full_dst[1], 'wb') as fp:
i.save(fp, 'png')
return True
@set_modified_on
def save_persona_image(src, full_dst, **kw):
"""Creates a PNG of a Persona header/footer image."""
log.info('[1@None] Saving persona image: %s' % full_dst)
img = ImageCheck(storage.open(src))
if not img.is_image():
log.error('Not an image: %s' % src, exc_info=True)
return
with storage.open(src, 'rb') as fp:
i = Image.open(fp)
with storage.open(full_dst, 'wb') as fp:
i.save(fp, 'png')
return True
@task
def update_incompatible_appversions(data, **kw):
"""Updates the incompatible_versions table for this version."""
log.info('Updating incompatible_versions for %s versions.' % len(data))
addon_ids = set()
for version_id in data:
# This is here to handle both post_save and post_delete hooks.
IncompatibleVersions.objects.filter(version=version_id).delete()
try:
version = Version.objects.get(pk=version_id)
except Version.DoesNotExist:
log.info('Version ID [%d] not found. Incompatible versions were '
'cleared.' % version_id)
return
addon_ids.add(version.addon_id)
try:
compat = CompatOverride.objects.get(addon=version.addon)
except CompatOverride.DoesNotExist:
log.info('Compat override for addon with version ID [%d] not '
'found. Incompatible versions were cleared.' % version_id)
return
app_ranges = []
ranges = compat.collapsed_ranges()
for range in ranges:
if range.min == '0' and range.max == '*':
# Wildcard range, add all app ranges
app_ranges.extend(range.apps)
else:
# Since we can't rely on add-on version numbers, get the min
# and max ID values and find versions whose ID is within those
# ranges, being careful with wildcards.
min_id = max_id = None
if range.min == '0':
versions = (Version.objects.filter(addon=version.addon_id)
.order_by('id')
.values_list('id', flat=True)[:1])
if versions:
min_id = versions[0]
else:
try:
min_id = Version.objects.get(addon=version.addon_id,
version=range.min).id
except Version.DoesNotExist:
pass
if range.max == '*':
versions = (Version.objects.filter(addon=version.addon_id)
.order_by('-id')
.values_list('id', flat=True)[:1])
if versions:
max_id = versions[0]
else:
try:
max_id = Version.objects.get(addon=version.addon_id,
version=range.max).id
except Version.DoesNotExist:
pass
| IncompatibleVersions.objects.create(version=version,
app=app_range.app.id,
min_app_version=app_range.min,
max_app_version=app_range.max)
log.info('Added incompatible version for version ID [%d]: '
'app:%d, %s -> %s' % (version_id, app_range.app.id,
app_range.min, app_range.max))
# Increment namespace cache of compat versions.
for addon_id in addon_ids:
cache_ns_key('d2c-versions:%s' % addon_id, increment=True)
def make_checksum(header_path, footer_path):
ls = LocalFileStorage()
footer = footer_path and ls._open(footer_path).read() or ''
raw_checksum = ls._open(header_path).read() + footer
return hashlib.sha224(raw_checksum).hexdigest()
def theme_checksum(theme, **kw):
theme.checksum = make_checksum(theme.header_path, theme.footer_path)
dupe_personas = Persona.objects.filter(checksum=theme.checksum)
if dupe_personas.exists():
theme.dupe_persona = dupe_personas[0]
theme.save()
def rereviewqueuetheme_checksum(rqt, **kw):
"""Check for possible duplicate theme images."""
dupe_personas = Persona.objects.filter(
checksum=make_checksum(rqt.header_path or rqt.theme.header_path,
rqt.footer_path or rqt.theme.footer_path))
if dupe_personas.exists():
rqt.dupe_persona = dupe_personas[0]
rqt.save()
@task
@write
def save_theme(header, footer, addon, **kw):
"""Save theme image and calculates checksum after theme save."""
dst_root = os.path.join(user_media_path('addons'), str(addon.id))
header = os.path.join(settings.TMP_PATH, 'persona_header', header)
header_dst = os.path.join(dst_root, 'header.png')
if footer:
footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer)
footer_dst = os.path.join(dst_root, 'footer.png')
try:
save_persona_image(src=header, full_dst=header_dst)
if footer:
save_persona_image(src=footer, full_dst=footer_dst)
create_persona_preview_images(
src=header, full_dst=[os.path.join(dst_root, 'preview.png'),
os.path.join(dst_root, 'icon.png')],
set_modified_on=[addon])
theme_checksum(addon.persona)
except IOError:
addon.delete()
raise
@task
@write
def save_theme_reupload(header, footer, addon, **kw):
header_dst = None
footer_dst = None
dst_root = os.path.join(user_media_path('addons'), str(addon.id))
try:
if header:
header = os.path.join(settings.TMP_PATH, 'persona_header', header)
header_dst = os.path.join(dst_root, 'pending_header.png')
save_persona_image(src=header, full_dst=header_dst)
if footer:
footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer)
footer_dst = os.path.join(dst_root, 'pending_footer.png')
save_persona_image(src=footer, full_dst=footer_dst)
except IOError as e:
log.error(str(e))
raise
if header_dst or footer_dst:
theme = addon.persona
header = 'pending_header.png' if header_dst else theme.header
# Theme footer is optional, but can't be None.
footer = theme.footer or ''
if footer_dst:
footer = 'pending_footer.png'
# Store pending header and/or footer file paths for review.
RereviewQueueTheme.objects.filter(theme=theme).delete()
rqt = RereviewQueueTheme(theme=theme, header=header, footer=footer)
rereviewqueuetheme_checksum(rqt=rqt)
rqt.save()
@task
@write
def calc_checksum(theme_id, **kw):
"""For migration 596."""
lfs = LocalFileStorage()
theme = Persona.objects.get(id=theme_id)
header = theme.header_path
footer = theme.footer_path
# Delete invalid themes that are not images (e.g. PDF, EXE).
try:
Image.open(header)
Image.open(footer)
except IOError:
log.info('Deleting invalid theme [%s] (header: %s) (footer: %s)' %
(theme.addon.id, header, footer))
theme.addon.delete()
theme.delete()
rm_stored_dir(header.replace('header.png', ''), storage=lfs)
return
# Calculate checksum and save.
try:
theme.checksum = make_checksum(header, footer)
theme.save()
except IOError as e:
log.error(str(e)) | if min_id and max_id:
if min_id <= version.id <= max_id:
app_ranges.extend(range.apps)
for app_range in app_ranges: | random_line_split |
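create_persona_preview_images above crops the header from its right edge (a box twice the preview width, full height) before scaling down to 680x100. A minimal PIL sketch of just that geometry, using the image's own size where the task uses the canonical PERSONA_IMAGE_SIZES constants (preview_from_header is a hypothetical name):
# Sketch of the right-edge crop + resize behind persona previews.
from PIL import Image

def preview_from_header(img, preview=(680, 100)):
    orig_w, orig_h = img.size
    preview_w, _ = preview
    box = (orig_w - preview_w * 2, 0, orig_w, orig_h)  # crop from the right
    # Image.ANTIALIAS as in the source; Pillow >= 10 spells it Image.LANCZOS
    return img.crop(box).resize(preview, Image.ANTIALIAS)

header = Image.new('RGB', (3000, 200))                 # dummy header image
assert preview_from_header(header).size == (680, 100)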
tasks.py | import hashlib
import logging
import os
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.db import transaction
from PIL import Image
from olympia import amo
from olympia.addons.models import (
Addon, attach_tags, attach_translations, AppSupport, CompatOverride,
IncompatibleVersions, Persona, Preview)
from olympia.addons.indexers import AddonIndexer
from olympia.amo.celery import task
from olympia.amo.decorators import set_modified_on, write
from olympia.amo.helpers import user_media_path
from olympia.amo.storage_utils import rm_stored_dir
from olympia.amo.utils import cache_ns_key, ImageCheck, LocalFileStorage
from olympia.editors.models import RereviewQueueTheme
from olympia.lib.es.utils import index_objects
from olympia.versions.models import Version
# pulling tasks from cron
from . import cron # noqa
log = logging.getLogger('z.task')
@task
@write
def version_changed(addon_id, **kw):
update_last_updated(addon_id)
update_appsupport([addon_id])
def update_last_updated(addon_id):
queries = Addon._last_updated_queries()
try:
addon = Addon.objects.get(pk=addon_id)
except Addon.DoesNotExist:
log.info('[1@None] Updating last updated for %s failed, no addon found'
% addon_id)
return
log.info('[1@None] Updating last updated for %s.' % addon_id)
if addon.is_persona():
q = 'personas'
elif addon.status == amo.STATUS_PUBLIC:
q = 'public'
else:
q = 'exp'
qs = queries[q].filter(pk=addon_id).using('default')
res = qs.values_list('id', 'last_updated')
if res:
pk, t = res[0]
Addon.objects.filter(pk=pk).update(last_updated=t)
@write
def update_appsupport(ids):
log.info("[%s@None] Updating appsupport for %s." % (len(ids), ids))
addons = Addon.objects.no_cache().filter(id__in=ids).no_transforms()
support = []
for addon in addons:
for app, appver in addon.compatible_apps.items():
if appver is None:
# Fake support for all version ranges.
min_, max_ = 0, 999999999999999999
else:
min_, max_ = appver.min.version_int, appver.max.version_int
support.append(AppSupport(addon=addon, app=app.id,
min=min_, max=max_))
if not support:
return
with transaction.atomic():
AppSupport.objects.filter(addon__id__in=ids).delete()
AppSupport.objects.bulk_create(support)
# All our updates were sql, so invalidate manually.
Addon.objects.invalidate(*addons)
@task
def delete_preview_files(id, **kw):
log.info('[1@None] Removing preview with id of %s.' % id)
p = Preview(id=id)
for f in (p.thumbnail_path, p.image_path):
try:
storage.delete(f)
        except Exception as e:
log.error('Error deleting preview file (%s): %s' % (f, e))
@task(acks_late=True)
def index_addons(ids, **kw):
log.info('Indexing addons %s-%s. [%s]' % (ids[0], ids[-1], len(ids)))
transforms = (attach_tags, attach_translations)
index_objects(ids, Addon, AddonIndexer.extract_document,
kw.pop('index', None), transforms, Addon.unfiltered)
@task
def unindex_addons(ids, **kw):
for addon in ids:
log.info('Removing addon [%s] from search index.' % addon)
Addon.unindex(addon)
@task
def delete_persona_image(dst, **kw):
log.info('[1@None] Deleting persona image: %s.' % dst)
if not dst.startswith(user_media_path('addons')):
log.error("Someone tried deleting something they shouldn't: %s" % dst)
return
try:
storage.delete(dst)
    except Exception as e:
log.error('Error deleting persona image: %s' % e)
@set_modified_on
def create_persona_preview_images(src, full_dst, **kw):
"""
Creates a 680x100 thumbnail used for the Persona preview and
a 32x32 thumbnail used for search suggestions/detail pages.
"""
log.info('[1@None] Resizing persona images: %s' % full_dst)
preview, full = amo.PERSONA_IMAGE_SIZES['header']
preview_w, preview_h = preview
orig_w, orig_h = full
with storage.open(src) as fp:
i_orig = i = Image.open(fp)
# Crop image from the right.
i = i.crop((orig_w - (preview_w * 2), 0, orig_w, orig_h))
# Resize preview.
i = i.resize(preview, Image.ANTIALIAS)
i.load()
with storage.open(full_dst[0], 'wb') as fp:
i.save(fp, 'png')
_, icon_size = amo.PERSONA_IMAGE_SIZES['icon']
icon_w, icon_h = icon_size
# Resize icon.
i = i_orig
i.load()
i = i.crop((orig_w - (preview_h * 2), 0, orig_w, orig_h))
i = i.resize(icon_size, Image.ANTIALIAS)
i.load()
with storage.open(full_dst[1], 'wb') as fp:
i.save(fp, 'png')
return True
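# Worked example of the crop above, assuming the classic Persona header
# size of 3000x200 with a 680x100 preview (the real values come from
# amo.PERSONA_IMAGE_SIZES): crop((3000 - 680 * 2, 0, 3000, 200)) keeps
# the right-most 1360px strip, which is then resized down to 680x100.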
@set_modified_on
def | (src, full_dst, **kw):
"""Creates a PNG of a Persona header/footer image."""
log.info('[1@None] Saving persona image: %s' % full_dst)
img = ImageCheck(storage.open(src))
if not img.is_image():
log.error('Not an image: %s' % src, exc_info=True)
return
with storage.open(src, 'rb') as fp:
i = Image.open(fp)
with storage.open(full_dst, 'wb') as fp:
i.save(fp, 'png')
return True
@task
def update_incompatible_appversions(data, **kw):
"""Updates the incompatible_versions table for this version."""
log.info('Updating incompatible_versions for %s versions.' % len(data))
addon_ids = set()
for version_id in data:
# This is here to handle both post_save and post_delete hooks.
IncompatibleVersions.objects.filter(version=version_id).delete()
try:
version = Version.objects.get(pk=version_id)
except Version.DoesNotExist:
log.info('Version ID [%d] not found. Incompatible versions were '
'cleared.' % version_id)
return
addon_ids.add(version.addon_id)
try:
compat = CompatOverride.objects.get(addon=version.addon)
except CompatOverride.DoesNotExist:
log.info('Compat override for addon with version ID [%d] not '
'found. Incompatible versions were cleared.' % version_id)
return
app_ranges = []
ranges = compat.collapsed_ranges()
for range in ranges:
if range.min == '0' and range.max == '*':
# Wildcard range, add all app ranges
app_ranges.extend(range.apps)
else:
# Since we can't rely on add-on version numbers, get the min
# and max ID values and find versions whose ID is within those
# ranges, being careful with wildcards.
min_id = max_id = None
if range.min == '0':
versions = (Version.objects.filter(addon=version.addon_id)
.order_by('id')
.values_list('id', flat=True)[:1])
if versions:
min_id = versions[0]
else:
try:
min_id = Version.objects.get(addon=version.addon_id,
version=range.min).id
except Version.DoesNotExist:
pass
if range.max == '*':
versions = (Version.objects.filter(addon=version.addon_id)
.order_by('-id')
.values_list('id', flat=True)[:1])
if versions:
max_id = versions[0]
else:
try:
max_id = Version.objects.get(addon=version.addon_id,
version=range.max).id
except Version.DoesNotExist:
pass
if min_id and max_id:
if min_id <= version.id <= max_id:
app_ranges.extend(range.apps)
for app_range in app_ranges:
IncompatibleVersions.objects.create(version=version,
app=app_range.app.id,
min_app_version=app_range.min,
max_app_version=app_range.max)
log.info('Added incompatible version for version ID [%d]: '
'app:%d, %s -> %s' % (version_id, app_range.app.id,
app_range.min, app_range.max))
# Increment namespace cache of compat versions.
for addon_id in addon_ids:
cache_ns_key('d2c-versions:%s' % addon_id, increment=True)
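# Worked example of the ID-range check above (hypothetical values): if
# range.min '1.0' resolves to Version id 10 and range.max '2.0' to id 25,
# a version with id 18 satisfies 10 <= 18 <= 25 and inherits the range's
# app entries; a '0' or '*' bound falls back to the lowest/highest id.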
def make_checksum(header_path, footer_path):
ls = LocalFileStorage()
footer = footer_path and ls._open(footer_path).read() or ''
raw_checksum = ls._open(header_path).read() + footer
return hashlib.sha224(raw_checksum).hexdigest()
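# Illustrative usage sketch (not part of the original module; the paths
# below are hypothetical). The digest returned above is what the
# duplicate-detection helpers that follow compare against:
#
#   checksum = make_checksum('/media/addons/42/header.png',
#                            '/media/addons/42/footer.png')
#   dupes = Persona.objects.filter(checksum=checksum)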
def theme_checksum(theme, **kw):
theme.checksum = make_checksum(theme.header_path, theme.footer_path)
dupe_personas = Persona.objects.filter(checksum=theme.checksum)
if dupe_personas.exists():
theme.dupe_persona = dupe_personas[0]
theme.save()
def rereviewqueuetheme_checksum(rqt, **kw):
"""Check for possible duplicate theme images."""
dupe_personas = Persona.objects.filter(
checksum=make_checksum(rqt.header_path or rqt.theme.header_path,
rqt.footer_path or rqt.theme.footer_path))
if dupe_personas.exists():
rqt.dupe_persona = dupe_personas[0]
rqt.save()
@task
@write
def save_theme(header, footer, addon, **kw):
"""Save theme image and calculates checksum after theme save."""
dst_root = os.path.join(user_media_path('addons'), str(addon.id))
header = os.path.join(settings.TMP_PATH, 'persona_header', header)
header_dst = os.path.join(dst_root, 'header.png')
if footer:
footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer)
footer_dst = os.path.join(dst_root, 'footer.png')
try:
save_persona_image(src=header, full_dst=header_dst)
if footer:
save_persona_image(src=footer, full_dst=footer_dst)
create_persona_preview_images(
src=header, full_dst=[os.path.join(dst_root, 'preview.png'),
os.path.join(dst_root, 'icon.png')],
set_modified_on=[addon])
theme_checksum(addon.persona)
except IOError:
addon.delete()
raise
@task
@write
def save_theme_reupload(header, footer, addon, **kw):
header_dst = None
footer_dst = None
dst_root = os.path.join(user_media_path('addons'), str(addon.id))
try:
if header:
header = os.path.join(settings.TMP_PATH, 'persona_header', header)
header_dst = os.path.join(dst_root, 'pending_header.png')
save_persona_image(src=header, full_dst=header_dst)
if footer:
footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer)
footer_dst = os.path.join(dst_root, 'pending_footer.png')
save_persona_image(src=footer, full_dst=footer_dst)
except IOError as e:
log.error(str(e))
raise
if header_dst or footer_dst:
theme = addon.persona
header = 'pending_header.png' if header_dst else theme.header
# Theme footer is optional, but can't be None.
footer = theme.footer or ''
if footer_dst:
footer = 'pending_footer.png'
# Store pending header and/or footer file paths for review.
RereviewQueueTheme.objects.filter(theme=theme).delete()
rqt = RereviewQueueTheme(theme=theme, header=header, footer=footer)
rereviewqueuetheme_checksum(rqt=rqt)
rqt.save()
@task
@write
def calc_checksum(theme_id, **kw):
"""For migration 596."""
lfs = LocalFileStorage()
theme = Persona.objects.get(id=theme_id)
header = theme.header_path
footer = theme.footer_path
# Delete invalid themes that are not images (e.g. PDF, EXE).
try:
Image.open(header)
Image.open(footer)
except IOError:
log.info('Deleting invalid theme [%s] (header: %s) (footer: %s)' %
(theme.addon.id, header, footer))
theme.addon.delete()
theme.delete()
rm_stored_dir(header.replace('header.png', ''), storage=lfs)
return
# Calculate checksum and save.
try:
theme.checksum = make_checksum(header, footer)
theme.save()
except IOError as e:
log.error(str(e))
| save_persona_image | identifier_name |
tasks.py | import hashlib
import logging
import os
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.db import transaction
from PIL import Image
from olympia import amo
from olympia.addons.models import (
Addon, attach_tags, attach_translations, AppSupport, CompatOverride,
IncompatibleVersions, Persona, Preview)
from olympia.addons.indexers import AddonIndexer
from olympia.amo.celery import task
from olympia.amo.decorators import set_modified_on, write
from olympia.amo.helpers import user_media_path
from olympia.amo.storage_utils import rm_stored_dir
from olympia.amo.utils import cache_ns_key, ImageCheck, LocalFileStorage
from olympia.editors.models import RereviewQueueTheme
from olympia.lib.es.utils import index_objects
from olympia.versions.models import Version
# pulling tasks from cron
from . import cron # noqa
log = logging.getLogger('z.task')
@task
@write
def version_changed(addon_id, **kw):
update_last_updated(addon_id)
update_appsupport([addon_id])
def update_last_updated(addon_id):
queries = Addon._last_updated_queries()
try:
addon = Addon.objects.get(pk=addon_id)
except Addon.DoesNotExist:
log.info('[1@None] Updating last updated for %s failed, no addon found'
% addon_id)
return
log.info('[1@None] Updating last updated for %s.' % addon_id)
if addon.is_persona():
q = 'personas'
elif addon.status == amo.STATUS_PUBLIC:
q = 'public'
else:
q = 'exp'
qs = queries[q].filter(pk=addon_id).using('default')
res = qs.values_list('id', 'last_updated')
if res:
pk, t = res[0]
Addon.objects.filter(pk=pk).update(last_updated=t)
@write
def update_appsupport(ids):
log.info("[%s@None] Updating appsupport for %s." % (len(ids), ids))
addons = Addon.objects.no_cache().filter(id__in=ids).no_transforms()
support = []
for addon in addons:
for app, appver in addon.compatible_apps.items():
if appver is None:
# Fake support for all version ranges.
min_, max_ = 0, 999999999999999999
else:
min_, max_ = appver.min.version_int, appver.max.version_int
support.append(AppSupport(addon=addon, app=app.id,
min=min_, max=max_))
if not support:
return
with transaction.atomic():
AppSupport.objects.filter(addon__id__in=ids).delete()
AppSupport.objects.bulk_create(support)
# All our updates were sql, so invalidate manually.
Addon.objects.invalidate(*addons)
@task
def delete_preview_files(id, **kw):
log.info('[1@None] Removing preview with id of %s.' % id)
p = Preview(id=id)
for f in (p.thumbnail_path, p.image_path):
try:
storage.delete(f)
        except Exception as e:
log.error('Error deleting preview file (%s): %s' % (f, e))
@task(acks_late=True)
def index_addons(ids, **kw):
log.info('Indexing addons %s-%s. [%s]' % (ids[0], ids[-1], len(ids)))
transforms = (attach_tags, attach_translations)
index_objects(ids, Addon, AddonIndexer.extract_document,
kw.pop('index', None), transforms, Addon.unfiltered)
@task
def unindex_addons(ids, **kw):
for addon in ids:
log.info('Removing addon [%s] from search index.' % addon)
Addon.unindex(addon)
@task
def delete_persona_image(dst, **kw):
log.info('[1@None] Deleting persona image: %s.' % dst)
if not dst.startswith(user_media_path('addons')):
log.error("Someone tried deleting something they shouldn't: %s" % dst)
return
try:
storage.delete(dst)
    except Exception as e:
log.error('Error deleting persona image: %s' % e)
@set_modified_on
def create_persona_preview_images(src, full_dst, **kw):
"""
Creates a 680x100 thumbnail used for the Persona preview and
a 32x32 thumbnail used for search suggestions/detail pages.
"""
log.info('[1@None] Resizing persona images: %s' % full_dst)
preview, full = amo.PERSONA_IMAGE_SIZES['header']
preview_w, preview_h = preview
orig_w, orig_h = full
with storage.open(src) as fp:
i_orig = i = Image.open(fp)
# Crop image from the right.
i = i.crop((orig_w - (preview_w * 2), 0, orig_w, orig_h))
# Resize preview.
i = i.resize(preview, Image.ANTIALIAS)
i.load()
with storage.open(full_dst[0], 'wb') as fp:
i.save(fp, 'png')
_, icon_size = amo.PERSONA_IMAGE_SIZES['icon']
icon_w, icon_h = icon_size
# Resize icon.
i = i_orig
i.load()
i = i.crop((orig_w - (preview_h * 2), 0, orig_w, orig_h))
i = i.resize(icon_size, Image.ANTIALIAS)
i.load()
with storage.open(full_dst[1], 'wb') as fp:
i.save(fp, 'png')
return True
@set_modified_on
def save_persona_image(src, full_dst, **kw):
"""Creates a PNG of a Persona header/footer image."""
log.info('[1@None] Saving persona image: %s' % full_dst)
img = ImageCheck(storage.open(src))
if not img.is_image():
log.error('Not an image: %s' % src, exc_info=True)
return
with storage.open(src, 'rb') as fp:
i = Image.open(fp)
with storage.open(full_dst, 'wb') as fp:
i.save(fp, 'png')
return True
@task
def update_incompatible_appversions(data, **kw):
"""Updates the incompatible_versions table for this version."""
log.info('Updating incompatible_versions for %s versions.' % len(data))
addon_ids = set()
for version_id in data:
# This is here to handle both post_save and post_delete hooks.
IncompatibleVersions.objects.filter(version=version_id).delete()
try:
version = Version.objects.get(pk=version_id)
except Version.DoesNotExist:
log.info('Version ID [%d] not found. Incompatible versions were '
'cleared.' % version_id)
return
addon_ids.add(version.addon_id)
try:
compat = CompatOverride.objects.get(addon=version.addon)
except CompatOverride.DoesNotExist:
log.info('Compat override for addon with version ID [%d] not '
'found. Incompatible versions were cleared.' % version_id)
return
app_ranges = []
ranges = compat.collapsed_ranges()
for range in ranges:
if range.min == '0' and range.max == '*':
# Wildcard range, add all app ranges
app_ranges.extend(range.apps)
else:
# Since we can't rely on add-on version numbers, get the min
# and max ID values and find versions whose ID is within those
# ranges, being careful with wildcards.
min_id = max_id = None
if range.min == '0':
versions = (Version.objects.filter(addon=version.addon_id)
.order_by('id')
.values_list('id', flat=True)[:1])
if versions:
min_id = versions[0]
else:
try:
min_id = Version.objects.get(addon=version.addon_id,
version=range.min).id
except Version.DoesNotExist:
pass
if range.max == '*':
versions = (Version.objects.filter(addon=version.addon_id)
.order_by('-id')
.values_list('id', flat=True)[:1])
if versions:
max_id = versions[0]
else:
try:
max_id = Version.objects.get(addon=version.addon_id,
version=range.max).id
except Version.DoesNotExist:
pass
if min_id and max_id:
if min_id <= version.id <= max_id:
app_ranges.extend(range.apps)
for app_range in app_ranges:
IncompatibleVersions.objects.create(version=version,
app=app_range.app.id,
min_app_version=app_range.min,
max_app_version=app_range.max)
log.info('Added incompatible version for version ID [%d]: '
'app:%d, %s -> %s' % (version_id, app_range.app.id,
app_range.min, app_range.max))
# Increment namespace cache of compat versions.
for addon_id in addon_ids:
cache_ns_key('d2c-versions:%s' % addon_id, increment=True)
def make_checksum(header_path, footer_path):
ls = LocalFileStorage()
footer = footer_path and ls._open(footer_path).read() or ''
raw_checksum = ls._open(header_path).read() + footer
return hashlib.sha224(raw_checksum).hexdigest()
def theme_checksum(theme, **kw):
|
def rereviewqueuetheme_checksum(rqt, **kw):
"""Check for possible duplicate theme images."""
dupe_personas = Persona.objects.filter(
checksum=make_checksum(rqt.header_path or rqt.theme.header_path,
rqt.footer_path or rqt.theme.footer_path))
if dupe_personas.exists():
rqt.dupe_persona = dupe_personas[0]
rqt.save()
@task
@write
def save_theme(header, footer, addon, **kw):
"""Save theme image and calculates checksum after theme save."""
dst_root = os.path.join(user_media_path('addons'), str(addon.id))
header = os.path.join(settings.TMP_PATH, 'persona_header', header)
header_dst = os.path.join(dst_root, 'header.png')
if footer:
footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer)
footer_dst = os.path.join(dst_root, 'footer.png')
try:
save_persona_image(src=header, full_dst=header_dst)
if footer:
save_persona_image(src=footer, full_dst=footer_dst)
create_persona_preview_images(
src=header, full_dst=[os.path.join(dst_root, 'preview.png'),
os.path.join(dst_root, 'icon.png')],
set_modified_on=[addon])
theme_checksum(addon.persona)
except IOError:
addon.delete()
raise
@task
@write
def save_theme_reupload(header, footer, addon, **kw):
header_dst = None
footer_dst = None
dst_root = os.path.join(user_media_path('addons'), str(addon.id))
try:
if header:
header = os.path.join(settings.TMP_PATH, 'persona_header', header)
header_dst = os.path.join(dst_root, 'pending_header.png')
save_persona_image(src=header, full_dst=header_dst)
if footer:
footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer)
footer_dst = os.path.join(dst_root, 'pending_footer.png')
save_persona_image(src=footer, full_dst=footer_dst)
except IOError as e:
log.error(str(e))
raise
if header_dst or footer_dst:
theme = addon.persona
header = 'pending_header.png' if header_dst else theme.header
# Theme footer is optional, but can't be None.
footer = theme.footer or ''
if footer_dst:
footer = 'pending_footer.png'
# Store pending header and/or footer file paths for review.
RereviewQueueTheme.objects.filter(theme=theme).delete()
rqt = RereviewQueueTheme(theme=theme, header=header, footer=footer)
rereviewqueuetheme_checksum(rqt=rqt)
rqt.save()
@task
@write
def calc_checksum(theme_id, **kw):
"""For migration 596."""
lfs = LocalFileStorage()
theme = Persona.objects.get(id=theme_id)
header = theme.header_path
footer = theme.footer_path
# Delete invalid themes that are not images (e.g. PDF, EXE).
try:
Image.open(header)
Image.open(footer)
except IOError:
log.info('Deleting invalid theme [%s] (header: %s) (footer: %s)' %
(theme.addon.id, header, footer))
theme.addon.delete()
theme.delete()
rm_stored_dir(header.replace('header.png', ''), storage=lfs)
return
# Calculate checksum and save.
try:
theme.checksum = make_checksum(header, footer)
theme.save()
except IOError as e:
log.error(str(e))
| theme.checksum = make_checksum(theme.header_path, theme.footer_path)
dupe_personas = Persona.objects.filter(checksum=theme.checksum)
if dupe_personas.exists():
theme.dupe_persona = dupe_personas[0]
theme.save() | identifier_body |
helpers_sync.py | import os
from geotrek.flatpages.models import FlatPage
from geotrek.flatpages.views import FlatPageViewSet, FlatPageMeta
from django.db.models import Q
class SyncRando:
def __init__(self, sync):
self.global_sync = sync
def sync(self, lang):
| self.global_sync.sync_geojson(lang, FlatPageViewSet, 'flatpages.geojson', zipfile=self.global_sync.zipfile)
flatpages = FlatPage.objects.filter(published=True)
if self.global_sync.source:
flatpages = flatpages.filter(source__name__in=self.global_sync.source)
if self.global_sync.portal:
flatpages = flatpages.filter(Q(portal__name=self.global_sync.portal) | Q(portal=None))
for flatpage in flatpages:
name = os.path.join('meta', lang, flatpage.rando_url, 'index.html')
self.global_sync.sync_view(lang, FlatPageMeta.as_view(), name, pk=flatpage.pk,
params={'rando_url': self.global_sync.rando_url}) | identifier_body |
|
helpers_sync.py | import os
from geotrek.flatpages.models import FlatPage
from geotrek.flatpages.views import FlatPageViewSet, FlatPageMeta
from django.db.models import Q
class SyncRando:
def __init__(self, sync):
self.global_sync = sync
def sync(self, lang):
self.global_sync.sync_geojson(lang, FlatPageViewSet, 'flatpages.geojson', zipfile=self.global_sync.zipfile)
flatpages = FlatPage.objects.filter(published=True)
if self.global_sync.source:
flatpages = flatpages.filter(source__name__in=self.global_sync.source)
if self.global_sync.portal:
flatpages = flatpages.filter(Q(portal__name=self.global_sync.portal) | Q(portal=None))
for flatpage in flatpages:
| name = os.path.join('meta', lang, flatpage.rando_url, 'index.html')
self.global_sync.sync_view(lang, FlatPageMeta.as_view(), name, pk=flatpage.pk,
params={'rando_url': self.global_sync.rando_url}) | conditional_block |
|
helpers_sync.py | import os
from geotrek.flatpages.models import FlatPage
from geotrek.flatpages.views import FlatPageViewSet, FlatPageMeta
from django.db.models import Q
class SyncRando: | def __init__(self, sync):
self.global_sync = sync
def sync(self, lang):
self.global_sync.sync_geojson(lang, FlatPageViewSet, 'flatpages.geojson', zipfile=self.global_sync.zipfile)
flatpages = FlatPage.objects.filter(published=True)
if self.global_sync.source:
flatpages = flatpages.filter(source__name__in=self.global_sync.source)
if self.global_sync.portal:
flatpages = flatpages.filter(Q(portal__name=self.global_sync.portal) | Q(portal=None))
for flatpage in flatpages:
name = os.path.join('meta', lang, flatpage.rando_url, 'index.html')
self.global_sync.sync_view(lang, FlatPageMeta.as_view(), name, pk=flatpage.pk,
params={'rando_url': self.global_sync.rando_url}) | random_line_split |
|
helpers_sync.py | import os
from geotrek.flatpages.models import FlatPage
from geotrek.flatpages.views import FlatPageViewSet, FlatPageMeta
from django.db.models import Q
class SyncRando:
def | (self, sync):
self.global_sync = sync
def sync(self, lang):
self.global_sync.sync_geojson(lang, FlatPageViewSet, 'flatpages.geojson', zipfile=self.global_sync.zipfile)
flatpages = FlatPage.objects.filter(published=True)
if self.global_sync.source:
flatpages = flatpages.filter(source__name__in=self.global_sync.source)
if self.global_sync.portal:
flatpages = flatpages.filter(Q(portal__name=self.global_sync.portal) | Q(portal=None))
for flatpage in flatpages:
name = os.path.join('meta', lang, flatpage.rando_url, 'index.html')
self.global_sync.sync_view(lang, FlatPageMeta.as_view(), name, pk=flatpage.pk,
params={'rando_url': self.global_sync.rando_url})
| __init__ | identifier_name |
plugin.js |
// @require core/widget/helpers.js
(function ( $, _, Svelto ) {
/* PLUGIN */
let Plugin = {
call ( Widget, $ele, args ) {
let options = args[0],
isMethodCall = ( _.isString ( options ) && options.charAt ( 0 ) !== '_' ); // Methods starting with '_' are private
for ( let i = 0, l = $ele.length; i < l; i++ ) {
let instance = $.widget.get ( $ele[i], Widget, options );
if ( isMethodCall && _.isFunction ( instance[options] ) ) {
let returnValue = args.length > 1 ? instance[options]( ...Array.prototype.slice.call ( args, 1 ) ) : instance[options]();
if ( !_.isNil ( returnValue ) ) return returnValue;
}
}
return $ele;
},
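    // Illustrative call path (hypothetical widget named 'modal', not
    // defined in this file): after Plugin.make ( Modal ) below registers
    // the plugin, $( '.modal' ).modal ( 'open' ) routes through `call`
    // above, while method names starting with '_' remain private.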
make ( Widget ) {
if ( !Widget.config.plugin ) return;
$.fn[Widget.config.name] = function () {
return Plugin.call ( Widget, this, arguments );
};
},
| ( Widget ) {
if ( !Widget.config.plugin ) return;
delete $.fn[Widget.config.name];
}
};
/* EXPORT */
Svelto.Plugin = Plugin;
}( Svelto.$, Svelto._, Svelto ));
| unmake | identifier_name |
plugin.js | // @require core/widget/helpers.js
(function ( $, _, Svelto ) {
/* PLUGIN */
let Plugin = {
call ( Widget, $ele, args ) {
| let options = args[0],
isMethodCall = ( _.isString ( options ) && options.charAt ( 0 ) !== '_' ); // Methods starting with '_' are private
for ( let i = 0, l = $ele.length; i < l; i++ ) {
let instance = $.widget.get ( $ele[i], Widget, options );
if ( isMethodCall && _.isFunction ( instance[options] ) ) {
let returnValue = args.length > 1 ? instance[options]( ...Array.prototype.slice.call ( args, 1 ) ) : instance[options]();
if ( !_.isNil ( returnValue ) ) return returnValue;
}
}
return $ele;
},
make ( Widget ) {
if ( !Widget.config.plugin ) return;
$.fn[Widget.config.name] = function () {
return Plugin.call ( Widget, this, arguments );
};
},
unmake ( Widget ) {
if ( !Widget.config.plugin ) return;
delete $.fn[Widget.config.name];
}
};
/* EXPORT */
Svelto.Plugin = Plugin;
}( Svelto.$, Svelto._, Svelto )); | random_line_split |
|
plugin.js |
// @require core/widget/helpers.js
(function ( $, _, Svelto ) {
/* PLUGIN */
let Plugin = {
call ( Widget, $ele, args ) {
let options = args[0],
isMethodCall = ( _.isString ( options ) && options.charAt ( 0 ) !== '_' ); // Methods starting with '_' are private
for ( let i = 0, l = $ele.length; i < l; i++ ) {
let instance = $.widget.get ( $ele[i], Widget, options );
if ( isMethodCall && _.isFunction ( instance[options] ) ) {
let returnValue = args.length > 1 ? instance[options]( ...Array.prototype.slice.call ( args, 1 ) ) : instance[options]();
if ( !_.isNil ( returnValue ) ) return returnValue;
}
}
return $ele;
},
make ( Widget ) {
if ( !Widget.config.plugin ) return;
$.fn[Widget.config.name] = function () {
return Plugin.call ( Widget, this, arguments );
};
},
unmake ( Widget ) |
};
/* EXPORT */
Svelto.Plugin = Plugin;
}( Svelto.$, Svelto._, Svelto ));
| {
if ( !Widget.config.plugin ) return;
delete $.fn[Widget.config.name];
} | identifier_body |
ne.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::iter::Iterator;
use core::iter::order::ne;
use core::cmp::PartialEq;
struct A<T: PartialEq> {
begin: T,
end: T
}
macro_rules! Iterator_impl {
($T:ty) => {
impl Iterator for A<$T> {
type Item = $T;
fn next(&mut self) -> Option<Self::Item> {
if self.begin < self.end {
let result = self.begin;
self.begin = self.begin.wrapping_add(1);
Some::<Self::Item>(result)
} else {
None::<Self::Item>
}
}
}
}
}
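    // Illustrative expansion (mirrors the macro above for a concrete
    // type): A::<i32> { begin: 0, end: 3 } yields Some(0), Some(1),
    // Some(2), then None, so `ne` below can walk both ranges in
    // lockstep.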
// pub fn eq<L: Iterator, R: Iterator>(mut a: L, mut b: R) -> bool where
// L::Item: PartialEq<R::Item>,
// {
// loop {
// match (a.next(), b.next()) {
// (None, None) => return true,
// (None, _) | (_, None) => return false,
// (Some(x), Some(y)) => if !x.eq(&y) { return false }, | // }
type T = i32;
Iterator_impl!(T);
type L = A<T>;
type R = A<T>;
#[test]
fn ne_test1() {
let a: L = L { begin: 0, end: 10 };
let b: R = R { begin: 0, end: 10 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, false);
}
#[test]
fn ne_test2() {
let a: L = L { begin: 0, end: 9 };
let b: R = R { begin: 0, end: 10 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, true);
}
#[test]
fn ne_test3() {
let a: L = L { begin: 0, end: 11 };
let b: R = R { begin: 0, end: 10 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, true);
}
#[test]
fn ne_test4() {
let a: L = L { begin: 0, end: 10 };
let b: R = R { begin: 10, end: 20 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, true);
}
#[test]
fn ne_test5() {
let a: L = L { begin: 10, end: 20 };
let b: R = R { begin: 0, end: 10 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, true);
}
} | // }
// } | random_line_split |
ne.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::iter::Iterator;
use core::iter::order::ne;
use core::cmp::PartialEq;
struct A<T: PartialEq> {
begin: T,
end: T
}
macro_rules! Iterator_impl {
($T:ty) => {
impl Iterator for A<$T> {
type Item = $T;
fn next(&mut self) -> Option<Self::Item> {
if self.begin < self.end {
let result = self.begin;
self.begin = self.begin.wrapping_add(1);
Some::<Self::Item>(result)
} else {
None::<Self::Item>
}
}
}
}
}
// pub fn eq<L: Iterator, R: Iterator>(mut a: L, mut b: R) -> bool where
// L::Item: PartialEq<R::Item>,
// {
// loop {
// match (a.next(), b.next()) {
// (None, None) => return true,
// (None, _) | (_, None) => return false,
// (Some(x), Some(y)) => if !x.eq(&y) { return false },
// }
// }
// }
type T = i32;
Iterator_impl!(T);
type L = A<T>;
type R = A<T>;
#[test]
fn ne_test1() {
let a: L = L { begin: 0, end: 10 };
let b: R = R { begin: 0, end: 10 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, false);
}
#[test]
fn ne_test2() {
let a: L = L { begin: 0, end: 9 };
let b: R = R { begin: 0, end: 10 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, true);
}
#[test]
fn ne_test3() {
let a: L = L { begin: 0, end: 11 };
let b: R = R { begin: 0, end: 10 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, true);
}
#[test]
fn ne_test4() |
#[test]
fn ne_test5() {
let a: L = L { begin: 10, end: 20 };
let b: R = R { begin: 0, end: 10 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, true);
}
}
| {
let a: L = L { begin: 0, end: 10 };
let b: R = R { begin: 10, end: 20 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, true);
} | identifier_body |
ne.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::iter::Iterator;
use core::iter::order::ne;
use core::cmp::PartialEq;
struct | <T: PartialEq> {
begin: T,
end: T
}
macro_rules! Iterator_impl {
($T:ty) => {
impl Iterator for A<$T> {
type Item = $T;
fn next(&mut self) -> Option<Self::Item> {
if self.begin < self.end {
let result = self.begin;
self.begin = self.begin.wrapping_add(1);
Some::<Self::Item>(result)
} else {
None::<Self::Item>
}
}
}
}
}
// pub fn eq<L: Iterator, R: Iterator>(mut a: L, mut b: R) -> bool where
// L::Item: PartialEq<R::Item>,
// {
// loop {
// match (a.next(), b.next()) {
// (None, None) => return true,
// (None, _) | (_, None) => return false,
// (Some(x), Some(y)) => if !x.eq(&y) { return false },
// }
// }
// }
type T = i32;
Iterator_impl!(T);
type L = A<T>;
type R = A<T>;
#[test]
fn ne_test1() {
let a: L = L { begin: 0, end: 10 };
let b: R = R { begin: 0, end: 10 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, false);
}
#[test]
fn ne_test2() {
let a: L = L { begin: 0, end: 9 };
let b: R = R { begin: 0, end: 10 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, true);
}
#[test]
fn ne_test3() {
let a: L = L { begin: 0, end: 11 };
let b: R = R { begin: 0, end: 10 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, true);
}
#[test]
fn ne_test4() {
let a: L = L { begin: 0, end: 10 };
let b: R = R { begin: 10, end: 20 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, true);
}
#[test]
fn ne_test5() {
let a: L = L { begin: 10, end: 20 };
let b: R = R { begin: 0, end: 10 };
let result: bool = ne::<L, R>(a, b);
assert_eq!(result, true);
}
}
| A | identifier_name |
multiple.py | from mininet.net import Mininet
from mininet.topo import Topo
from mininet.log import lg, setLogLevel
from mininet.cli import CLI
from mininet.node import RemoteController
FANOUT = 2
SWITCH_NUM = 2
CORES = {}
for i in range(1, SWITCH_NUM + 1):
CORES['s%d' % i] = {}
if i < 10:
CORES['s%d' % i]['dpid'] = '0000000000000%d00' % i
else:
CORES['s%d' % i]['dpid'] = '000000000000%d00' % i
class | (Topo):
def __init__(self, enable_all = True):
"Create Multiple topology."
# Add default members to class.
super(MultipleTopo, self).__init__()
# Add core switches
self.cores = {}
for switch in CORES:
self.cores[switch] = self.addSwitch(switch, dpid=(CORES[switch]['dpid']))
# Add hosts and connect them to their core switch
for switch in CORES:
for count in xrange(1, FANOUT + 1):
# Add hosts
host = 'h_%s_%s' % (switch, count)
ip = '10.0.0.%s' % count
mac = CORES[switch]['dpid'][4:-1] + str(count)
h = self.addHost(host, ip=ip, mac=mac)
# Connect hosts to core switches
self.addLink(h, self.cores[switch])
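# Resulting topology sketch (derived from the loops above): with
# SWITCH_NUM = 2 and FANOUT = 2 this builds s1 and s2, each carrying two
# hosts (h_s1_1, h_s1_2, h_s2_1, h_s2_2) whose IPs repeat per switch
# (10.0.0.1-2) and whose MACs are derived from the switch DPIDs.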
if __name__ == '__main__':
topo = MultipleTopo()
net = Mininet(topo, autoSetMacs=True, xterms=False, controller=RemoteController)
net.addController('c', ip='128.112.93.28') # localhost:127.0.0.1 vm-to-mac:10.0.2.2 server-to-mac:128.112.93.28
print "\nHosts configured with IPs, switches pointing to OpenVirteX at 128.112.93.28 port 6633\n"
net.start()
raw_input("started, press...")
#CLI(net)
#net.stop()
| MultipleTopo | identifier_name |
multiple.py | from mininet.net import Mininet
from mininet.topo import Topo
from mininet.log import lg, setLogLevel
from mininet.cli import CLI
from mininet.node import RemoteController
FANOUT = 2
SWITCH_NUM = 2
CORES = {}
for i in range(1, SWITCH_NUM + 1):
CORES['s%d' % i] = {}
if i < 10:
CORES['s%d' % i]['dpid'] = '0000000000000%d00' % i
else:
CORES['s%d' % i]['dpid'] = '000000000000%d00' % i
class MultipleTopo(Topo):
|
if __name__ == '__main__':
topo = MultipleTopo()
net = Mininet(topo, autoSetMacs=True, xterms=False, controller=RemoteController)
net.addController('c', ip='128.112.93.28') # localhost:127.0.0.1 vm-to-mac:10.0.2.2 server-to-mac:128.112.93.28
print "\nHosts configured with IPs, switches pointing to OpenVirteX at 128.112.93.28 port 6633\n"
net.start()
raw_input("started, press...")
#CLI(net)
#net.stop()
| def __init__(self, enable_all = True):
"Create Multiple topology."
# Add default members to class.
super(MultipleTopo, self).__init__()
# Add core switches
self.cores = {}
for switch in CORES:
self.cores[switch] = self.addSwitch(switch, dpid=(CORES[switch]['dpid']))
# Add hosts and connect them to their core switch
for switch in CORES:
for count in xrange(1, FANOUT + 1):
# Add hosts
host = 'h_%s_%s' % (switch, count)
ip = '10.0.0.%s' % count
mac = CORES[switch]['dpid'][4:-1] + str(count)
h = self.addHost(host, ip=ip, mac=mac)
# Connect hosts to core switches
self.addLink(h, self.cores[switch]) | identifier_body |
multiple.py | from mininet.log import lg, setLogLevel
from mininet.cli import CLI
from mininet.node import RemoteController
FANOUT = 2
SWITCH_NUM = 2
CORES = {}
for i in range(1, SWITCH_NUM + 1):
CORES['s%d' % i] = {}
if i < 10:
CORES['s%d' % i]['dpid'] = '0000000000000%d00' % i
else:
CORES['s%d' % i]['dpid'] = '000000000000%d00' % i
class MultipleTopo(Topo):
def __init__(self, enable_all = True):
"Create Multiple topology."
# Add default members to class.
super(MultipleTopo, self).__init__()
# Add core switches
self.cores = {}
for switch in CORES:
self.cores[switch] = self.addSwitch(switch, dpid=(CORES[switch]['dpid']))
# Add hosts and connect them to their core switch
for switch in CORES:
for count in xrange(1, FANOUT + 1):
# Add hosts
host = 'h_%s_%s' % (switch, count)
ip = '10.0.0.%s' % count
mac = CORES[switch]['dpid'][4:-1] + str(count)
h = self.addHost(host, ip=ip, mac=mac)
# Connect hosts to core switches
self.addLink(h, self.cores[switch])
if __name__ == '__main__':
topo = MultipleTopo()
net = Mininet(topo, autoSetMacs=True, xterms=False, controller=RemoteController)
net.addController('c', ip='128.112.93.28') # localhost:127.0.0.1 vm-to-mac:10.0.2.2 server-to-mac:128.112.93.28
print "\nHosts configured with IPs, switches pointing to OpenVirteX at 128.112.93.28 port 6633\n"
net.start()
raw_input("started, press...")
#CLI(net)
#net.stop() | from mininet.net import Mininet
from mininet.topo import Topo | random_line_split |
|
multiple.py | from mininet.net import Mininet
from mininet.topo import Topo
from mininet.log import lg, setLogLevel
from mininet.cli import CLI
from mininet.node import RemoteController
FANOUT = 2
SWITCH_NUM = 2
CORES = {}
for i in range(1, SWITCH_NUM + 1):
CORES['s%d' % i] = {}
if i < 10:
|
else:
CORES['s%d' % i]['dpid'] = '000000000000%d00' % i
class MultipleTopo(Topo):
def __init__(self, enable_all = True):
"Create Multiple topology."
# Add default members to class.
super(MultipleTopo, self).__init__()
# Add core switches
self.cores = {}
for switch in CORES:
self.cores[switch] = self.addSwitch(switch, dpid=(CORES[switch]['dpid']))
# Add hosts and connect them to their core switch
for switch in CORES:
for count in xrange(1, FANOUT + 1):
# Add hosts
host = 'h_%s_%s' % (switch, count)
ip = '10.0.0.%s' % count
mac = CORES[switch]['dpid'][4:-1] + str(count)
h = self.addHost(host, ip=ip, mac=mac)
# Connect hosts to core switches
self.addLink(h, self.cores[switch])
if __name__ == '__main__':
topo = MultipleTopo()
net = Mininet(topo, autoSetMacs=True, xterms=False, controller=RemoteController)
net.addController('c', ip='128.112.93.28') # localhost:127.0.0.1 vm-to-mac:10.0.2.2 server-to-mac:128.112.93.28
print "\nHosts configured with IPs, switches pointing to OpenVirteX at 128.112.93.28 port 6633\n"
net.start()
raw_input("started, press...")
#CLI(net)
#net.stop()
| CORES['s%d' % i]['dpid'] = '0000000000000%d00' % i | conditional_block |
state.stories.ts | import { OptGrid } from '../types/options';
import Grid from '../src/grid';
import '../src/css/grid.css';
export default {
title: 'State layer',
};
function createGrid(options: Omit<OptGrid, 'el'>) |
const columns = [{ name: 'name' }, { name: 'artist' }];
export const noData = () => {
const { el } = createGrid({ columns, bodyHeight: 'fitToParent' });
return el;
};
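// Sketch of a populated variant (hypothetical story, not in the
// original file): passing `data` alongside `columns` hides the state
// layer, e.g. createGrid({ columns, data: [{ name: 'X', artist: 'Y' }] }).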
const noDataNote = `
## State
- If there is no data, the "no data" text is shown.
`;
noData.story = { parameters: { notes: noDataNote } };
export const noDataWithScroll = () => {
const { el } = createGrid({
columns,
bodyHeight: 'fitToParent',
columnOptions: { minWidth: 300 },
width: 300,
});
return el;
};
const noDataWithScrollNote = `
## State
- If there is no data, the "no data" text is shown.
- When the total column widths exceed grid width, the scroll should be displayed.
`;
noDataWithScroll.story = { parameters: { notes: noDataWithScrollNote } };
| {
const el = document.createElement('div');
el.style.width = '800px';
const grid = new Grid({ el, ...options });
return { el, grid };
} | identifier_body |
state.stories.ts | import { OptGrid } from '../types/options';
import Grid from '../src/grid';
import '../src/css/grid.css';
export default {
title: 'State layer',
};
function | (options: Omit<OptGrid, 'el'>) {
const el = document.createElement('div');
el.style.width = '800px';
const grid = new Grid({ el, ...options });
return { el, grid };
}
const columns = [{ name: 'name' }, { name: 'artist' }];
export const noData = () => {
const { el } = createGrid({ columns, bodyHeight: 'fitToParent' });
return el;
};
const noDataNote = `
## State
- If there is no data, the "no data" text is shown.
`;
noData.story = { parameters: { notes: noDataNote } };
export const noDataWithScroll = () => {
const { el } = createGrid({
columns,
bodyHeight: 'fitToParent',
columnOptions: { minWidth: 300 },
width: 300,
});
return el;
};
const noDataWithScrollNote = `
## State
- If there is no data, the "no data" text is shown.
- When the total column widths exceed grid width, the scroll should be displayed.
`;
noDataWithScroll.story = { parameters: { notes: noDataWithScrollNote } };
| createGrid | identifier_name |
state.stories.ts | import { OptGrid } from '../types/options';
import Grid from '../src/grid';
import '../src/css/grid.css';
export default {
title: 'State layer',
};
function createGrid(options: Omit<OptGrid, 'el'>) {
const el = document.createElement('div');
el.style.width = '800px';
const grid = new Grid({ el, ...options });
return { el, grid };
} |
return el;
};
const noDataNote = `
## State
- If there is no data, the "no data" text is shown.
`;
noData.story = { parameters: { notes: noDataNote } };
export const noDataWithScroll = () => {
const { el } = createGrid({
columns,
bodyHeight: 'fitToParent',
columnOptions: { minWidth: 300 },
width: 300,
});
return el;
};
const noDataWithScrollNote = `
## State
- If there is no data, the "no data" text is shown.
- When the total column widths exceed grid width, the scroll should be displayed.
`;
noDataWithScroll.story = { parameters: { notes: noDataWithScrollNote } }; |
const columns = [{ name: 'name' }, { name: 'artist' }];
export const noData = () => {
const { el } = createGrid({ columns, bodyHeight: 'fitToParent' }); | random_line_split |
unboxed-closures-counter-not-moved.rs | // run-pass
// Test that we mutate a counter on the stack only when we expect to.
fn call<F>(f: F) where F : FnOnce() {
f();
}
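// Note on the captures below (illustrative): because `call` takes `f`
// by value, the first, non-`move` closure borrows `counter` mutably
// from the stack frame, while the `move` closure copies it (u32 is
// Copy), so the original is unaffected.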
fn | () {
let y = vec![format!("Hello"), format!("World")];
let mut counter = 22_u32;
call(|| {
// Move `y`, but do not move `counter`, even though it is read
// by value (note that it is also mutated).
for item in y { //~ WARN unused variable: `item`
let v = counter;
counter += v;
}
});
assert_eq!(counter, 88);
call(move || {
// this mutates a moved copy, and hence doesn't affect original
counter += 1; //~ WARN value assigned to `counter` is never read
//~| WARN unused variable: `counter`
});
assert_eq!(counter, 88);
}
| main | identifier_name |
unboxed-closures-counter-not-moved.rs | // run-pass
// Test that we mutate a counter on the stack only when we expect to.
fn call<F>(f: F) where F : FnOnce() {
f();
}
fn main() {
let y = vec![format!("Hello"), format!("World")];
let mut counter = 22_u32;
call(|| { | // Move `y`, but do not move `counter`, even though it is read
// by value (note that it is also mutated).
for item in y { //~ WARN unused variable: `item`
let v = counter;
counter += v;
}
});
assert_eq!(counter, 88);
call(move || {
// this mutates a moved copy, and hence doesn't affect original
counter += 1; //~ WARN value assigned to `counter` is never read
//~| WARN unused variable: `counter`
});
assert_eq!(counter, 88);
} | random_line_split |
|
unboxed-closures-counter-not-moved.rs | // run-pass
// Test that we mutate a counter on the stack only when we expect to.
fn call<F>(f: F) where F : FnOnce() |
fn main() {
let y = vec![format!("Hello"), format!("World")];
let mut counter = 22_u32;
call(|| {
// Move `y`, but do not move `counter`, even though it is read
// by value (note that it is also mutated).
for item in y { //~ WARN unused variable: `item`
let v = counter;
counter += v;
}
});
assert_eq!(counter, 88);
call(move || {
// this mutates a moved copy, and hence doesn't affect original
counter += 1; //~ WARN value assigned to `counter` is never read
//~| WARN unused variable: `counter`
});
assert_eq!(counter, 88);
}
| {
f();
} | identifier_body |
mod.rs | //! Encoder-based structs and traits.
mod encoder;
mod impl_tuples;
mod impls;
use self::write::Writer;
use crate::{config::Config, error::EncodeError, utils::Sealed};
pub mod write;
pub use self::encoder::EncoderImpl;
/// Any source that can be encoded. This trait should be implemented for all types that you want to be able to use with any of the `encode_with` methods.
///
/// This trait will be automatically implemented if you enable the `derive` feature and add `#[derive(bincode::Encode)]` to your trait.
///
/// # Implementing this trait manually
///
/// If you want to implement this trait for your type, the easiest way is to add a `#[derive(bincode::Encode)]`, build and check your `target/` folder. This should generate a `<Struct name>_Encode.rs` file.
///
/// For this struct:
///
/// ```
/// struct Entity {
/// pub x: f32,
/// pub y: f32,
/// }
/// ```
/// It will look something like:
///
/// ```
/// # struct Entity {
/// # pub x: f32,
/// # pub y: f32,
/// # }
/// impl bincode::Encode for Entity {
/// fn encode<E: bincode::enc::Encoder>(
/// &self,
/// encoder: &mut E,
/// ) -> core::result::Result<(), bincode::error::EncodeError> {
/// bincode::Encode::encode(&self.x, encoder)?;
/// bincode::Encode::encode(&self.y, encoder)?;
/// Ok(())
/// }
/// }
/// ```
///
/// From here you can add/remove fields, or add custom logic.
pub trait Encode {
/// Encode a given type.
fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError>;
}
/// Helper trait to encode basic types into.
pub trait Encoder: Sealed {
/// The concrete [Writer] type
type W: Writer;
/// The concrete [Config] type
type C: Config;
/// Returns a mutable reference to the writer
fn writer(&mut self) -> &mut Self::W;
/// Returns a reference to the config
fn config(&self) -> &Self::C;
}
impl<'a, T> Encoder for &'a mut T
where
T: Encoder,
{
type W = T::W;
type C = T::C;
fn writer(&mut self) -> &mut Self::W {
T::writer(self)
}
fn config(&self) -> &Self::C {
T::config(self)
}
}
/// Encode the variant of the given option. Will not encode the option itself.
#[inline]
pub(crate) fn encode_option_variant<E: Encoder, T>(
encoder: &mut E,
value: &Option<T>,
) -> Result<(), EncodeError> {
match value {
None => 0u8.encode(encoder),
Some(_) => 1u8.encode(encoder),
}
}
/// Encodes the length of any slice, container, etc into the given encoder
#[inline]
pub(crate) fn encode_slice_len<E: Encoder>(encoder: &mut E, len: usize) -> Result<(), EncodeError> | {
(len as u64).encode(encoder)
} | identifier_body |
|
mod.rs | //! Encoder-based structs and traits.
mod encoder;
mod impl_tuples;
mod impls;
use self::write::Writer;
use crate::{config::Config, error::EncodeError, utils::Sealed};
pub mod write;
pub use self::encoder::EncoderImpl; | /// This trait will be automatically implemented if you enable the `derive` feature and add `#[derive(bincode::Encode)]` to your trait.
///
/// # Implementing this trait manually
///
/// If you want to implement this trait for your type, the easiest way is to add a `#[derive(bincode::Encode)]`, build and check your `target/` folder. This should generate a `<Struct name>_Encode.rs` file.
///
/// For this struct:
///
/// ```
/// struct Entity {
/// pub x: f32,
/// pub y: f32,
/// }
/// ```
/// It will look something like:
///
/// ```
/// # struct Entity {
/// # pub x: f32,
/// # pub y: f32,
/// # }
/// impl bincode::Encode for Entity {
/// fn encode<E: bincode::enc::Encoder>(
/// &self,
/// encoder: &mut E,
/// ) -> core::result::Result<(), bincode::error::EncodeError> {
/// bincode::Encode::encode(&self.x, encoder)?;
/// bincode::Encode::encode(&self.y, encoder)?;
/// Ok(())
/// }
/// }
/// ```
///
/// From here you can add/remove fields, or add custom logic.
pub trait Encode {
/// Encode a given type.
fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError>;
}
/// Helper trait to encode basic types into.
pub trait Encoder: Sealed {
/// The concrete [Writer] type
type W: Writer;
/// The concrete [Config] type
type C: Config;
/// Returns a mutable reference to the writer
fn writer(&mut self) -> &mut Self::W;
/// Returns a reference to the config
fn config(&self) -> &Self::C;
}
impl<'a, T> Encoder for &'a mut T
where
T: Encoder,
{
type W = T::W;
type C = T::C;
fn writer(&mut self) -> &mut Self::W {
T::writer(self)
}
fn config(&self) -> &Self::C {
T::config(self)
}
}
/// Encode the variant of the given option. Will not encode the option itself.
#[inline]
pub(crate) fn encode_option_variant<E: Encoder, T>(
encoder: &mut E,
value: &Option<T>,
) -> Result<(), EncodeError> {
match value {
None => 0u8.encode(encoder),
Some(_) => 1u8.encode(encoder),
}
}
/// Encodes the length of any slice, container, etc into the given encoder
#[inline]
pub(crate) fn encode_slice_len<E: Encoder>(encoder: &mut E, len: usize) -> Result<(), EncodeError> {
(len as u64).encode(encoder)
} |
/// Any source that can be encoded. This trait should be implemented for all types that you want to be able to use with any of the `encode_with` methods.
/// | random_line_split |
mod.rs | //! Encoder-based structs and traits.
mod encoder;
mod impl_tuples;
mod impls;
use self::write::Writer;
use crate::{config::Config, error::EncodeError, utils::Sealed};
pub mod write;
pub use self::encoder::EncoderImpl;
/// Any source that can be encoded. This trait should be implemented for all types that you want to be able to use with any of the `encode_with` methods.
///
/// This trait will be automatically implemented if you enable the `derive` feature and add `#[derive(bincode::Encode)]` to your trait.
///
/// # Implementing this trait manually
///
/// If you want to implement this trait for your type, the easiest way is to add a `#[derive(bincode::Encode)]`, build and check your `target/` folder. This should generate a `<Struct name>_Encode.rs` file.
///
/// For this struct:
///
/// ```
/// struct Entity {
/// pub x: f32,
/// pub y: f32,
/// }
/// ```
/// It will look something like:
///
/// ```
/// # struct Entity {
/// # pub x: f32,
/// # pub y: f32,
/// # }
/// impl bincode::Encode for Entity {
/// fn encode<E: bincode::enc::Encoder>(
/// &self,
/// encoder: &mut E,
/// ) -> core::result::Result<(), bincode::error::EncodeError> {
/// bincode::Encode::encode(&self.x, encoder)?;
/// bincode::Encode::encode(&self.y, encoder)?;
/// Ok(())
/// }
/// }
/// ```
///
/// From here you can add/remove fields, or add custom logic.
pub trait Encode {
/// Encode a given type.
fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError>;
}
/// Helper trait to encode basic types into.
pub trait Encoder: Sealed {
/// The concrete [Writer] type
type W: Writer;
/// The concrete [Config] type
type C: Config;
/// Returns a mutable reference to the writer
fn writer(&mut self) -> &mut Self::W;
/// Returns a reference to the config
fn config(&self) -> &Self::C;
}
impl<'a, T> Encoder for &'a mut T
where
T: Encoder,
{
type W = T::W;
type C = T::C;
fn writer(&mut self) -> &mut Self::W {
T::writer(self)
}
fn config(&self) -> &Self::C {
T::config(self)
}
}
/// Encode the variant of the given option. Will not encode the option itself.
#[inline]
pub(crate) fn encode_option_variant<E: Encoder, T>(
encoder: &mut E,
value: &Option<T>,
) -> Result<(), EncodeError> {
match value {
None => 0u8.encode(encoder),
Some(_) => 1u8.encode(encoder),
}
}
/// Encodes the length of any slice, container, etc into the given encoder
#[inline]
pub(crate) fn | <E: Encoder>(encoder: &mut E, len: usize) -> Result<(), EncodeError> {
(len as u64).encode(encoder)
}
| encode_slice_len | identifier_name |
model_deployment.py | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Deploy a model in AI Platform."""
import logging
import json
import time
import subprocess
from googleapiclient import discovery
from googleapiclient import errors
_WAIT_FOR_COMPLETION_SLEEP_SECONDS = 10
_PYTHON_VERSION = '3.5'
_RUN_TIME_VERSION = '1.15'
def _create_service():
"""Gets service instance to start API searches.
:return:
"""
return discovery.build('ml', 'v1')
def copy_artifacts(source_path, destination_path):
"""
:param source_path:
:param destination_path:
:return:
"""
logging.info(
'Moving model directory from {} to {}'.format(source_path,
destination_path))
subprocess.call(
"gsutil -m cp -r {} {}".format(source_path, destination_path),
shell=True)
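# Illustrative call (bucket names are hypothetical):
#   copy_artifacts('gs://mlflow-staging/model', 'gs://serving-models/v1')
# which shells out to `gsutil -m cp -r` exactly as above.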
class AIPlatformModel(object):
def __init__(self, project_id):
self._project_id = project_id
self._service = _create_service()
def model_exists(self, model_name):
"""
:param model_name:
:return:
"""
models = self._service.projects().models()
try:
response = models.list(
parent='projects/{}'.format(self._project_id)).execute()
if response:
                for model in response['models']:
                    if model['name'].rsplit('/', 1)[1] == model_name:
                        return True
            # No matching model in the listing (or the listing was empty).
            return False
except errors.HttpError as err:
logging.error('%s', json.loads(err.content)['error']['message'])
def _list_model_versions(self, model_name):
"""Lists existing model versions in the project.
Args:
model_name: Model name to list versions for.
Returns:
Dictionary of model versions.
"""
versions = self._service.projects().models().versions()
try:
return versions.list(
parent='projects/{}/models/{}'.format(self._project_id,
model_name)).execute()
except errors.HttpError as err:
logging.error('%s', json.loads(err.content)['error']['message'])
def create_model(self, model_name, model_region='us-central1'):
"""
:param model_name:
:param model_region:
:return:
"""
if not self.model_exists(model_name):
body = {
'name': model_name,
'regions': model_region,
'description': 'MLflow model'
}
parent = 'projects/{}'.format(self._project_id)
try:
self._service.projects().models().create(
parent=parent, body=body).execute()
logging.info('Model "%s" has been created.', model_name)
except errors.HttpError as err:
logging.error('"%s". Skipping model creation.',
json.loads(err.content)['error']['message'])
else:
logging.warning('Model "%s" already exists.', model_name)
def | (self, bucket_name, model_name, model_version,
runtime_version=_RUN_TIME_VERSION):
"""Deploys model on AI Platform.
Args:
bucket_name: Cloud Storage Bucket name that stores saved model.
model_name: Model name to deploy.
model_version: Model version.
runtime_version: Runtime version.
Raises:
RuntimeError if deployment completes with errors.
"""
# For details on request body, refer to:
# https://cloud.google.com/ml-engine/reference/rest/v1/projects
# .models.versions/create
model_version_exists = False
model_versions_list = self._list_model_versions(model_name)
# Field: version.name Error: A name should start with a letter and
# contain only letters, numbers and underscores
model_version = 'mlflow_{}'.format(model_version)
if model_versions_list:
for version in model_versions_list['versions']:
if version['name'].rsplit('/', 1)[1] == model_version:
model_version_exists = True
if not model_version_exists:
request_body = {
'name': model_version,
'deploymentUri': '{}'.format(bucket_name),
'framework': 'TENSORFLOW',
'runtimeVersion': runtime_version,
'pythonVersion': _PYTHON_VERSION
}
parent = 'projects/{}/models/{}'.format(self._project_id,
model_name)
response = self._service.projects().models().versions().create(
parent=parent, body=request_body).execute()
op_name = response['name']
while True:
deploy_status = (
self._service.projects().operations().get(
name=op_name).execute())
if deploy_status.get('done'):
logging.info('Model "%s" with version "%s" deployed.',
model_name,
model_version)
break
if deploy_status.get('error'):
logging.error(deploy_status['error'])
raise RuntimeError(
'Failed to deploy model for serving: {}'.format(
deploy_status['error']))
logging.info(
'Waiting for %d seconds for "%s" with "%s" version to be '
'deployed.',
_WAIT_FOR_COMPLETION_SLEEP_SECONDS, model_name,
model_version)
time.sleep(_WAIT_FOR_COMPLETION_SLEEP_SECONDS)
else:
logging.info('Model "%s" with version "%s" already exists.',
model_name,
model_version)
| deploy_model | identifier_name |
model_deployment.py | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Deploy a model in AI Platform."""
import logging
import json
import time
import subprocess
from googleapiclient import discovery
from googleapiclient import errors
_WAIT_FOR_COMPLETION_SLEEP_SECONDS = 10
_PYTHON_VERSION = '3.5'
_RUN_TIME_VERSION = '1.15'
def _create_service():
"""Gets service instance to start API searches.
:return:
"""
return discovery.build('ml', 'v1')
def copy_artifacts(source_path, destination_path):
"""
:param source_path:
:param destination_path:
:return:
"""
logging.info(
'Moving model directory from {} to {}'.format(source_path,
destination_path))
subprocess.call(
"gsutil -m cp -r {} {}".format(source_path, destination_path),
shell=True)
class AIPlatformModel(object):
def __init__(self, project_id):
|
def model_exists(self, model_name):
"""
:param model_name:
:return:
"""
models = self._service.projects().models()
try:
response = models.list(
parent='projects/{}'.format(self._project_id)).execute()
if response:
                for model in response['models']:
                    if model['name'].rsplit('/', 1)[1] == model_name:
                        return True
            # No matching model in the listing (or the listing was empty).
            return False
except errors.HttpError as err:
logging.error('%s', json.loads(err.content)['error']['message'])
def _list_model_versions(self, model_name):
"""Lists existing model versions in the project.
Args:
model_name: Model name to list versions for.
Returns:
Dictionary of model versions.
"""
versions = self._service.projects().models().versions()
try:
return versions.list(
parent='projects/{}/models/{}'.format(self._project_id,
model_name)).execute()
except errors.HttpError as err:
logging.error('%s', json.loads(err.content)['error']['message'])
def create_model(self, model_name, model_region='us-central1'):
"""
:param model_name:
:param model_region:
:return:
"""
if not self.model_exists(model_name):
body = {
'name': model_name,
'regions': model_region,
'description': 'MLflow model'
}
parent = 'projects/{}'.format(self._project_id)
try:
self._service.projects().models().create(
parent=parent, body=body).execute()
logging.info('Model "%s" has been created.', model_name)
except errors.HttpError as err:
logging.error('"%s". Skipping model creation.',
json.loads(err.content)['error']['message'])
else:
logging.warning('Model "%s" already exists.', model_name)
def deploy_model(self, bucket_name, model_name, model_version,
runtime_version=_RUN_TIME_VERSION):
"""Deploys model on AI Platform.
Args:
bucket_name: Cloud Storage Bucket name that stores saved model.
model_name: Model name to deploy.
model_version: Model version.
runtime_version: Runtime version.
Raises:
RuntimeError if deployment completes with errors.
"""
# For details on request body, refer to:
# https://cloud.google.com/ml-engine/reference/rest/v1/projects
# .models.versions/create
model_version_exists = False
model_versions_list = self._list_model_versions(model_name)
# Field: version.name Error: A name should start with a letter and
# contain only letters, numbers and underscores
model_version = 'mlflow_{}'.format(model_version)
if model_versions_list:
for version in model_versions_list['versions']:
if version['name'].rsplit('/', 1)[1] == model_version:
model_version_exists = True
if not model_version_exists:
request_body = {
'name': model_version,
'deploymentUri': '{}'.format(bucket_name),
'framework': 'TENSORFLOW',
'runtimeVersion': runtime_version,
'pythonVersion': _PYTHON_VERSION
}
parent = 'projects/{}/models/{}'.format(self._project_id,
model_name)
response = self._service.projects().models().versions().create(
parent=parent, body=request_body).execute()
op_name = response['name']
while True:
deploy_status = (
self._service.projects().operations().get(
name=op_name).execute())
if deploy_status.get('done'):
logging.info('Model "%s" with version "%s" deployed.',
model_name,
model_version)
break
if deploy_status.get('error'):
logging.error(deploy_status['error'])
raise RuntimeError(
'Failed to deploy model for serving: {}'.format(
deploy_status['error']))
logging.info(
'Waiting for %d seconds for "%s" with "%s" version to be '
'deployed.',
_WAIT_FOR_COMPLETION_SLEEP_SECONDS, model_name,
model_version)
time.sleep(_WAIT_FOR_COMPLETION_SLEEP_SECONDS)
else:
logging.info('Model "%s" with version "%s" already exists.',
model_name,
model_version)
| self._project_id = project_id
self._service = _create_service() | identifier_body |
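# --- Illustrative aside, not part of the dataset rows above or below ---
# A minimal usage sketch for the AIPlatformModel class above; the project id,
# bucket path, model name and version below are made-up placeholders:
# model = AIPlatformModel(project_id='my-gcp-project')
# model.create_model('sample_model', model_region='us-central1')
# model.deploy_model(bucket_name='gs://my-bucket/model_dir',
#                    model_name='sample_model',
#                    model_version='1')  # served as version "mlflow_1"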
model_deployment.py | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Deploy a model in AI Platform."""
import logging
import json
import time
import subprocess
from googleapiclient import discovery
from googleapiclient import errors
_WAIT_FOR_COMPLETION_SLEEP_SECONDS = 10
_PYTHON_VERSION = '3.5'
_RUN_TIME_VERSION = '1.15'
def _create_service():
"""Gets service instance to start API searches.
:return:
"""
return discovery.build('ml', 'v1')
def copy_artifacts(source_path, destination_path):
"""
:param source_path:
:param destination_path:
:return:
"""
logging.info(
'Moving model directory from {} to {}'.format(source_path,
destination_path))
subprocess.call(
"gsutil -m cp -r {} {}".format(source_path, destination_path),
shell=True)
class AIPlatformModel(object):
def __init__(self, project_id):
self._project_id = project_id
self._service = _create_service()
def model_exists(self, model_name):
"""
:param model_name:
:return:
"""
models = self._service.projects().models()
try:
response = models.list(
parent='projects/{}'.format(self._project_id)).execute()
            if response:
                for model in response['models']:
                    if model['name'].rsplit('/', 1)[1] == model_name:
                        return True
            return False
except errors.HttpError as err:
logging.error('%s', json.loads(err.content)['error']['message'])
def _list_model_versions(self, model_name):
"""Lists existing model versions in the project.
Args:
model_name: Model name to list versions for.
Returns:
Dictionary of model versions.
"""
versions = self._service.projects().models().versions()
try:
return versions.list(
parent='projects/{}/models/{}'.format(self._project_id,
model_name)).execute()
except errors.HttpError as err:
logging.error('%s', json.loads(err.content)['error']['message'])
def create_model(self, model_name, model_region='us-central1'):
"""
:param model_name:
:param model_region:
:return:
"""
if not self.model_exists(model_name):
body = {
'name': model_name,
'regions': model_region,
'description': 'MLflow model'
}
parent = 'projects/{}'.format(self._project_id)
try:
self._service.projects().models().create(
parent=parent, body=body).execute()
logging.info('Model "%s" has been created.', model_name)
except errors.HttpError as err:
logging.error('"%s". Skipping model creation.',
json.loads(err.content)['error']['message'])
else:
logging.warning('Model "%s" already exists.', model_name)
def deploy_model(self, bucket_name, model_name, model_version,
runtime_version=_RUN_TIME_VERSION):
"""Deploys model on AI Platform.
Args: | Raises:
RuntimeError if deployment completes with errors.
"""
# For details on request body, refer to:
# https://cloud.google.com/ml-engine/reference/rest/v1/projects
# .models.versions/create
model_version_exists = False
model_versions_list = self._list_model_versions(model_name)
# Field: version.name Error: A name should start with a letter and
# contain only letters, numbers and underscores
model_version = 'mlflow_{}'.format(model_version)
if model_versions_list:
for version in model_versions_list['versions']:
if version['name'].rsplit('/', 1)[1] == model_version:
model_version_exists = True
if not model_version_exists:
request_body = {
'name': model_version,
'deploymentUri': '{}'.format(bucket_name),
'framework': 'TENSORFLOW',
'runtimeVersion': runtime_version,
'pythonVersion': _PYTHON_VERSION
}
parent = 'projects/{}/models/{}'.format(self._project_id,
model_name)
response = self._service.projects().models().versions().create(
parent=parent, body=request_body).execute()
op_name = response['name']
while True:
deploy_status = (
self._service.projects().operations().get(
name=op_name).execute())
if deploy_status.get('done'):
logging.info('Model "%s" with version "%s" deployed.',
model_name,
model_version)
break
if deploy_status.get('error'):
logging.error(deploy_status['error'])
raise RuntimeError(
'Failed to deploy model for serving: {}'.format(
deploy_status['error']))
logging.info(
'Waiting for %d seconds for "%s" with "%s" version to be '
'deployed.',
_WAIT_FOR_COMPLETION_SLEEP_SECONDS, model_name,
model_version)
time.sleep(_WAIT_FOR_COMPLETION_SLEEP_SECONDS)
else:
logging.info('Model "%s" with version "%s" already exists.',
model_name,
model_version) | bucket_name: Cloud Storage Bucket name that stores saved model.
model_name: Model name to deploy.
model_version: Model version.
runtime_version: Runtime version.
| random_line_split |
model_deployment.py | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Deploy a model in AI Platform."""
import logging
import json
import time
import subprocess
from googleapiclient import discovery
from googleapiclient import errors
_WAIT_FOR_COMPLETION_SLEEP_SECONDS = 10
_PYTHON_VERSION = '3.5'
_RUN_TIME_VERSION = '1.15'
def _create_service():
"""Gets service instance to start API searches.
:return:
"""
return discovery.build('ml', 'v1')
def copy_artifacts(source_path, destination_path):
"""
:param source_path:
:param destination_path:
:return:
"""
logging.info(
'Moving model directory from {} to {}'.format(source_path,
destination_path))
subprocess.call(
"gsutil -m cp -r {} {}".format(source_path, destination_path),
shell=True)
class AIPlatformModel(object):
def __init__(self, project_id):
self._project_id = project_id
self._service = _create_service()
def model_exists(self, model_name):
"""
:param model_name:
:return:
"""
models = self._service.projects().models()
try:
response = models.list(
parent='projects/{}'.format(self._project_id)).execute()
if response:
for model in response['models']:
if model['name'].rsplit('/', 1)[1] == model_name:
|
else:
return False
except errors.HttpError as err:
logging.error('%s', json.loads(err.content)['error']['message'])
def _list_model_versions(self, model_name):
"""Lists existing model versions in the project.
Args:
model_name: Model name to list versions for.
Returns:
Dictionary of model versions.
"""
versions = self._service.projects().models().versions()
try:
return versions.list(
parent='projects/{}/models/{}'.format(self._project_id,
model_name)).execute()
except errors.HttpError as err:
logging.error('%s', json.loads(err.content)['error']['message'])
def create_model(self, model_name, model_region='us-central1'):
"""
:param model_name:
:param model_region:
:return:
"""
if not self.model_exists(model_name):
body = {
'name': model_name,
'regions': model_region,
'description': 'MLflow model'
}
parent = 'projects/{}'.format(self._project_id)
try:
self._service.projects().models().create(
parent=parent, body=body).execute()
logging.info('Model "%s" has been created.', model_name)
except errors.HttpError as err:
logging.error('"%s". Skipping model creation.',
json.loads(err.content)['error']['message'])
else:
logging.warning('Model "%s" already exists.', model_name)
def deploy_model(self, bucket_name, model_name, model_version,
runtime_version=_RUN_TIME_VERSION):
"""Deploys model on AI Platform.
Args:
bucket_name: Cloud Storage Bucket name that stores saved model.
model_name: Model name to deploy.
model_version: Model version.
runtime_version: Runtime version.
Raises:
RuntimeError if deployment completes with errors.
"""
# For details on request body, refer to:
# https://cloud.google.com/ml-engine/reference/rest/v1/projects
# .models.versions/create
model_version_exists = False
model_versions_list = self._list_model_versions(model_name)
# Field: version.name Error: A name should start with a letter and
# contain only letters, numbers and underscores
model_version = 'mlflow_{}'.format(model_version)
if model_versions_list:
for version in model_versions_list['versions']:
if version['name'].rsplit('/', 1)[1] == model_version:
model_version_exists = True
if not model_version_exists:
request_body = {
'name': model_version,
'deploymentUri': '{}'.format(bucket_name),
'framework': 'TENSORFLOW',
'runtimeVersion': runtime_version,
'pythonVersion': _PYTHON_VERSION
}
parent = 'projects/{}/models/{}'.format(self._project_id,
model_name)
response = self._service.projects().models().versions().create(
parent=parent, body=request_body).execute()
op_name = response['name']
while True:
deploy_status = (
self._service.projects().operations().get(
name=op_name).execute())
if deploy_status.get('done'):
logging.info('Model "%s" with version "%s" deployed.',
model_name,
model_version)
break
if deploy_status.get('error'):
logging.error(deploy_status['error'])
raise RuntimeError(
'Failed to deploy model for serving: {}'.format(
deploy_status['error']))
logging.info(
'Waiting for %d seconds for "%s" with "%s" version to be '
'deployed.',
_WAIT_FOR_COMPLETION_SLEEP_SECONDS, model_name,
model_version)
time.sleep(_WAIT_FOR_COMPLETION_SLEEP_SECONDS)
else:
logging.info('Model "%s" with version "%s" already exists.',
model_name,
model_version)
| return True | conditional_block |
sre_constants.py | #
# Secret Labs' Regular Expression Engine
#
# various symbols used by the regular expression engine.
# run this script to update the _sre include files!
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#
"""Internal support module for sre"""
# update when constants are added or removed
MAGIC = 20031017
MAXREPEAT = 2147483648
#from _sre import MAXREPEAT
# SRE standard exception (access as sre.error)
# should this really be here?
class error(Exception):
|
# operators
FAILURE = "failure"
SUCCESS = "success"
ANY = "any"
ANY_ALL = "any_all"
ASSERT = "assert"
ASSERT_NOT = "assert_not"
AT = "at"
BIGCHARSET = "bigcharset"
BRANCH = "branch"
CALL = "call"
CATEGORY = "category"
CHARSET = "charset"
GROUPREF = "groupref"
GROUPREF_IGNORE = "groupref_ignore"
GROUPREF_EXISTS = "groupref_exists"
IN = "in"
IN_IGNORE = "in_ignore"
INFO = "info"
JUMP = "jump"
LITERAL = "literal"
LITERAL_IGNORE = "literal_ignore"
MARK = "mark"
MAX_REPEAT = "max_repeat"
MAX_UNTIL = "max_until"
MIN_REPEAT = "min_repeat"
MIN_UNTIL = "min_until"
NEGATE = "negate"
NOT_LITERAL = "not_literal"
NOT_LITERAL_IGNORE = "not_literal_ignore"
RANGE = "range"
REPEAT = "repeat"
REPEAT_ONE = "repeat_one"
SUBPATTERN = "subpattern"
MIN_REPEAT_ONE = "min_repeat_one"
# positions
AT_BEGINNING = "at_beginning"
AT_BEGINNING_LINE = "at_beginning_line"
AT_BEGINNING_STRING = "at_beginning_string"
AT_BOUNDARY = "at_boundary"
AT_NON_BOUNDARY = "at_non_boundary"
AT_END = "at_end"
AT_END_LINE = "at_end_line"
AT_END_STRING = "at_end_string"
AT_LOC_BOUNDARY = "at_loc_boundary"
AT_LOC_NON_BOUNDARY = "at_loc_non_boundary"
AT_UNI_BOUNDARY = "at_uni_boundary"
AT_UNI_NON_BOUNDARY = "at_uni_non_boundary"
# categories
CATEGORY_DIGIT = "category_digit"
CATEGORY_NOT_DIGIT = "category_not_digit"
CATEGORY_SPACE = "category_space"
CATEGORY_NOT_SPACE = "category_not_space"
CATEGORY_WORD = "category_word"
CATEGORY_NOT_WORD = "category_not_word"
CATEGORY_LINEBREAK = "category_linebreak"
CATEGORY_NOT_LINEBREAK = "category_not_linebreak"
CATEGORY_LOC_WORD = "category_loc_word"
CATEGORY_LOC_NOT_WORD = "category_loc_not_word"
CATEGORY_UNI_DIGIT = "category_uni_digit"
CATEGORY_UNI_NOT_DIGIT = "category_uni_not_digit"
CATEGORY_UNI_SPACE = "category_uni_space"
CATEGORY_UNI_NOT_SPACE = "category_uni_not_space"
CATEGORY_UNI_WORD = "category_uni_word"
CATEGORY_UNI_NOT_WORD = "category_uni_not_word"
CATEGORY_UNI_LINEBREAK = "category_uni_linebreak"
CATEGORY_UNI_NOT_LINEBREAK = "category_uni_not_linebreak"
OPCODES = [
# failure=0 success=1 (just because it looks better that way :-)
FAILURE, SUCCESS,
ANY, ANY_ALL,
ASSERT, ASSERT_NOT,
AT,
BRANCH,
CALL,
CATEGORY,
CHARSET, BIGCHARSET,
GROUPREF, GROUPREF_EXISTS, GROUPREF_IGNORE,
IN, IN_IGNORE,
INFO,
JUMP,
LITERAL, LITERAL_IGNORE,
MARK,
MAX_UNTIL,
MIN_UNTIL,
NOT_LITERAL, NOT_LITERAL_IGNORE,
NEGATE,
RANGE,
REPEAT,
REPEAT_ONE,
SUBPATTERN,
MIN_REPEAT_ONE
]
ATCODES = [
AT_BEGINNING, AT_BEGINNING_LINE, AT_BEGINNING_STRING, AT_BOUNDARY,
AT_NON_BOUNDARY, AT_END, AT_END_LINE, AT_END_STRING,
AT_LOC_BOUNDARY, AT_LOC_NON_BOUNDARY, AT_UNI_BOUNDARY,
AT_UNI_NON_BOUNDARY
]
CHCODES = [
CATEGORY_DIGIT, CATEGORY_NOT_DIGIT, CATEGORY_SPACE,
CATEGORY_NOT_SPACE, CATEGORY_WORD, CATEGORY_NOT_WORD,
CATEGORY_LINEBREAK, CATEGORY_NOT_LINEBREAK, CATEGORY_LOC_WORD,
CATEGORY_LOC_NOT_WORD, CATEGORY_UNI_DIGIT, CATEGORY_UNI_NOT_DIGIT,
CATEGORY_UNI_SPACE, CATEGORY_UNI_NOT_SPACE, CATEGORY_UNI_WORD,
CATEGORY_UNI_NOT_WORD, CATEGORY_UNI_LINEBREAK,
CATEGORY_UNI_NOT_LINEBREAK
]
def makedict(list):
d = {}
i = 0
for item in list:
d[item] = i
i = i + 1
return d
OPCODES = makedict(OPCODES)
ATCODES = makedict(ATCODES)
CHCODES = makedict(CHCODES)
# replacement operations for "ignore case" mode
OP_IGNORE = {
GROUPREF: GROUPREF_IGNORE,
IN: IN_IGNORE,
LITERAL: LITERAL_IGNORE,
NOT_LITERAL: NOT_LITERAL_IGNORE
}
AT_MULTILINE = {
AT_BEGINNING: AT_BEGINNING_LINE,
AT_END: AT_END_LINE
}
AT_LOCALE = {
AT_BOUNDARY: AT_LOC_BOUNDARY,
AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY
}
AT_UNICODE = {
AT_BOUNDARY: AT_UNI_BOUNDARY,
AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY
}
CH_LOCALE = {
CATEGORY_DIGIT: CATEGORY_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_SPACE,
CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE,
CATEGORY_WORD: CATEGORY_LOC_WORD,
CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK
}
CH_UNICODE = {
CATEGORY_DIGIT: CATEGORY_UNI_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_UNI_SPACE,
CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE,
CATEGORY_WORD: CATEGORY_UNI_WORD,
CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK
}
# flags
SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking)
SRE_FLAG_IGNORECASE = 2 # case insensitive
SRE_FLAG_LOCALE = 4 # honour system locale
SRE_FLAG_MULTILINE = 8 # treat target as multiline string
SRE_FLAG_DOTALL = 16 # treat target as a single string
SRE_FLAG_UNICODE = 32 # use unicode "locale"
SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments
SRE_FLAG_DEBUG = 128 # debugging
SRE_FLAG_ASCII = 256 # use ascii "locale"
# flags for INFO primitive
SRE_INFO_PREFIX = 1 # has prefix
SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix)
SRE_INFO_CHARSET = 4 # pattern starts with character from given set
if __name__ == "__main__":
def dump(f, d, prefix):
items = sorted(d.items(), key=lambda a: a[1])
for k, v in items:
f.write("#define %s_%s %s\n" % (prefix, k.upper(), v))
f = open("sre_constants.h", "w")
f.write("""\
/*
* Secret Labs' Regular Expression Engine
*
* regular expression matching engine
*
* NOTE: This file is generated by sre_constants.py. If you need
* to change anything in here, edit sre_constants.py and run it.
*
* Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.
*
* See the _sre.c file for information on usage and redistribution.
*/
""")
f.write("#define SRE_MAGIC %d\n" % MAGIC)
dump(f, OPCODES, "SRE_OP")
dump(f, ATCODES, "SRE")
dump(f, CHCODES, "SRE")
f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE)
f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE)
f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE)
f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE)
f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL)
f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE)
f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE)
f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX)
f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL)
f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET)
f.close()
print("done")
| pass | identifier_body |
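# --- Illustrative aside, not part of the dataset rows above or below ---
# makedict maps each symbolic name to its list position, so after the
# module-level reassignments OPCODES, ATCODES and CHCODES are name->int maps.
# Hypothetical sanity check mirroring the "failure=0 success=1" comment:
# assert makedict([FAILURE, SUCCESS]) == {FAILURE: 0, SUCCESS: 1}
# assert OPCODES[FAILURE] == 0 and OPCODES[SUCCESS] == 1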
sre_constants.py | #
# Secret Labs' Regular Expression Engine
#
# various symbols used by the regular expression engine.
# run this script to update the _sre include files!
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#
"""Internal support module for sre"""
# update when constants are added or removed
MAGIC = 20031017
MAXREPEAT = 2147483648
#from _sre import MAXREPEAT
# SRE standard exception (access as sre.error)
# should this really be here?
class error(Exception):
pass
# operators
FAILURE = "failure"
SUCCESS = "success"
ANY = "any"
ANY_ALL = "any_all"
ASSERT = "assert"
ASSERT_NOT = "assert_not"
AT = "at"
BIGCHARSET = "bigcharset"
BRANCH = "branch"
CALL = "call"
CATEGORY = "category"
CHARSET = "charset"
GROUPREF = "groupref"
GROUPREF_IGNORE = "groupref_ignore"
GROUPREF_EXISTS = "groupref_exists"
IN = "in"
IN_IGNORE = "in_ignore"
INFO = "info"
JUMP = "jump"
LITERAL = "literal"
LITERAL_IGNORE = "literal_ignore"
MARK = "mark"
MAX_REPEAT = "max_repeat"
MAX_UNTIL = "max_until"
MIN_REPEAT = "min_repeat"
MIN_UNTIL = "min_until"
NEGATE = "negate"
NOT_LITERAL = "not_literal"
NOT_LITERAL_IGNORE = "not_literal_ignore"
RANGE = "range"
REPEAT = "repeat"
REPEAT_ONE = "repeat_one"
SUBPATTERN = "subpattern"
MIN_REPEAT_ONE = "min_repeat_one"
# positions
AT_BEGINNING = "at_beginning"
AT_BEGINNING_LINE = "at_beginning_line"
AT_BEGINNING_STRING = "at_beginning_string"
AT_BOUNDARY = "at_boundary"
AT_NON_BOUNDARY = "at_non_boundary"
AT_END = "at_end"
AT_END_LINE = "at_end_line"
AT_END_STRING = "at_end_string"
AT_LOC_BOUNDARY = "at_loc_boundary"
AT_LOC_NON_BOUNDARY = "at_loc_non_boundary"
AT_UNI_BOUNDARY = "at_uni_boundary"
AT_UNI_NON_BOUNDARY = "at_uni_non_boundary"
# categories
CATEGORY_DIGIT = "category_digit"
CATEGORY_NOT_DIGIT = "category_not_digit"
CATEGORY_SPACE = "category_space"
CATEGORY_NOT_SPACE = "category_not_space"
CATEGORY_WORD = "category_word"
CATEGORY_NOT_WORD = "category_not_word"
CATEGORY_LINEBREAK = "category_linebreak"
CATEGORY_NOT_LINEBREAK = "category_not_linebreak"
CATEGORY_LOC_WORD = "category_loc_word"
CATEGORY_LOC_NOT_WORD = "category_loc_not_word"
CATEGORY_UNI_DIGIT = "category_uni_digit"
CATEGORY_UNI_NOT_DIGIT = "category_uni_not_digit"
CATEGORY_UNI_SPACE = "category_uni_space"
CATEGORY_UNI_NOT_SPACE = "category_uni_not_space"
CATEGORY_UNI_WORD = "category_uni_word"
CATEGORY_UNI_NOT_WORD = "category_uni_not_word"
CATEGORY_UNI_LINEBREAK = "category_uni_linebreak"
CATEGORY_UNI_NOT_LINEBREAK = "category_uni_not_linebreak"
OPCODES = [
# failure=0 success=1 (just because it looks better that way :-)
FAILURE, SUCCESS,
ANY, ANY_ALL,
ASSERT, ASSERT_NOT,
AT,
BRANCH,
CALL,
CATEGORY,
CHARSET, BIGCHARSET,
GROUPREF, GROUPREF_EXISTS, GROUPREF_IGNORE,
IN, IN_IGNORE,
INFO,
JUMP,
LITERAL, LITERAL_IGNORE,
MARK,
MAX_UNTIL,
MIN_UNTIL,
NOT_LITERAL, NOT_LITERAL_IGNORE,
NEGATE,
RANGE,
REPEAT,
REPEAT_ONE,
SUBPATTERN,
MIN_REPEAT_ONE
]
ATCODES = [
AT_BEGINNING, AT_BEGINNING_LINE, AT_BEGINNING_STRING, AT_BOUNDARY,
AT_NON_BOUNDARY, AT_END, AT_END_LINE, AT_END_STRING,
AT_LOC_BOUNDARY, AT_LOC_NON_BOUNDARY, AT_UNI_BOUNDARY,
AT_UNI_NON_BOUNDARY
]
CHCODES = [
CATEGORY_DIGIT, CATEGORY_NOT_DIGIT, CATEGORY_SPACE,
CATEGORY_NOT_SPACE, CATEGORY_WORD, CATEGORY_NOT_WORD,
CATEGORY_LINEBREAK, CATEGORY_NOT_LINEBREAK, CATEGORY_LOC_WORD,
CATEGORY_LOC_NOT_WORD, CATEGORY_UNI_DIGIT, CATEGORY_UNI_NOT_DIGIT,
CATEGORY_UNI_SPACE, CATEGORY_UNI_NOT_SPACE, CATEGORY_UNI_WORD,
CATEGORY_UNI_NOT_WORD, CATEGORY_UNI_LINEBREAK,
CATEGORY_UNI_NOT_LINEBREAK
]
def makedict(list):
d = {}
i = 0
for item in list:
d[item] = i
i = i + 1
return d
OPCODES = makedict(OPCODES)
ATCODES = makedict(ATCODES)
CHCODES = makedict(CHCODES)
# replacement operations for "ignore case" mode
OP_IGNORE = {
GROUPREF: GROUPREF_IGNORE,
IN: IN_IGNORE,
LITERAL: LITERAL_IGNORE,
NOT_LITERAL: NOT_LITERAL_IGNORE
}
AT_MULTILINE = {
AT_BEGINNING: AT_BEGINNING_LINE,
AT_END: AT_END_LINE
}
AT_LOCALE = {
AT_BOUNDARY: AT_LOC_BOUNDARY,
AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY
}
AT_UNICODE = {
AT_BOUNDARY: AT_UNI_BOUNDARY,
AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY
}
CH_LOCALE = {
CATEGORY_DIGIT: CATEGORY_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_SPACE,
CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE,
CATEGORY_WORD: CATEGORY_LOC_WORD,
CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK
}
CH_UNICODE = {
CATEGORY_DIGIT: CATEGORY_UNI_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_UNI_SPACE,
CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE,
CATEGORY_WORD: CATEGORY_UNI_WORD,
CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK
}
# flags
SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking)
SRE_FLAG_IGNORECASE = 2 # case insensitive
SRE_FLAG_LOCALE = 4 # honour system locale
SRE_FLAG_MULTILINE = 8 # treat target as multiline string
SRE_FLAG_DOTALL = 16 # treat target as a single string
SRE_FLAG_UNICODE = 32 # use unicode "locale"
SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments
SRE_FLAG_DEBUG = 128 # debugging
SRE_FLAG_ASCII = 256 # use ascii "locale"
# flags for INFO primitive
SRE_INFO_PREFIX = 1 # has prefix
SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix)
SRE_INFO_CHARSET = 4 # pattern starts with character from given set
if __name__ == "__main__":
| def dump(f, d, prefix):
items = sorted(d.items(), key=lambda a: a[1])
for k, v in items:
f.write("#define %s_%s %s\n" % (prefix, k.upper(), v))
f = open("sre_constants.h", "w")
f.write("""\
/*
* Secret Labs' Regular Expression Engine
*
* regular expression matching engine
*
* NOTE: This file is generated by sre_constants.py. If you need
* to change anything in here, edit sre_constants.py and run it.
*
* Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.
*
* See the _sre.c file for information on usage and redistribution.
*/
""")
f.write("#define SRE_MAGIC %d\n" % MAGIC)
dump(f, OPCODES, "SRE_OP")
dump(f, ATCODES, "SRE")
dump(f, CHCODES, "SRE")
f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE)
f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE)
f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE)
f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE)
f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL)
f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE)
f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE)
f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX)
f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL)
f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET)
f.close()
print("done") | conditional_block |
|
sre_constants.py | #
# Secret Labs' Regular Expression Engine
#
# various symbols used by the regular expression engine.
# run this script to update the _sre include files!
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#
"""Internal support module for sre"""
# update when constants are added or removed
MAGIC = 20031017
MAXREPEAT = 2147483648
#from _sre import MAXREPEAT
# SRE standard exception (access as sre.error)
# should this really be here?
class error(Exception):
pass
# operators
FAILURE = "failure"
SUCCESS = "success"
ANY = "any"
ANY_ALL = "any_all"
ASSERT = "assert"
ASSERT_NOT = "assert_not"
AT = "at"
BIGCHARSET = "bigcharset"
BRANCH = "branch"
CALL = "call"
CATEGORY = "category"
CHARSET = "charset"
GROUPREF = "groupref"
GROUPREF_IGNORE = "groupref_ignore"
GROUPREF_EXISTS = "groupref_exists"
IN = "in"
IN_IGNORE = "in_ignore"
INFO = "info"
JUMP = "jump"
LITERAL = "literal"
LITERAL_IGNORE = "literal_ignore"
MARK = "mark"
MAX_REPEAT = "max_repeat"
MAX_UNTIL = "max_until"
MIN_REPEAT = "min_repeat"
MIN_UNTIL = "min_until"
NEGATE = "negate"
NOT_LITERAL = "not_literal"
NOT_LITERAL_IGNORE = "not_literal_ignore"
RANGE = "range"
REPEAT = "repeat"
REPEAT_ONE = "repeat_one"
SUBPATTERN = "subpattern"
MIN_REPEAT_ONE = "min_repeat_one"
# positions
AT_BEGINNING = "at_beginning"
AT_BEGINNING_LINE = "at_beginning_line"
AT_BEGINNING_STRING = "at_beginning_string"
AT_BOUNDARY = "at_boundary"
AT_NON_BOUNDARY = "at_non_boundary"
AT_END = "at_end"
AT_END_LINE = "at_end_line"
AT_END_STRING = "at_end_string"
AT_LOC_BOUNDARY = "at_loc_boundary"
AT_LOC_NON_BOUNDARY = "at_loc_non_boundary"
AT_UNI_BOUNDARY = "at_uni_boundary"
AT_UNI_NON_BOUNDARY = "at_uni_non_boundary"
# categories
CATEGORY_DIGIT = "category_digit"
CATEGORY_NOT_DIGIT = "category_not_digit"
CATEGORY_SPACE = "category_space"
CATEGORY_NOT_SPACE = "category_not_space"
CATEGORY_WORD = "category_word"
CATEGORY_NOT_WORD = "category_not_word"
CATEGORY_LINEBREAK = "category_linebreak"
CATEGORY_NOT_LINEBREAK = "category_not_linebreak"
CATEGORY_LOC_WORD = "category_loc_word"
CATEGORY_LOC_NOT_WORD = "category_loc_not_word"
CATEGORY_UNI_DIGIT = "category_uni_digit"
CATEGORY_UNI_NOT_DIGIT = "category_uni_not_digit"
CATEGORY_UNI_SPACE = "category_uni_space"
CATEGORY_UNI_NOT_SPACE = "category_uni_not_space"
CATEGORY_UNI_WORD = "category_uni_word"
CATEGORY_UNI_NOT_WORD = "category_uni_not_word"
CATEGORY_UNI_LINEBREAK = "category_uni_linebreak"
CATEGORY_UNI_NOT_LINEBREAK = "category_uni_not_linebreak"
OPCODES = [
# failure=0 success=1 (just because it looks better that way :-)
FAILURE, SUCCESS,
ANY, ANY_ALL,
ASSERT, ASSERT_NOT,
AT,
BRANCH,
CALL,
CATEGORY,
CHARSET, BIGCHARSET,
GROUPREF, GROUPREF_EXISTS, GROUPREF_IGNORE,
IN, IN_IGNORE,
INFO,
JUMP,
LITERAL, LITERAL_IGNORE,
MARK,
MAX_UNTIL,
MIN_UNTIL,
NOT_LITERAL, NOT_LITERAL_IGNORE,
NEGATE,
RANGE,
REPEAT,
REPEAT_ONE,
SUBPATTERN,
MIN_REPEAT_ONE
]
ATCODES = [
AT_BEGINNING, AT_BEGINNING_LINE, AT_BEGINNING_STRING, AT_BOUNDARY,
AT_NON_BOUNDARY, AT_END, AT_END_LINE, AT_END_STRING,
AT_LOC_BOUNDARY, AT_LOC_NON_BOUNDARY, AT_UNI_BOUNDARY,
AT_UNI_NON_BOUNDARY
]
CHCODES = [
CATEGORY_DIGIT, CATEGORY_NOT_DIGIT, CATEGORY_SPACE,
CATEGORY_NOT_SPACE, CATEGORY_WORD, CATEGORY_NOT_WORD,
CATEGORY_LINEBREAK, CATEGORY_NOT_LINEBREAK, CATEGORY_LOC_WORD,
CATEGORY_LOC_NOT_WORD, CATEGORY_UNI_DIGIT, CATEGORY_UNI_NOT_DIGIT,
CATEGORY_UNI_SPACE, CATEGORY_UNI_NOT_SPACE, CATEGORY_UNI_WORD,
CATEGORY_UNI_NOT_WORD, CATEGORY_UNI_LINEBREAK,
CATEGORY_UNI_NOT_LINEBREAK
]
def makedict(list):
d = {}
i = 0
for item in list:
d[item] = i
i = i + 1
return d
OPCODES = makedict(OPCODES)
ATCODES = makedict(ATCODES)
CHCODES = makedict(CHCODES)
# replacement operations for "ignore case" mode
OP_IGNORE = {
GROUPREF: GROUPREF_IGNORE,
IN: IN_IGNORE,
LITERAL: LITERAL_IGNORE,
NOT_LITERAL: NOT_LITERAL_IGNORE
}
AT_MULTILINE = {
AT_BEGINNING: AT_BEGINNING_LINE,
AT_END: AT_END_LINE
}
AT_LOCALE = {
AT_BOUNDARY: AT_LOC_BOUNDARY,
AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY
}
AT_UNICODE = {
AT_BOUNDARY: AT_UNI_BOUNDARY,
AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY
}
CH_LOCALE = {
CATEGORY_DIGIT: CATEGORY_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_SPACE,
CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE,
CATEGORY_WORD: CATEGORY_LOC_WORD,
CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK
}
CH_UNICODE = {
CATEGORY_DIGIT: CATEGORY_UNI_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_UNI_SPACE,
CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE,
CATEGORY_WORD: CATEGORY_UNI_WORD,
CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK
}
# flags
SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking)
SRE_FLAG_IGNORECASE = 2 # case insensitive
SRE_FLAG_LOCALE = 4 # honour system locale
SRE_FLAG_MULTILINE = 8 # treat target as multiline string
SRE_FLAG_DOTALL = 16 # treat target as a single string
SRE_FLAG_UNICODE = 32 # use unicode "locale"
SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments | SRE_FLAG_ASCII = 256 # use ascii "locale"
# flags for INFO primitive
SRE_INFO_PREFIX = 1 # has prefix
SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix)
SRE_INFO_CHARSET = 4 # pattern starts with character from given set
if __name__ == "__main__":
def dump(f, d, prefix):
items = sorted(d.items(), key=lambda a: a[1])
for k, v in items:
f.write("#define %s_%s %s\n" % (prefix, k.upper(), v))
f = open("sre_constants.h", "w")
f.write("""\
/*
* Secret Labs' Regular Expression Engine
*
* regular expression matching engine
*
* NOTE: This file is generated by sre_constants.py. If you need
* to change anything in here, edit sre_constants.py and run it.
*
* Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.
*
* See the _sre.c file for information on usage and redistribution.
*/
""")
f.write("#define SRE_MAGIC %d\n" % MAGIC)
dump(f, OPCODES, "SRE_OP")
dump(f, ATCODES, "SRE")
dump(f, CHCODES, "SRE")
f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE)
f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE)
f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE)
f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE)
f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL)
f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE)
f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE)
f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX)
f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL)
f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET)
f.close()
print("done") | SRE_FLAG_DEBUG = 128 # debugging | random_line_split |
sre_constants.py | #
# Secret Labs' Regular Expression Engine
#
# various symbols used by the regular expression engine.
# run this script to update the _sre include files!
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#
"""Internal support module for sre"""
# update when constants are added or removed
MAGIC = 20031017
MAXREPEAT = 2147483648
#from _sre import MAXREPEAT
# SRE standard exception (access as sre.error)
# should this really be here?
class error(Exception):
pass
# operators
FAILURE = "failure"
SUCCESS = "success"
ANY = "any"
ANY_ALL = "any_all"
ASSERT = "assert"
ASSERT_NOT = "assert_not"
AT = "at"
BIGCHARSET = "bigcharset"
BRANCH = "branch"
CALL = "call"
CATEGORY = "category"
CHARSET = "charset"
GROUPREF = "groupref"
GROUPREF_IGNORE = "groupref_ignore"
GROUPREF_EXISTS = "groupref_exists"
IN = "in"
IN_IGNORE = "in_ignore"
INFO = "info"
JUMP = "jump"
LITERAL = "literal"
LITERAL_IGNORE = "literal_ignore"
MARK = "mark"
MAX_REPEAT = "max_repeat"
MAX_UNTIL = "max_until"
MIN_REPEAT = "min_repeat"
MIN_UNTIL = "min_until"
NEGATE = "negate"
NOT_LITERAL = "not_literal"
NOT_LITERAL_IGNORE = "not_literal_ignore"
RANGE = "range"
REPEAT = "repeat"
REPEAT_ONE = "repeat_one"
SUBPATTERN = "subpattern"
MIN_REPEAT_ONE = "min_repeat_one"
# positions
AT_BEGINNING = "at_beginning"
AT_BEGINNING_LINE = "at_beginning_line"
AT_BEGINNING_STRING = "at_beginning_string"
AT_BOUNDARY = "at_boundary"
AT_NON_BOUNDARY = "at_non_boundary"
AT_END = "at_end"
AT_END_LINE = "at_end_line"
AT_END_STRING = "at_end_string"
AT_LOC_BOUNDARY = "at_loc_boundary"
AT_LOC_NON_BOUNDARY = "at_loc_non_boundary"
AT_UNI_BOUNDARY = "at_uni_boundary"
AT_UNI_NON_BOUNDARY = "at_uni_non_boundary"
# categories
CATEGORY_DIGIT = "category_digit"
CATEGORY_NOT_DIGIT = "category_not_digit"
CATEGORY_SPACE = "category_space"
CATEGORY_NOT_SPACE = "category_not_space"
CATEGORY_WORD = "category_word"
CATEGORY_NOT_WORD = "category_not_word"
CATEGORY_LINEBREAK = "category_linebreak"
CATEGORY_NOT_LINEBREAK = "category_not_linebreak"
CATEGORY_LOC_WORD = "category_loc_word"
CATEGORY_LOC_NOT_WORD = "category_loc_not_word"
CATEGORY_UNI_DIGIT = "category_uni_digit"
CATEGORY_UNI_NOT_DIGIT = "category_uni_not_digit"
CATEGORY_UNI_SPACE = "category_uni_space"
CATEGORY_UNI_NOT_SPACE = "category_uni_not_space"
CATEGORY_UNI_WORD = "category_uni_word"
CATEGORY_UNI_NOT_WORD = "category_uni_not_word"
CATEGORY_UNI_LINEBREAK = "category_uni_linebreak"
CATEGORY_UNI_NOT_LINEBREAK = "category_uni_not_linebreak"
OPCODES = [
# failure=0 success=1 (just because it looks better that way :-)
FAILURE, SUCCESS,
ANY, ANY_ALL,
ASSERT, ASSERT_NOT,
AT,
BRANCH,
CALL,
CATEGORY,
CHARSET, BIGCHARSET,
GROUPREF, GROUPREF_EXISTS, GROUPREF_IGNORE,
IN, IN_IGNORE,
INFO,
JUMP,
LITERAL, LITERAL_IGNORE,
MARK,
MAX_UNTIL,
MIN_UNTIL,
NOT_LITERAL, NOT_LITERAL_IGNORE,
NEGATE,
RANGE,
REPEAT,
REPEAT_ONE,
SUBPATTERN,
MIN_REPEAT_ONE
]
ATCODES = [
AT_BEGINNING, AT_BEGINNING_LINE, AT_BEGINNING_STRING, AT_BOUNDARY,
AT_NON_BOUNDARY, AT_END, AT_END_LINE, AT_END_STRING,
AT_LOC_BOUNDARY, AT_LOC_NON_BOUNDARY, AT_UNI_BOUNDARY,
AT_UNI_NON_BOUNDARY
]
CHCODES = [
CATEGORY_DIGIT, CATEGORY_NOT_DIGIT, CATEGORY_SPACE,
CATEGORY_NOT_SPACE, CATEGORY_WORD, CATEGORY_NOT_WORD,
CATEGORY_LINEBREAK, CATEGORY_NOT_LINEBREAK, CATEGORY_LOC_WORD,
CATEGORY_LOC_NOT_WORD, CATEGORY_UNI_DIGIT, CATEGORY_UNI_NOT_DIGIT,
CATEGORY_UNI_SPACE, CATEGORY_UNI_NOT_SPACE, CATEGORY_UNI_WORD,
CATEGORY_UNI_NOT_WORD, CATEGORY_UNI_LINEBREAK,
CATEGORY_UNI_NOT_LINEBREAK
]
def | (list):
d = {}
i = 0
for item in list:
d[item] = i
i = i + 1
return d
OPCODES = makedict(OPCODES)
ATCODES = makedict(ATCODES)
CHCODES = makedict(CHCODES)
# replacement operations for "ignore case" mode
OP_IGNORE = {
GROUPREF: GROUPREF_IGNORE,
IN: IN_IGNORE,
LITERAL: LITERAL_IGNORE,
NOT_LITERAL: NOT_LITERAL_IGNORE
}
AT_MULTILINE = {
AT_BEGINNING: AT_BEGINNING_LINE,
AT_END: AT_END_LINE
}
AT_LOCALE = {
AT_BOUNDARY: AT_LOC_BOUNDARY,
AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY
}
AT_UNICODE = {
AT_BOUNDARY: AT_UNI_BOUNDARY,
AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY
}
CH_LOCALE = {
CATEGORY_DIGIT: CATEGORY_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_SPACE,
CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE,
CATEGORY_WORD: CATEGORY_LOC_WORD,
CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK
}
CH_UNICODE = {
CATEGORY_DIGIT: CATEGORY_UNI_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_UNI_SPACE,
CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE,
CATEGORY_WORD: CATEGORY_UNI_WORD,
CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK
}
# flags
SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking)
SRE_FLAG_IGNORECASE = 2 # case insensitive
SRE_FLAG_LOCALE = 4 # honour system locale
SRE_FLAG_MULTILINE = 8 # treat target as multiline string
SRE_FLAG_DOTALL = 16 # treat target as a single string
SRE_FLAG_UNICODE = 32 # use unicode "locale"
SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments
SRE_FLAG_DEBUG = 128 # debugging
SRE_FLAG_ASCII = 256 # use ascii "locale"
# flags for INFO primitive
SRE_INFO_PREFIX = 1 # has prefix
SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix)
SRE_INFO_CHARSET = 4 # pattern starts with character from given set
if __name__ == "__main__":
def dump(f, d, prefix):
items = sorted(d.items(), key=lambda a: a[1])
for k, v in items:
f.write("#define %s_%s %s\n" % (prefix, k.upper(), v))
f = open("sre_constants.h", "w")
f.write("""\
/*
* Secret Labs' Regular Expression Engine
*
* regular expression matching engine
*
* NOTE: This file is generated by sre_constants.py. If you need
* to change anything in here, edit sre_constants.py and run it.
*
* Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.
*
* See the _sre.c file for information on usage and redistribution.
*/
""")
f.write("#define SRE_MAGIC %d\n" % MAGIC)
dump(f, OPCODES, "SRE_OP")
dump(f, ATCODES, "SRE")
dump(f, CHCODES, "SRE")
f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE)
f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE)
f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE)
f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE)
f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL)
f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE)
f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE)
f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX)
f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL)
f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET)
f.close()
print("done")
| makedict | identifier_name |
ApplicationRegisterHelpers.js | /**
* Copyright (c) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function () { |
App.factory('ApplicationRegisterHelpers', function(ApplicationRegisterResource, NotificationService) {
return {
getOfferingsOfApp: function(appGuid) {
return ApplicationRegisterResource
.withErrorMessage('Failed to retrieve service offerings from catalog')
.getClonedApplication(appGuid)
.then(function (response) {
return response.plain();
});
},
registerApp: function(request) {
return ApplicationRegisterResource
.withErrorMessage('Failed to register application in marketplace')
.registerApplication(request)
.then(function (response) {
NotificationService.success('Application has been registered in marketplace');
return response.plain();
});
}
};
});
})(); | 'use strict'; | random_line_split |
0029_auto_20170226_0745.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import umibukela.models
class | (migrations.Migration):
dependencies = [
('umibukela', '0028_cycle_materials'),
]
operations = [
migrations.CreateModel(
name='ProgrammeKoboRefreshToken',
fields=[
('programme', models.OneToOneField(related_name='kobo_refresh_token', primary_key=True, serialize=False, to='umibukela.Programme')),
('token', models.TextField()),
],
),
migrations.RenameModel(
old_name='KoboRefreshToken',
new_name='UserKoboRefreshToken',
),
migrations.AddField(
model_name='cycle',
name='auto_import',
field=models.BooleanField(default=False),
preserve_default=False,
),
migrations.AddField(
model_name='cycleresultset',
name='site_option_name',
field=models.TextField(default=''),
preserve_default=False,
),
migrations.AlterField(
model_name='cycle',
name='materials',
field=models.FileField(null=True, upload_to=umibukela.models.cycle_materials_filename, blank=True),
),
migrations.AlterField(
model_name='cycleresultset',
name='cycle',
field=models.ForeignKey(related_name='cycle_result_sets', to='umibukela.Cycle'),
),
migrations.AlterField(
model_name='cycleresultset',
name='survey',
field=models.ForeignKey(related_name='cycle_result_sets', blank=True, to='umibukela.Survey', null=True),
),
]
| Migration | identifier_name |
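# --- Illustrative aside, not part of the dataset rows above or below ---
# The migration above renames KoboRefreshToken to UserKoboRefreshToken, adds
# cycle.auto_import and cycleresultset.site_option_name, and loosens several
# fields. It would be applied with Django's standard tooling, e.g.:
#   python manage.py migrate umibukela 0029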
0029_auto_20170226_0745.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import umibukela.models
class Migration(migrations.Migration):
dependencies = [
('umibukela', '0028_cycle_materials'),
]
operations = [
migrations.CreateModel(
name='ProgrammeKoboRefreshToken',
fields=[
('programme', models.OneToOneField(related_name='kobo_refresh_token', primary_key=True, serialize=False, to='umibukela.Programme')),
('token', models.TextField()),
],
),
migrations.RenameModel(
old_name='KoboRefreshToken',
new_name='UserKoboRefreshToken',
),
migrations.AddField(
model_name='cycle',
name='auto_import',
field=models.BooleanField(default=False),
preserve_default=False,
),
migrations.AddField(
model_name='cycleresultset',
name='site_option_name',
field=models.TextField(default=''),
preserve_default=False,
),
migrations.AlterField(
model_name='cycle',
name='materials',
field=models.FileField(null=True, upload_to=umibukela.models.cycle_materials_filename, blank=True),
),
migrations.AlterField(
model_name='cycleresultset',
name='cycle',
field=models.ForeignKey(related_name='cycle_result_sets', to='umibukela.Cycle'),
),
migrations.AlterField( | model_name='cycleresultset',
name='survey',
field=models.ForeignKey(related_name='cycle_result_sets', blank=True, to='umibukela.Survey', null=True),
),
] | random_line_split |
|
0029_auto_20170226_0745.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import umibukela.models
class Migration(migrations.Migration):
| dependencies = [
('umibukela', '0028_cycle_materials'),
]
operations = [
migrations.CreateModel(
name='ProgrammeKoboRefreshToken',
fields=[
('programme', models.OneToOneField(related_name='kobo_refresh_token', primary_key=True, serialize=False, to='umibukela.Programme')),
('token', models.TextField()),
],
),
migrations.RenameModel(
old_name='KoboRefreshToken',
new_name='UserKoboRefreshToken',
),
migrations.AddField(
model_name='cycle',
name='auto_import',
field=models.BooleanField(default=False),
preserve_default=False,
),
migrations.AddField(
model_name='cycleresultset',
name='site_option_name',
field=models.TextField(default=''),
preserve_default=False,
),
migrations.AlterField(
model_name='cycle',
name='materials',
field=models.FileField(null=True, upload_to=umibukela.models.cycle_materials_filename, blank=True),
),
migrations.AlterField(
model_name='cycleresultset',
name='cycle',
field=models.ForeignKey(related_name='cycle_result_sets', to='umibukela.Cycle'),
),
migrations.AlterField(
model_name='cycleresultset',
name='survey',
field=models.ForeignKey(related_name='cycle_result_sets', blank=True, to='umibukela.Survey', null=True),
),
] | identifier_body |
|
sender.service.ts | * Copyright (C) 2017-2018 Patrice Le Gurun
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { Injectable } from '@angular/core';
import { HttpClient, HttpResponse } from '@angular/common/http';
import { throwError } from 'rxjs';
import { Observable } from 'rxjs/Observable';
import { catchError } from 'rxjs/operators/catchError';
import 'rxjs/add/operator/catch';
import 'rxjs/add/operator/map';
import { Sender } from '../common/sender';
const senderUrl = 'api/sender';
@Injectable()
export class SenderService {
constructor( private httpClient: HttpClient ) { }
getSenders(): Observable<Sender[]> {
return this.httpClient.get<Sender[]>( senderUrl )
.catch( this.handleError );
}
private handleError( error: HttpResponse<any>) {
let errMsg: string;
const body = error.body.json() || '';
const err = JSON.stringify( body );
errMsg = `${error.status} - ${error.statusText || ''} ${err}`;
console.error( errMsg );
return throwError( errMsg );
}
} | /**
* @license | random_line_split |
|
sender.service.ts | /**
* @license
* Copyright (C) 2017-2018 Patrice Le Gurun
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { Injectable } from '@angular/core';
import { HttpClient, HttpResponse } from '@angular/common/http';
import { throwError } from 'rxjs';
import { Observable } from 'rxjs/Observable';
import { catchError } from 'rxjs/operators/catchError';
import 'rxjs/add/operator/catch';
import 'rxjs/add/operator/map';
import { Sender } from '../common/sender';
const senderUrl = 'api/sender';
@Injectable()
export class SenderService {
constructor( private httpClient: HttpClient ) { }
getSenders(): Observable<Sender[]> {
return this.httpClient.get<Sender[]>( senderUrl )
.catch( this.handleError );
}
private handleError( error: HttpResponse<any>) |
}
| {
let errMsg: string;
const body = error.body.json() || '';
const err = JSON.stringify( body );
errMsg = `${error.status} - ${error.statusText || ''} ${err}`;
console.error( errMsg );
return throwError( errMsg );
} | identifier_body |
sender.service.ts | /**
* @license
* Copyright (C) 2017-2018 Patrice Le Gurun
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { Injectable } from '@angular/core';
import { HttpClient, HttpResponse } from '@angular/common/http';
import { throwError } from 'rxjs';
import { Observable } from 'rxjs/Observable';
import { catchError } from 'rxjs/operators/catchError';
import 'rxjs/add/operator/catch';
import 'rxjs/add/operator/map';
import { Sender } from '../common/sender';
const senderUrl = 'api/sender';
@Injectable()
export class SenderService {
constructor( private httpClient: HttpClient ) { }
| (): Observable<Sender[]> {
return this.httpClient.get<Sender[]>( senderUrl )
.catch( this.handleError );
}
private handleError( error: HttpResponse<any>) {
let errMsg: string;
const body = error.body.json() || '';
const err = JSON.stringify( body );
errMsg = `${error.status} - ${error.statusText || ''} ${err}`;
console.error( errMsg );
return throwError( errMsg );
}
}
| getSenders | identifier_name |
mastodon.js | import React from 'react';
import { Provider } from 'react-redux';
import PropTypes from 'prop-types';
import configureStore from '../store/configureStore';
import { BrowserRouter, Route } from 'react-router-dom';
import { ScrollContext } from 'react-router-scroll-4';
import UI from '../features/ui';
import { fetchCustomEmojis } from '../actions/custom_emojis';
import { hydrateStore } from '../actions/store';
import { connectUserStream } from '../actions/streaming';
import { IntlProvider, addLocaleData } from 'react-intl';
import { getLocale } from '../locales';
import initialState from '../initial_state';
import ErrorBoundary from '../components/error_boundary';
const { localeData, messages } = getLocale();
addLocaleData(localeData);
export const store = configureStore();
const hydrateAction = hydrateStore(initialState);
store.dispatch(hydrateAction);
store.dispatch(fetchCustomEmojis());
const createIdentityContext = state => ({
signedIn: !!state.meta.me,
accountId: state.meta.me,
accessToken: state.meta.access_token,
});
export default class Mastodon extends React.PureComponent {
static propTypes = {
locale: PropTypes.string.isRequired,
};
static childContextTypes = {
identity: PropTypes.shape({
signedIn: PropTypes.bool.isRequired,
accountId: PropTypes.string,
accessToken: PropTypes.string,
}).isRequired,
};
identity = createIdentityContext(initialState);
getChildContext() {
return {
identity: this.identity,
};
}
componentDidMount() {
if (this.identity.signedIn) {
this.disconnect = store.dispatch(connectUserStream());
}
}
componentWillUnmount () {
if (this.disconnect) |
}
shouldUpdateScroll (prevRouterProps, { location }) {
return !(location.state?.mastodonModalKey && location.state?.mastodonModalKey !== prevRouterProps?.location?.state?.mastodonModalKey);
}
render () {
const { locale } = this.props;
return (
<IntlProvider locale={locale} messages={messages}>
<Provider store={store}>
<ErrorBoundary>
<BrowserRouter basename='/web'>
<ScrollContext shouldUpdateScroll={this.shouldUpdateScroll}>
<Route path='/' component={UI} />
</ScrollContext>
</BrowserRouter>
</ErrorBoundary>
</Provider>
</IntlProvider>
);
}
}
| {
this.disconnect();
this.disconnect = null;
} | conditional_block |
mastodon.js | import React from 'react';
import { Provider } from 'react-redux';
import PropTypes from 'prop-types';
import configureStore from '../store/configureStore';
import { BrowserRouter, Route } from 'react-router-dom';
import { ScrollContext } from 'react-router-scroll-4';
import UI from '../features/ui';
import { fetchCustomEmojis } from '../actions/custom_emojis';
import { hydrateStore } from '../actions/store';
import { connectUserStream } from '../actions/streaming';
import { IntlProvider, addLocaleData } from 'react-intl';
import { getLocale } from '../locales';
import initialState from '../initial_state';
import ErrorBoundary from '../components/error_boundary';
const { localeData, messages } = getLocale();
addLocaleData(localeData);
export const store = configureStore();
const hydrateAction = hydrateStore(initialState);
store.dispatch(hydrateAction);
store.dispatch(fetchCustomEmojis());
const createIdentityContext = state => ({
signedIn: !!state.meta.me,
accountId: state.meta.me,
accessToken: state.meta.access_token,
});
export default class Mastodon extends React.PureComponent {
static propTypes = {
locale: PropTypes.string.isRequired,
};
static childContextTypes = {
identity: PropTypes.shape({
signedIn: PropTypes.bool.isRequired,
accountId: PropTypes.string,
accessToken: PropTypes.string,
}).isRequired,
};
identity = createIdentityContext(initialState);
| () {
return {
identity: this.identity,
};
}
componentDidMount() {
if (this.identity.signedIn) {
this.disconnect = store.dispatch(connectUserStream());
}
}
componentWillUnmount () {
if (this.disconnect) {
this.disconnect();
this.disconnect = null;
}
}
shouldUpdateScroll (prevRouterProps, { location }) {
return !(location.state?.mastodonModalKey && location.state?.mastodonModalKey !== prevRouterProps?.location?.state?.mastodonModalKey);
}
render () {
const { locale } = this.props;
return (
<IntlProvider locale={locale} messages={messages}>
<Provider store={store}>
<ErrorBoundary>
<BrowserRouter basename='/web'>
<ScrollContext shouldUpdateScroll={this.shouldUpdateScroll}>
<Route path='/' component={UI} />
</ScrollContext>
</BrowserRouter>
</ErrorBoundary>
</Provider>
</IntlProvider>
);
}
}
| getChildContext | identifier_name |
mastodon.js | import React from 'react';
import { Provider } from 'react-redux';
import PropTypes from 'prop-types';
import configureStore from '../store/configureStore';
import { BrowserRouter, Route } from 'react-router-dom';
import { ScrollContext } from 'react-router-scroll-4';
import UI from '../features/ui';
import { fetchCustomEmojis } from '../actions/custom_emojis';
import { hydrateStore } from '../actions/store';
import { connectUserStream } from '../actions/streaming';
import { IntlProvider, addLocaleData } from 'react-intl';
import { getLocale } from '../locales';
import initialState from '../initial_state';
import ErrorBoundary from '../components/error_boundary';
const { localeData, messages } = getLocale();
addLocaleData(localeData);
export const store = configureStore();
const hydrateAction = hydrateStore(initialState);
store.dispatch(hydrateAction);
store.dispatch(fetchCustomEmojis());
const createIdentityContext = state => ({
signedIn: !!state.meta.me,
accountId: state.meta.me,
accessToken: state.meta.access_token,
});
export default class Mastodon extends React.PureComponent {
static propTypes = {
locale: PropTypes.string.isRequired,
};
static childContextTypes = {
identity: PropTypes.shape({
signedIn: PropTypes.bool.isRequired,
accountId: PropTypes.string,
accessToken: PropTypes.string,
}).isRequired,
};
| return {
identity: this.identity,
};
}
componentDidMount() {
if (this.identity.signedIn) {
this.disconnect = store.dispatch(connectUserStream());
}
}
componentWillUnmount () {
if (this.disconnect) {
this.disconnect();
this.disconnect = null;
}
}
shouldUpdateScroll (prevRouterProps, { location }) {
return !(location.state?.mastodonModalKey && location.state?.mastodonModalKey !== prevRouterProps?.location?.state?.mastodonModalKey);
}
render () {
const { locale } = this.props;
return (
<IntlProvider locale={locale} messages={messages}>
<Provider store={store}>
<ErrorBoundary>
<BrowserRouter basename='/web'>
<ScrollContext shouldUpdateScroll={this.shouldUpdateScroll}>
<Route path='/' component={UI} />
</ScrollContext>
</BrowserRouter>
</ErrorBoundary>
</Provider>
</IntlProvider>
);
}
} | identity = createIdentityContext(initialState);
getChildContext() { | random_line_split |
input.js | /**
* @license
* Visual Blocks Editor
*
* Copyright 2012 Google Inc.
* https://developers.google.com/blockly/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @fileoverview Object representing an input (value, statement, or dummy).
* @author [email protected] (Neil Fraser)
*/
'use strict';
goog.provide('Blockly.Input');
// TODO(scr): Fix circular dependencies
// goog.require('Blockly.Block');
goog.require('Blockly.Connection');
goog.require('Blockly.FieldLabel');
goog.require('goog.asserts');
/**
* Class for an input with an optional field.
* @param {number} type The type of the input.
* @param {string} name Language-neutral identifier which may be used to find this
* input again.
* @param {!Blockly.Block} block The block containing this input.
* @param {Blockly.Connection} connection Optional connection for this input.
* @constructor
*/
Blockly.Input = function(type, name, block, connection) {
this.type = type;
this.name = name;
this.sourceBlock_ = block;
this.connection = connection;
this.fieldRow = [];
this.align = Blockly.ALIGN_LEFT;
this.visible_ = true;
};
/**
* Add an item to the end of the input's field row.
* @param {string|!Blockly.Field} field Something to add as a field.
* @param {string} opt_name Language-neutral identifier which may be used to find
* this field again. Should be unique to the host block.
* @return {!Blockly.Input} The input being appended to (to allow chaining).
*/
Blockly.Input.prototype.appendField = function(field, opt_name) {
// An empty string, null or undefined generates no field, unless the field is named.
if (!field && !opt_name) {
return this;
}
// Generate a FieldLabel when given a plain text field.
if (goog.isString(field)) {
field = new Blockly.FieldLabel(/** @type {string} */ (field));
}
if (this.sourceBlock_.svg_) {
field.init(this.sourceBlock_);
}
field.name = opt_name;
if (field.prefixField) {
// Add any prefix.
this.appendField(field.prefixField);
}
// Add the field to the field row.
this.fieldRow.push(field);
if (field.suffixField) {
// Add any suffix.
this.appendField(field.suffixField);
}
if (this.sourceBlock_.rendered) {
this.sourceBlock_.render();
// Adding a field will cause the block to change shape.
this.sourceBlock_.bumpNeighbours_();
}
return this;
};
/**
* Add an item to the end of the input's field row.
* @param {*} field Something to add as a field.
* @param {string} opt_name Language-neutral identifier which may be used to find
* this field again. Should be unique to the host block.
* @return {!Blockly.Input} The input being appended to (to allow chaining).
* @deprecated December 2013
*/
Blockly.Input.prototype.appendTitle = function(field, opt_name) {
console.log('Deprecated call to appendTitle, use appendField instead.');
return this.appendField(field, opt_name);
};
/**
* Remove a field from this input.
* @param {string} name The name of the field.
* @throws {goog.asserts.AssertionError} if the field is not present.
*/
Blockly.Input.prototype.removeField = function(name) {
for (var i = 0, field; field = this.fieldRow[i]; i++) {
if (field.name === name) {
field.dispose();
this.fieldRow.splice(i, 1);
if (this.sourceBlock_.rendered) {
this.sourceBlock_.render();
// Removing a field will cause the block to change shape.
this.sourceBlock_.bumpNeighbours_();
}
return;
}
}
goog.asserts.fail('Field "%s" not found.', name);
};
/**
* Gets whether this input is visible or not.
* @return {boolean} True if visible.
*/
Blockly.Input.prototype.isVisible = function() {
return this.visible_;
};
/**
* Sets whether this input is visible or not.
* @param {boolean} visible True if visible.
* @return {!Array.<!Blockly.Block>} List of blocks to render.
*/
Blockly.Input.prototype.setVisible = function(visible) {
var renderList = [];
if (this.visible_ == visible) {
return renderList;
}
this.visible_ = visible;
var display = visible ? 'block' : 'none';
for (var y = 0, field; field = this.fieldRow[y]; y++) {
field.setVisible(visible);
}
if (this.connection) {
// Has a connection.
if (visible) {
renderList = this.connection.unhideAll();
} else {
this.connection.hideAll();
}
var child = this.connection.targetBlock();
if (child) {
child.svg_.getRootElement().style.display = display;
if (!visible) {
child.rendered = false;
}
}
}
return renderList;
};
/**
* Change a connection's compatibility.
* @param {string|Array.<string>|null} check Compatible value type or
* list of value types. Null if all types are compatible.
* @return {!Blockly.Input} The input being modified (to allow chaining).
*/
Blockly.Input.prototype.setCheck = function(check) {
if (!this.connection) {
throw new Error('This input does not have a connection.');
}
this.connection.setCheck(check);
return this;
};
/**
* Change the alignment of the connection's field(s).
* @param {number} align One of Blockly.ALIGN_LEFT, ALIGN_CENTRE, ALIGN_RIGHT.
* In RTL mode directions are reversed, and ALIGN_RIGHT aligns to the left.
* @return {!Blockly.Input} The input being modified (to allow chaining).
*/
Blockly.Input.prototype.setAlign = function(align) {
this.align = align;
if (this.sourceBlock_.rendered) { | /**
* Initialize the fields on this input.
*/
Blockly.Input.prototype.init = function() {
for (var x = 0; x < this.fieldRow.length; x++) {
this.fieldRow[x].init(this.sourceBlock_);
}
};
/**
* Sever all links to this input.
*/
Blockly.Input.prototype.dispose = function() {
for (var i = 0, field; field = this.fieldRow[i]; i++) {
field.dispose();
}
if (this.connection) {
this.connection.dispose();
}
this.sourceBlock_ = null;
}; | this.sourceBlock_.render();
}
return this;
};
| random_line_split |
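Since appendField, setCheck and setAlign all return the input to allow chaining, a typical block definition built on this class looks roughly like the sketch below (block and field names are hypothetical; assumes the classic Blockly block-definition API where appendValueInput returns a Blockly.Input):

Blockly.Blocks['scale_by'] = {
  init: function() {
    // The chain below exercises appendField/setCheck/setAlign as documented above.
    this.appendValueInput('FACTOR')
        .setCheck('Number')
        .setAlign(Blockly.ALIGN_RIGHT)
        .appendField('scale by');
    this.setOutput(true, 'Number');
  }
};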
input.js | /**
* @license
* Visual Blocks Editor
*
* Copyright 2012 Google Inc.
* https://developers.google.com/blockly/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @fileoverview Object representing an input (value, statement, or dummy).
* @author [email protected] (Neil Fraser)
*/
'use strict';
goog.provide('Blockly.Input');
// TODO(scr): Fix circular dependencies
// goog.require('Blockly.Block');
goog.require('Blockly.Connection');
goog.require('Blockly.FieldLabel');
goog.require('goog.asserts');
/**
* Class for an input with an optional field.
* @param {number} type The type of the input.
* @param {string} name Language-neutral identifier which may be used to find this
* input again.
* @param {!Blockly.Block} block The block containing this input.
* @param {Blockly.Connection} connection Optional connection for this input.
* @constructor
*/
Blockly.Input = function(type, name, block, connection) {
this.type = type;
this.name = name;
this.sourceBlock_ = block;
this.connection = connection;
this.fieldRow = [];
this.align = Blockly.ALIGN_LEFT;
this.visible_ = true;
};
/**
* Add an item to the end of the input's field row.
* @param {string|!Blockly.Field} field Something to add as a field.
* @param {string} opt_name Language-neutral identifier which may used to find
* this field again. Should be unique to the host block.
* @return {!Blockly.Input} The input being appended to (to allow chaining).
*/
Blockly.Input.prototype.appendField = function(field, opt_name) {
// An empty string, null or undefined generates no field, unless the field is named.
if (!field && !opt_name) {
return this;
}
// Generate a FieldLabel when given a plain text field.
if (goog.isString(field)) {
field = new Blockly.FieldLabel(/** @type {string} */ (field));
}
if (this.sourceBlock_.svg_) {
field.init(this.sourceBlock_);
}
field.name = opt_name;
if (field.prefixField) {
// Add any prefix.
this.appendField(field.prefixField);
}
// Add the field to the field row.
this.fieldRow.push(field);
if (field.suffixField) {
// Add any suffix.
this.appendField(field.suffixField);
}
if (this.sourceBlock_.rendered) |
return this;
};
/**
* Add an item to the end of the input's field row.
* @param {*} field Something to add as a field.
* @param {string} opt_name Language-neutral identifier which may be used to find
* this field again. Should be unique to the host block.
* @return {!Blockly.Input} The input being appended to (to allow chaining).
* @deprecated December 2013
*/
Blockly.Input.prototype.appendTitle = function(field, opt_name) {
console.log('Deprecated call to appendTitle, use appendField instead.');
return this.appendField(field, opt_name);
};
/**
* Remove a field from this input.
* @param {string} name The name of the field.
* @throws {goog.asserts.AssertionError} if the field is not present.
*/
Blockly.Input.prototype.removeField = function(name) {
for (var i = 0, field; field = this.fieldRow[i]; i++) {
if (field.name === name) {
field.dispose();
this.fieldRow.splice(i, 1);
if (this.sourceBlock_.rendered) {
this.sourceBlock_.render();
// Removing a field will cause the block to change shape.
this.sourceBlock_.bumpNeighbours_();
}
return;
}
}
goog.asserts.fail('Field "%s" not found.', name);
};
/**
* Gets whether this input is visible or not.
* @return {boolean} True if visible.
*/
Blockly.Input.prototype.isVisible = function() {
return this.visible_;
};
/**
* Sets whether this input is visible or not.
* @param {boolean} visible True if visible.
* @return {!Array.<!Blockly.Block>} List of blocks to render.
*/
Blockly.Input.prototype.setVisible = function(visible) {
var renderList = [];
if (this.visible_ == visible) {
return renderList;
}
this.visible_ = visible;
var display = visible ? 'block' : 'none';
for (var y = 0, field; field = this.fieldRow[y]; y++) {
field.setVisible(visible);
}
if (this.connection) {
// Has a connection.
if (visible) {
renderList = this.connection.unhideAll();
} else {
this.connection.hideAll();
}
var child = this.connection.targetBlock();
if (child) {
child.svg_.getRootElement().style.display = display;
if (!visible) {
child.rendered = false;
}
}
}
return renderList;
};
/**
* Change a connection's compatibility.
* @param {string|Array.<string>|null} check Compatible value type or
* list of value types. Null if all types are compatible.
* @return {!Blockly.Input} The input being modified (to allow chaining).
*/
Blockly.Input.prototype.setCheck = function(check) {
if (!this.connection) {
throw new Error('This input does not have a connection.');
}
this.connection.setCheck(check);
return this;
};
/**
* Change the alignment of the connection's field(s).
* @param {number} align One of Blockly.ALIGN_LEFT, ALIGN_CENTRE, ALIGN_RIGHT.
* In RTL mode directions are reversed, and ALIGN_RIGHT aligns to the left.
* @return {!Blockly.Input} The input being modified (to allow chaining).
*/
Blockly.Input.prototype.setAlign = function(align) {
this.align = align;
if (this.sourceBlock_.rendered) {
this.sourceBlock_.render();
}
return this;
};
/**
* Initialize the fields on this input.
*/
Blockly.Input.prototype.init = function() {
for (var x = 0; x < this.fieldRow.length; x++) {
this.fieldRow[x].init(this.sourceBlock_);
}
};
/**
* Sever all links to this input.
*/
Blockly.Input.prototype.dispose = function() {
for (var i = 0, field; field = this.fieldRow[i]; i++) {
field.dispose();
}
if (this.connection) {
this.connection.dispose();
}
this.sourceBlock_ = null;
};
| {
this.sourceBlock_.render();
// Adding a field will cause the block to change shape.
this.sourceBlock_.bumpNeighbours_();
} | conditional_block |
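Note that setVisible does not re-render anything itself; it returns the connected blocks that still need it, so a caller is expected to finish the job. A sketch (the variable names are hypothetical):

// Sketch: hide an input, then render whatever setVisible reported.
var renderList = input.setVisible(false);
for (var i = 0; i < renderList.length; i++) {
  renderList[i].render();
}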
goceditor.py | #coding=UTF-8
"""
This file is part of GObjectCreator.
GObjectCreator is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
GObjectCreator is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with GObjectCreator (see file COPYING). If not, see
<http://www.gnu.org/licenses/>.
"""
import os
import gettext
_ = gettext.gettext
import locale
import pygtk
pygtk.require("2.0")
import gtk
from documents_view import DocumentsView
from documents_model import DocumentsModel
from resources.util import get_resource_path
from settings_dialog import SettingsDialog
import gobject_creator
class GOCEditor(object):
"""
Simple editor that supports the creation of meta definition
files for GObjectCreator
"""
TRANSL_DOMAIN = "goceditor"
def __init__(self, start_files=[]):
locale_dir = os.path.dirname(__file__)
locale_dir = os.path.abspath(locale_dir)
locale_dir += os.sep + "locale"
locale.setlocale(locale.LC_ALL, "")
locale.bindtextdomain(self.TRANSL_DOMAIN, locale_dir)
gettext.bindtextdomain(self.TRANSL_DOMAIN, locale_dir)
gettext.textdomain(self.TRANSL_DOMAIN)
self._builder = gtk.Builder()
self._builder.set_translation_domain(self.TRANSL_DOMAIN)
path = get_resource_path("goceditor.ui")
self._builder.add_from_file(path)
self._create_widgets()
self._builder.connect_signals(self)
for start_file in start_files:
if not os.path.exists(start_file):
fd = open(start_file, "w")
fd.close()
self._docs_model.load_document(start_file)
def run(self):
window = self._builder.get_object("main_window")
window.show_all()
gtk.main()
def on_file_new(self, *args):
|
def on_file_open(self, *args):
dialog = gtk.FileChooserDialog(
action = gtk.FILE_CHOOSER_ACTION_OPEN,
buttons = (_("Cancel"), gtk.RESPONSE_CANCEL,
_("Open"), gtk.RESPONSE_OK)
)
if dialog.run() == gtk.RESPONSE_OK:
file_name = dialog.get_filename()
else:
file_name = None
dialog.destroy()
if file_name:
self._docs_model.load_document(file_name)
def on_file_save(self, *args):
idx = self._documents.get_current_index()
if idx < 0:
return
old_path = self._docs_model.get_file_path(idx)
if os.path.exists(old_path):
new_path = old_path
else:
dialog = gtk.FileChooserDialog(
action = gtk.FILE_CHOOSER_ACTION_SAVE,
buttons = (_("Cancel"), gtk.RESPONSE_CANCEL,
_("Save"), gtk.RESPONSE_OK)
)
dialog.set_current_name("untitled.goc")
dialog.set_do_overwrite_confirmation(True)
if dialog.run() == gtk.RESPONSE_OK:
new_path = dialog.get_filename()
else:
new_path = None
dialog.destroy()
if new_path:
content = self._documents.get_content(idx)
self._docs_model.save_document(idx, new_path, content)
def on_file_save_as(self, *args):
idx = self._documents.get_current_index()
if idx < 0:
return
current_path = self._docs_model.get_file_path(idx)
if not current_path:
current_path = "untitled.goc"
dialog = gtk.FileChooserDialog(
action = gtk.FILE_CHOOSER_ACTION_SAVE,
buttons = (_("Cancel"), gtk.RESPONSE_CANCEL,
_("Save"), gtk.RESPONSE_OK)
)
dialog.set_current_name(os.path.basename(current_path))
dialog.set_do_overwrite_confirmation(True)
if dialog.run() == gtk.RESPONSE_OK:
new_path = dialog.get_filename()
else:
new_path = None
dialog.destroy()
if new_path:
content = self._documents.get_content(idx)
self._docs_model.save_document(idx, new_path, content)
def on_file_quit(self, *args):
gtk.main_quit()
def on_edit_cut(self, *args):
self._documents.exec_action("cut")
def on_edit_copy(self, *args):
self._documents.exec_action("copy")
def on_edit_paste(self, *args):
self._documents.exec_action("paste")
def on_edit_settings(self, *args):
SettingsDialog().run()
def on_help_info(self, *args):
builder = gtk.Builder()
builder.set_translation_domain(self.TRANSL_DOMAIN)
builder.add_from_file(get_resource_path("gocedit_info.ui"))
dialog = builder.get_object("info_dialog")
path = get_resource_path("hand_mit_stift_296x300.png")
logo = gtk.gdk.pixbuf_new_from_file(path)
dialog.set_logo(logo)
dialog.set_version(gobject_creator.VERSION)
dialog.run()
dialog.destroy()
def _create_widgets(self):
self._docs_model = DocumentsModel()
self._documents = DocumentsView(self._docs_model)
self._documents.widget.show()
vbox = self._builder.get_object("top_vbox")
vbox.show()
vbox.pack_start(self._documents.widget)
| self._docs_model.new_document() | identifier_body |
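on_file_save and on_file_save_as above duplicate the save-dialog dance; a hedged refactoring sketch (the helper name is hypothetical, behaviour is assumed unchanged):

def _ask_save_path(self, suggested_name):
    """Run a save dialog; return the chosen path or None if cancelled."""
    dialog = gtk.FileChooserDialog(
        action=gtk.FILE_CHOOSER_ACTION_SAVE,
        buttons=(_("Cancel"), gtk.RESPONSE_CANCEL,
                 _("Save"), gtk.RESPONSE_OK)
    )
    dialog.set_current_name(suggested_name)
    dialog.set_do_overwrite_confirmation(True)
    new_path = dialog.get_filename() if dialog.run() == gtk.RESPONSE_OK else None
    dialog.destroy()
    return new_path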
goceditor.py | #coding=UTF-8
"""
This file is part of GObjectCreator.
GObjectCreator is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
GObjectCreator is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with GObjectCreator (see file COPYING). If not, see
<http://www.gnu.org/licenses/>.
"""
import os
import gettext
_ = gettext.gettext
import locale
import pygtk
pygtk.require("2.0")
import gtk
from documents_view import DocumentsView
from documents_model import DocumentsModel
from resources.util import get_resource_path
from settings_dialog import SettingsDialog
import gobject_creator
class GOCEditor(object):
"""
Simple editor that supports the creation of meta definition
files for GObjectCreator
"""
TRANSL_DOMAIN = "goceditor"
def | (self, start_files=[]):
locale_dir = os.path.dirname(__file__)
locale_dir = os.path.abspath(locale_dir)
locale_dir += os.sep + "locale"
locale.setlocale(locale.LC_ALL, "")
locale.bindtextdomain(self.TRANSL_DOMAIN, locale_dir)
gettext.bindtextdomain(self.TRANSL_DOMAIN, locale_dir)
gettext.textdomain(self.TRANSL_DOMAIN)
self._builder = gtk.Builder()
self._builder.set_translation_domain(self.TRANSL_DOMAIN)
path = get_resource_path("goceditor.ui")
self._builder.add_from_file(path)
self._create_widgets()
self._builder.connect_signals(self)
for start_file in start_files:
if not os.path.exists(start_file):
fd = open(start_file, "w")
fd.close()
self._docs_model.load_document(start_file)
def run(self):
window = self._builder.get_object("main_window")
window.show_all()
gtk.main()
def on_file_new(self, *args):
self._docs_model.new_document()
def on_file_open(self, *args):
dialog = gtk.FileChooserDialog(
action = gtk.FILE_CHOOSER_ACTION_OPEN,
buttons = (_("Cancel"), gtk.RESPONSE_CANCEL,
_("Open"), gtk.RESPONSE_OK)
)
if dialog.run() == gtk.RESPONSE_OK:
file_name = dialog.get_filename()
else:
file_name = None
dialog.destroy()
if file_name:
self._docs_model.load_document(file_name)
def on_file_save(self, *args):
idx = self._documents.get_current_index()
if idx < 0:
return
old_path = self._docs_model.get_file_path(idx)
if os.path.exists(old_path):
new_path = old_path
else:
dialog = gtk.FileChooserDialog(
action = gtk.FILE_CHOOSER_ACTION_SAVE,
buttons = (_("Cancel"), gtk.RESPONSE_CANCEL,
_("Save"), gtk.RESPONSE_OK)
)
dialog.set_current_name("untitled.goc")
dialog.set_do_overwrite_confirmation(True)
if dialog.run() == gtk.RESPONSE_OK:
new_path = dialog.get_filename()
else:
new_path = None
dialog.destroy()
if new_path:
content = self._documents.get_content(idx)
self._docs_model.save_document(idx, new_path, content)
def on_file_save_as(self, *args):
idx = self._documents.get_current_index()
if idx < 0:
return
current_path = self._docs_model.get_file_path(idx)
if not current_path:
current_path = "untitled.goc"
dialog = gtk.FileChooserDialog(
action = gtk.FILE_CHOOSER_ACTION_SAVE,
buttons = (_("Cancel"), gtk.RESPONSE_CANCEL,
_("Save"), gtk.RESPONSE_OK)
)
dialog.set_current_name(os.path.basename(current_path))
dialog.set_do_overwrite_confirmation(True)
if dialog.run() == gtk.RESPONSE_OK:
new_path = dialog.get_filename()
else:
new_path = None
dialog.destroy()
if new_path:
content = self._documents.get_content(idx)
self._docs_model.save_document(idx, new_path, content)
def on_file_quit(self, *args):
gtk.main_quit()
def on_edit_cut(self, *args):
self._documents.exec_action("cut")
def on_edit_copy(self, *args):
self._documents.exec_action("copy")
def on_edit_paste(self, *args):
self._documents.exec_action("paste")
def on_edit_settings(self, *args):
SettingsDialog().run()
def on_help_info(self, *args):
builder = gtk.Builder()
builder.set_translation_domain(self.TRANSL_DOMAIN)
builder.add_from_file(get_resource_path("gocedit_info.ui"))
dialog = builder.get_object("info_dialog")
path = get_resource_path("hand_mit_stift_296x300.png")
logo = gtk.gdk.pixbuf_new_from_file(path)
dialog.set_logo(logo)
dialog.set_version(gobject_creator.VERSION)
dialog.run()
dialog.destroy()
def _create_widgets(self):
self._docs_model = DocumentsModel()
self._documents = DocumentsView(self._docs_model)
self._documents.widget.show()
vbox = self._builder.get_object("top_vbox")
vbox.show()
vbox.pack_start(self._documents.widget)
| __init__ | identifier_name |
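One nit in __init__ above: start_files=[] is a mutable default argument. It is harmless here because the list is never mutated, but the conventional safe idiom is sketched below (assumption: behaviour otherwise identical):

class Example(object):
    def __init__(self, start_files=None):
        # A fresh list per call avoids Python's shared-mutable-default pitfall.
        self.start_files = list(start_files) if start_files else []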
goceditor.py | #coding=UTF-8
"""
This file is part of GObjectCreator.
GObjectCreator is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
GObjectCreator is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with GObjectCreator (see file COPYING). If not, see
<http://www.gnu.org/licenses/>.
"""
import os
import gettext
_ = gettext.gettext
import locale
import pygtk
pygtk.require("2.0")
import gtk
from documents_view import DocumentsView
from documents_model import DocumentsModel
from resources.util import get_resource_path
from settings_dialog import SettingsDialog
import gobject_creator
class GOCEditor(object):
"""
Simple editor that supports the creation of meta definition
files for GObjectCreator
"""
TRANSL_DOMAIN = "goceditor"
def __init__(self, start_files=[]):
locale_dir = os.path.dirname(__file__)
locale_dir = os.path.abspath(locale_dir)
locale_dir += os.sep + "locale"
locale.setlocale(locale.LC_ALL, "")
locale.bindtextdomain(self.TRANSL_DOMAIN, locale_dir)
gettext.bindtextdomain(self.TRANSL_DOMAIN, locale_dir)
gettext.textdomain(self.TRANSL_DOMAIN)
self._builder = gtk.Builder()
self._builder.set_translation_domain(self.TRANSL_DOMAIN)
path = get_resource_path("goceditor.ui")
self._builder.add_from_file(path)
self._create_widgets()
self._builder.connect_signals(self)
for start_file in start_files:
if not os.path.exists(start_file):
fd = open(start_file, "w")
fd.close()
self._docs_model.load_document(start_file)
def run(self):
window = self._builder.get_object("main_window")
window.show_all()
gtk.main()
def on_file_new(self, *args):
self._docs_model.new_document()
def on_file_open(self, *args):
dialog = gtk.FileChooserDialog(
action = gtk.FILE_CHOOSER_ACTION_OPEN,
buttons = (_("Cancel"), gtk.RESPONSE_CANCEL,
_("Open"), gtk.RESPONSE_OK)
)
if dialog.run() == gtk.RESPONSE_OK:
file_name = dialog.get_filename()
else:
file_name = None
dialog.destroy()
if file_name:
self._docs_model.load_document(file_name)
def on_file_save(self, *args):
idx = self._documents.get_current_index()
if idx < 0:
return
old_path = self._docs_model.get_file_path(idx)
if os.path.exists(old_path):
new_path = old_path
else:
dialog = gtk.FileChooserDialog(
action = gtk.FILE_CHOOSER_ACTION_SAVE,
buttons = (_("Cancel"), gtk.RESPONSE_CANCEL,
_("Save"), gtk.RESPONSE_OK)
)
dialog.set_current_name("untitled.goc")
dialog.set_do_overwrite_confirmation(True)
if dialog.run() == gtk.RESPONSE_OK:
new_path = dialog.get_filename()
else:
new_path = None
dialog.destroy()
if new_path:
content = self._documents.get_content(idx)
self._docs_model.save_document(idx, new_path, content)
def on_file_save_as(self, *args):
idx = self._documents.get_current_index()
if idx < 0:
return
current_path = self._docs_model.get_file_path(idx)
if not current_path:
current_path = "untitled.goc"
dialog = gtk.FileChooserDialog(
action = gtk.FILE_CHOOSER_ACTION_SAVE,
buttons = (_("Cancel"), gtk.RESPONSE_CANCEL,
_("Save"), gtk.RESPONSE_OK)
)
dialog.set_current_name(os.path.basename(current_path))
dialog.set_do_overwrite_confirmation(True)
if dialog.run() == gtk.RESPONSE_OK:
|
else:
new_path = None
dialog.destroy()
if new_path:
content = self._documents.get_content(idx)
self._docs_model.save_document(idx, new_path, content)
def on_file_quit(self, *args):
gtk.main_quit()
def on_edit_cut(self, *args):
self._documents.exec_action("cut")
def on_edit_copy(self, *args):
self._documents.exec_action("copy")
def on_edit_paste(self, *args):
self._documents.exec_action("paste")
def on_edit_settings(self, *args):
SettingsDialog().run()
def on_help_info(self, *args):
builder = gtk.Builder()
builder.set_translation_domain(self.TRANSL_DOMAIN)
builder.add_from_file(get_resource_path("gocedit_info.ui"))
dialog = builder.get_object("info_dialog")
path = get_resource_path("hand_mit_stift_296x300.png")
logo = gtk.gdk.pixbuf_new_from_file(path)
dialog.set_logo(logo)
dialog.set_version(gobject_creator.VERSION)
dialog.run()
dialog.destroy()
def _create_widgets(self):
self._docs_model = DocumentsModel()
self._documents = DocumentsView(self._docs_model)
self._documents.widget.show()
vbox = self._builder.get_object("top_vbox")
vbox.show()
vbox.pack_start(self._documents.widget)
| new_path = dialog.get_filename() | conditional_block |
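The launcher script is not part of these records; a minimal, assumed entry point for the class above would be:

import sys

if __name__ == "__main__":
    # Open any files named on the command line, then enter the GTK main loop.
    GOCEditor(start_files=sys.argv[1:]).run()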
goceditor.py | #coding=UTF-8
"""
This file is part of GObjectCreator.
GObjectCreator is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
GObjectCreator is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with GObjectCreator (see file COPYING). If not, see
<http://www.gnu.org/licenses/>.
"""
import os
import gettext
_ = gettext.gettext
import locale
import pygtk
pygtk.require("2.0")
import gtk
from documents_view import DocumentsView
from documents_model import DocumentsModel
from resources.util import get_resource_path
from settings_dialog import SettingsDialog
import gobject_creator
class GOCEditor(object):
"""
Simple editor that supports the creation of meta definition
files for GObjectCreator
"""
TRANSL_DOMAIN = "goceditor"
def __init__(self, start_files=[]):
locale_dir = os.path.dirname(__file__)
locale_dir = os.path.abspath(locale_dir)
locale_dir += os.sep + "locale"
locale.setlocale(locale.LC_ALL, "")
locale.bindtextdomain(self.TRANSL_DOMAIN, locale_dir)
gettext.bindtextdomain(self.TRANSL_DOMAIN, locale_dir)
gettext.textdomain(self.TRANSL_DOMAIN)
self._builder = gtk.Builder()
self._builder.set_translation_domain(self.TRANSL_DOMAIN)
path = get_resource_path("goceditor.ui")
self._builder.add_from_file(path)
self._create_widgets()
self._builder.connect_signals(self)
for start_file in start_files:
if not os.path.exists(start_file):
fd = open(start_file, "w")
fd.close()
self._docs_model.load_document(start_file)
def run(self):
window = self._builder.get_object("main_window")
window.show_all()
gtk.main()
def on_file_new(self, *args):
self._docs_model.new_document()
def on_file_open(self, *args):
dialog = gtk.FileChooserDialog( | buttons = (_("Cancel"), gtk.RESPONSE_CANCEL,
_("Open"), gtk.RESPONSE_OK)
)
if dialog.run() == gtk.RESPONSE_OK:
file_name = dialog.get_filename()
else:
file_name = None
dialog.destroy()
if file_name:
self._docs_model.load_document(file_name)
def on_file_save(self, *args):
idx = self._documents.get_current_index()
if idx < 0:
return
old_path = self._docs_model.get_file_path(idx)
if os.path.exists(old_path):
new_path = old_path
else:
dialog = gtk.FileChooserDialog(
action = gtk.FILE_CHOOSER_ACTION_SAVE,
buttons = (_("Cancel"), gtk.RESPONSE_CANCEL,
_("Save"), gtk.RESPONSE_OK)
)
dialog.set_current_name("untitled.goc")
dialog.set_do_overwrite_confirmation(True)
if dialog.run() == gtk.RESPONSE_OK:
new_path = dialog.get_filename()
else:
new_path = None
dialog.destroy()
if new_path:
content = self._documents.get_content(idx)
self._docs_model.save_document(idx, new_path, content)
def on_file_save_as(self, *args):
idx = self._documents.get_current_index()
if idx < 0:
return
current_path = self._docs_model.get_file_path(idx)
if not current_path:
current_path = "untitled.goc"
dialog = gtk.FileChooserDialog(
action = gtk.FILE_CHOOSER_ACTION_SAVE,
buttons = (_("Cancel"), gtk.RESPONSE_CANCEL,
_("Save"), gtk.RESPONSE_OK)
)
dialog.set_current_name(os.path.basename(current_path))
dialog.set_do_overwrite_confirmation(True)
if dialog.run() == gtk.RESPONSE_OK:
new_path = dialog.get_filename()
else:
new_path = None
dialog.destroy()
if new_path:
content = self._documents.get_content(idx)
self._docs_model.save_document(idx, new_path, content)
def on_file_quit(self, *args):
gtk.main_quit()
def on_edit_cut(self, *args):
self._documents.exec_action("cut")
def on_edit_copy(self, *args):
self._documents.exec_action("copy")
def on_edit_paste(self, *args):
self._documents.exec_action("paste")
def on_edit_settings(self, *args):
SettingsDialog().run()
def on_help_info(self, *args):
builder = gtk.Builder()
builder.set_translation_domain(self.TRANSL_DOMAIN)
builder.add_from_file(get_resource_path("gocedit_info.ui"))
dialog = builder.get_object("info_dialog")
path = get_resource_path("hand_mit_stift_296x300.png")
logo = gtk.gdk.pixbuf_new_from_file(path)
dialog.set_logo(logo)
dialog.set_version(gobject_creator.VERSION)
dialog.run()
dialog.destroy()
def _create_widgets(self):
self._docs_model = DocumentsModel()
self._documents = DocumentsView(self._docs_model)
self._documents.widget.show()
vbox = self._builder.get_object("top_vbox")
vbox.show()
vbox.pack_start(self._documents.widget) | action = gtk.FILE_CHOOSER_ACTION_OPEN, | random_line_split |
network.py | # -*- coding: utf-8 -*-
import socket
from paramiko import SSHClient, AutoAddPolicy, AuthenticationException
from bssh.utils import env
from bssh.auth import get_pkey
from bssh.logger import logger
def | (
hostname=None,
port=22,
username=None,
password=None,
pkey=None,
pkey_pwd=None,
sock=None,
timeout=env.timeout,
**kwargs
):
"""Connect the remote ssh server"""
passauth = bool(password)
pkey = pkey if passauth else get_pkey(pkey, pkey_pwd)
client = SSHClient()
client.set_missing_host_key_policy(AutoAddPolicy())
try:
client.connect(hostname=hostname,
port=int(port),
username=username,
password=password,
pkey=pkey,
sock=sock,
timeout=timeout)
logger.login.debug('%s connected successfully.' % hostname)
return client
except AuthenticationException:
logger.login.error('%s Validation failed.' % hostname)
except socket.error:
logger.login.error('%s Network Error' % hostname)
except Exception as e:
logger.login.error('%s %s' % (hostname, str(e)))
| connect | identifier_name |
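connect hands back a paramiko SSHClient on success and None on any failure, so callers must check before use. A usage sketch (host and credentials are placeholders):

client = connect(hostname="192.0.2.10", username="deploy",
                 pkey="~/.ssh/id_rsa")
if client is not None:
    # exec_command is standard paramiko SSHClient API.
    stdin, stdout, stderr = client.exec_command("uptime")
    print(stdout.read())
    client.close()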
network.py | # -*- coding: utf-8 -*-
import socket
from paramiko import SSHClient, AutoAddPolicy, AuthenticationException
from bssh.utils import env
from bssh.auth import get_pkey
from bssh.logger import logger
def connect(
hostname=None,
port=22,
username=None,
password=None,
pkey=None,
pkey_pwd=None,
sock=None,
timeout=env.timeout,
**kwargs
):
| """Connect the remote ssh server"""
passauth = True if password else False
pkey = pkey if passauth else get_pkey(pkey, pkey_pwd)
client = SSHClient()
client.set_missing_host_key_policy(AutoAddPolicy())
try:
client.connect(hostname=hostname,
port=int(port),
username=username,
password=password,
pkey=pkey,
sock=sock,
timeout=timeout)
logger.login.debug('%s connect successfully.' % hostname)
return client
except AuthenticationException:
logger.login.error('%s Validation failed.' % hostname)
except socket.error:
logger.login.error('%s Network Error' % hostname)
except Exception as e:
logger.login.error('%s %s' % (hostname, str(e))) | identifier_body |
|
network.py | # -*- coding: utf-8 -*-
import socket
from paramiko import SSHClient, AutoAddPolicy, AuthenticationException
from bssh.utils import env
from bssh.auth import get_pkey
from bssh.logger import logger
def connect(
hostname=None,
port=22, | pkey_pwd=None,
sock=None,
timeout=env.timeout,
**kwargs
):
"""Connect the remote ssh server"""
passauth = True if password else False
pkey = pkey if passauth else get_pkey(pkey, pkey_pwd)
client = SSHClient()
client.set_missing_host_key_policy(AutoAddPolicy())
try:
client.connect(hostname=hostname,
port=int(port),
username=username,
password=password,
pkey=pkey,
sock=sock,
timeout=timeout)
logger.login.debug('%s connect successfully.' % hostname)
return client
except AuthenticationException:
logger.login.error('%s Validation failed.' % hostname)
except socket.error:
logger.login.error('%s Network Error' % hostname)
except Exception as e:
logger.login.error('%s %s' % (hostname, str(e))) | username=None,
password=None,
pkey=None, | random_line_split |
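The sock parameter is forwarded straight to SSHClient.connect, which is what makes jump-host hops possible. A sketch (addresses and credentials are placeholders):

gateway = connect(hostname="bastion.example.com", username="ops",
                  password="secret")
if gateway is not None:
    # Tunnel a direct-tcpip channel through the bastion, then reuse connect().
    channel = gateway.get_transport().open_channel(
        "direct-tcpip", ("10.0.0.5", 22), ("127.0.0.1", 0))
    inner = connect(hostname="10.0.0.5", username="ops",
                    password="secret", sock=channel)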
main.component.ts | import { Component, OnInit } from '@angular/core'; | import { StateStorageService } from '../../shared';
@Component({
selector: 'jhi-main',
templateUrl: './main.component.html'
})
export class JhiMainComponent implements OnInit {
constructor(
private titleService: Title,
private router: Router,
private $storageService: StateStorageService,
) {}
private getPageTitle(routeSnapshot: ActivatedRouteSnapshot) {
let title: string = (routeSnapshot.data && routeSnapshot.data['pageTitle']) ? routeSnapshot.data['pageTitle'] : 'tasksApp';
if (routeSnapshot.firstChild) {
title = this.getPageTitle(routeSnapshot.firstChild) || title;
}
return title;
}
ngOnInit() {
this.router.events.subscribe((event) => {
if (event instanceof NavigationEnd) {
this.titleService.setTitle(this.getPageTitle(this.router.routerState.snapshot.root));
}
});
}
} | import { Router, ActivatedRouteSnapshot, NavigationEnd, RoutesRecognized } from '@angular/router';
import { Title } from '@angular/platform-browser'; | random_line_split |
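(Incidentally, RoutesRecognized is imported above but never used.) getPageTitle walks the route tree for a pageTitle entry, falling back to 'tasksApp'; a route that feeds it would be declared roughly like this sketch (the path and component are hypothetical):

// Sketch: the data.pageTitle below is what getPageTitle() picks up.
declare const TaskListComponent: any;   // stand-in for a real component

export const taskRoute = {
    path: 'tasks',
    component: TaskListComponent,
    data: {
        pageTitle: 'Tasks'
    }
};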
main.component.ts | import { Component, OnInit } from '@angular/core';
import { Router, ActivatedRouteSnapshot, NavigationEnd, RoutesRecognized } from '@angular/router';
import { Title } from '@angular/platform-browser';
import { StateStorageService } from '../../shared';
@Component({
selector: 'jhi-main',
templateUrl: './main.component.html'
})
export class JhiMainComponent implements OnInit {
constructor(
private titleService: Title,
private router: Router,
private $storageService: StateStorageService,
) {}
private | (routeSnapshot: ActivatedRouteSnapshot) {
let title: string = (routeSnapshot.data && routeSnapshot.data['pageTitle']) ? routeSnapshot.data['pageTitle'] : 'tasksApp';
if (routeSnapshot.firstChild) {
title = this.getPageTitle(routeSnapshot.firstChild) || title;
}
return title;
}
ngOnInit() {
this.router.events.subscribe((event) => {
if (event instanceof NavigationEnd) {
this.titleService.setTitle(this.getPageTitle(this.router.routerState.snapshot.root));
}
});
}
}
| getPageTitle | identifier_name |
main.component.ts | import { Component, OnInit } from '@angular/core';
import { Router, ActivatedRouteSnapshot, NavigationEnd, RoutesRecognized } from '@angular/router';
import { Title } from '@angular/platform-browser';
import { StateStorageService } from '../../shared';
@Component({
selector: 'jhi-main',
templateUrl: './main.component.html'
})
export class JhiMainComponent implements OnInit {
constructor(
private titleService: Title,
private router: Router,
private $storageService: StateStorageService,
) {}
private getPageTitle(routeSnapshot: ActivatedRouteSnapshot) |
ngOnInit() {
this.router.events.subscribe((event) => {
if (event instanceof NavigationEnd) {
this.titleService.setTitle(this.getPageTitle(this.router.routerState.snapshot.root));
}
});
}
}
| {
let title: string = (routeSnapshot.data && routeSnapshot.data['pageTitle']) ? routeSnapshot.data['pageTitle'] : 'tasksApp';
if (routeSnapshot.firstChild) {
title = this.getPageTitle(routeSnapshot.firstChild) || title;
}
return title;
} | identifier_body |
main.component.ts | import { Component, OnInit } from '@angular/core';
import { Router, ActivatedRouteSnapshot, NavigationEnd, RoutesRecognized } from '@angular/router';
import { Title } from '@angular/platform-browser';
import { StateStorageService } from '../../shared';
@Component({
selector: 'jhi-main',
templateUrl: './main.component.html'
})
export class JhiMainComponent implements OnInit {
constructor(
private titleService: Title,
private router: Router,
private $storageService: StateStorageService,
) {}
private getPageTitle(routeSnapshot: ActivatedRouteSnapshot) {
let title: string = (routeSnapshot.data && routeSnapshot.data['pageTitle']) ? routeSnapshot.data['pageTitle'] : 'tasksApp';
if (routeSnapshot.firstChild) |
return title;
}
ngOnInit() {
this.router.events.subscribe((event) => {
if (event instanceof NavigationEnd) {
this.titleService.setTitle(this.getPageTitle(this.router.routerState.snapshot.root));
}
});
}
}
| {
title = this.getPageTitle(routeSnapshot.firstChild) || title;
} | conditional_block |
imagepicker.tsx | import * as React from "react";
import { SurveyQuestionElementBase } from "./reactquestion_element";
import { QuestionImagePickerModel } from "../question_imagepicker";
import { ItemValue } from "../itemvalue";
import { ReactQuestionFactory } from "./reactquestion_factory";
export class SurveyQuestionImagePicker extends SurveyQuestionElementBase {
constructor(props: any) {
super(props);
this.handleOnChange = this.handleOnChange.bind(this);
}
protected get question(): QuestionImagePickerModel {
return this.questionBase as QuestionImagePickerModel;
}
handleOnChange(event: any) {
if (this.question.multiSelect) {
if (event.target.checked) {
this.question.value = this.question.value.concat(event.target.value);
} else {
var currValue = this.question.value;
currValue.splice(this.question.value.indexOf(event.target.value), 1);
this.question.value = currValue;
}
} else {
this.question.value = event.target.value;
}
this.setState({ value: this.question.value });
}
protected renderElement(): JSX.Element {
var cssClasses = this.question.cssClasses;
return (
<fieldset className={cssClasses.root}>
<legend aria-label={this.question.locTitle.renderedHtml} />
{this.getItems(cssClasses)}
</fieldset>
);
}
protected getItems(cssClasses: any): Array<any> {
var items = [];
for (var i = 0; i < this.question.visibleChoices.length; i++) |
return items;
}
protected get textStyle(): any {
return { marginLeft: "3px", display: "inline", position: "static" };
}
protected renderItem(
key: string,
item: ItemValue,
cssClasses: any
): JSX.Element {
var isChecked = this.question.isItemSelected(item);
var id = this.question.inputId + "_" + item.value;
var itemClass = this.question.getItemClass(item);
var text = null;
if (this.question.showLabel) {
text = (
<span
title={item.text || item.value}
className={this.question.cssClasses.itemText}
>
{item.text || item.value}
</span>
);
}
var style: any = { objectFit: this.question.imageFit };
var control = null;
if (this.question.contentMode === "image") {
control = (
<img
className={cssClasses.image}
src={item["imageLink"]}
width={
this.question.imageWidth
? this.question.imageWidth + "px"
: undefined
}
height={
this.question.imageHeight
? this.question.imageHeight + "px"
: undefined
}
alt={item.text || item.value}
style={style}
/>
);
}
if (this.question.contentMode === "video") {
control = (
<embed
className={cssClasses.image}
src={item["imageLink"]}
width={
this.question.imageWidth
? this.question.imageWidth + "px"
: undefined
}
height={
this.question.imageHeight
? this.question.imageHeight + "px"
: undefined
}
style={style}
/>
);
}
return (
<div key={key} className={itemClass}>
<label className={cssClasses.label}>
<input
style={{ display: "none" }}
className={cssClasses.itemControl}
id={id}
type={this.question.multiSelect ? "checkbox" : "radio"}
name={this.question.name + "_" + this.questionBase.id}
checked={isChecked}
value={item.value}
disabled={this.isDisplayMode || !item.isEnabled}
onChange={this.handleOnChange}
aria-label={this.question.locTitle.renderedHtml}
aria-invalid={this.question.errors.length > 0}
aria-describedby={
this.question.errors.length > 0
? this.question.id + "_errors"
: null
}
/>
<div>
{control}
{text}
</div>
</label>
</div>
);
}
}
ReactQuestionFactory.Instance.registerQuestion("imagepicker", (props) => {
return React.createElement(SurveyQuestionImagePicker, props);
});
| {
var item = this.question.visibleChoices[i];
var key = "item" + i;
items.push(this.renderItem(key, item, cssClasses));
} | conditional_block |
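handleOnChange above splices the very array it read out of this.question.value before assigning it back, so the model briefly sees its own array mutated in place. If the model relies on reference equality this can misfire; an immutable variant is sketched below (assumption: assigning a fresh array is enough for the model's change detection):

// Sketch: immutable removal instead of splicing the shared array.
function handleOnChangeImmutable(question: any, event: any,
                                 setState: (s: any) => void) {
  const v = event.target.value;
  if (question.multiSelect) {
    question.value = event.target.checked
      ? question.value.concat(v)
      : question.value.filter((x: any) => x !== v);
  } else {
    question.value = v;
  }
  setState({ value: question.value });
}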
imagepicker.tsx | import * as React from "react";
import { SurveyQuestionElementBase } from "./reactquestion_element";
import { QuestionImagePickerModel } from "../question_imagepicker";
import { ItemValue } from "../itemvalue";
import { ReactQuestionFactory } from "./reactquestion_factory";
export class SurveyQuestionImagePicker extends SurveyQuestionElementBase {
constructor(props: any) {
super(props);
this.handleOnChange = this.handleOnChange.bind(this);
}
protected get question(): QuestionImagePickerModel {
return this.questionBase as QuestionImagePickerModel;
}
handleOnChange(event: any) {
if (this.question.multiSelect) {
if (event.target.checked) {
this.question.value = this.question.value.concat(event.target.value);
} else {
var currValue = this.question.value;
currValue.splice(this.question.value.indexOf(event.target.value), 1);
this.question.value = currValue;
}
} else {
this.question.value = event.target.value;
}
this.setState({ value: this.question.value });
}
protected renderElement(): JSX.Element {
var cssClasses = this.question.cssClasses;
return (
<fieldset className={cssClasses.root}>
<legend aria-label={this.question.locTitle.renderedHtml} />
{this.getItems(cssClasses)}
</fieldset>
);
}
protected getItems(cssClasses: any): Array<any> {
var items = [];
for (var i = 0; i < this.question.visibleChoices.length; i++) {
var item = this.question.visibleChoices[i];
var key = "item" + i;
items.push(this.renderItem(key, item, cssClasses));
}
return items;
}
protected get textStyle(): any {
return { marginLeft: "3px", display: "inline", position: "static" };
}
protected renderItem(
key: string,
item: ItemValue,
cssClasses: any
): JSX.Element {
var isChecked = this.question.isItemSelected(item);
var id = this.question.inputId + "_" + item.value;
var itemClass = this.question.getItemClass(item);
var text = null;
if (this.question.showLabel) {
text = (
<span
title={item.text || item.value}
className={this.question.cssClasses.itemText}
>
{item.text || item.value}
</span>
);
}
var style: any = { objectFit: this.question.imageFit };
var control = null;
if (this.question.contentMode === "image") {
control = (
<img
className={cssClasses.image}
src={item["imageLink"]}
width={
this.question.imageWidth
? this.question.imageWidth + "px"
: undefined
}
height={
this.question.imageHeight
? this.question.imageHeight + "px"
: undefined
}
alt={item.text || item.value}
style={style}
/>
);
}
if (this.question.contentMode === "video") {
control = (
<embed
className={cssClasses.image}
src={item["imageLink"]}
width={
this.question.imageWidth
? this.question.imageWidth + "px"
: undefined
} | ? this.question.imageHeight + "px"
: undefined
}
style={style}
/>
);
}
return (
<div key={key} className={itemClass}>
<label className={cssClasses.label}>
<input
style={{ display: "none" }}
className={cssClasses.itemControl}
id={id}
type={this.question.multiSelect ? "checkbox" : "radio"}
name={this.question.name + "_" + this.questionBase.id}
checked={isChecked}
value={item.value}
disabled={this.isDisplayMode || !item.isEnabled}
onChange={this.handleOnChange}
aria-label={this.question.locTitle.renderedHtml}
aria-invalid={this.question.errors.length > 0}
aria-describedby={
this.question.errors.length > 0
? this.question.id + "_errors"
: null
}
/>
<div>
{control}
{text}
</div>
</label>
</div>
);
}
}
ReactQuestionFactory.Instance.registerQuestion("imagepicker", (props) => {
return React.createElement(SurveyQuestionImagePicker, props);
}); | height={
this.question.imageHeight | random_line_split |
imagepicker.tsx | import * as React from "react";
import { SurveyQuestionElementBase } from "./reactquestion_element";
import { QuestionImagePickerModel } from "../question_imagepicker";
import { ItemValue } from "../itemvalue";
import { ReactQuestionFactory } from "./reactquestion_factory";
export class SurveyQuestionImagePicker extends SurveyQuestionElementBase {
constructor(props: any) {
super(props);
this.handleOnChange = this.handleOnChange.bind(this);
}
protected get question(): QuestionImagePickerModel {
return this.questionBase as QuestionImagePickerModel;
}
handleOnChange(event: any) {
if (this.question.multiSelect) {
if (event.target.checked) {
this.question.value = this.question.value.concat(event.target.value);
} else {
var currValue = this.question.value;
currValue.splice(this.question.value.indexOf(event.target.value), 1);
this.question.value = currValue;
}
} else {
this.question.value = event.target.value;
}
this.setState({ value: this.question.value });
}
protected | (): JSX.Element {
var cssClasses = this.question.cssClasses;
return (
<fieldset className={cssClasses.root}>
<legend aria-label={this.question.locTitle.renderedHtml} />
{this.getItems(cssClasses)}
</fieldset>
);
}
protected getItems(cssClasses: any): Array<any> {
var items = [];
for (var i = 0; i < this.question.visibleChoices.length; i++) {
var item = this.question.visibleChoices[i];
var key = "item" + i;
items.push(this.renderItem(key, item, cssClasses));
}
return items;
}
protected get textStyle(): any {
return { marginLeft: "3px", display: "inline", position: "static" };
}
protected renderItem(
key: string,
item: ItemValue,
cssClasses: any
): JSX.Element {
var isChecked = this.question.isItemSelected(item);
var id = this.question.inputId + "_" + item.value;
var itemClass = this.question.getItemClass(item);
var text = null;
if (this.question.showLabel) {
text = (
<span
title={item.text || item.value}
className={this.question.cssClasses.itemText}
>
{item.text || item.value}
</span>
);
}
var style: any = { objectFit: this.question.imageFit };
var control = null;
if (this.question.contentMode === "image") {
control = (
<img
className={cssClasses.image}
src={item["imageLink"]}
width={
this.question.imageWidth
? this.question.imageWidth + "px"
: undefined
}
height={
this.question.imageHeight
? this.question.imageHeight + "px"
: undefined
}
alt={item.text || item.value}
style={style}
/>
);
}
if (this.question.contentMode === "video") {
control = (
<embed
className={cssClasses.image}
src={item["imageLink"]}
width={
this.question.imageWidth
? this.question.imageWidth + "px"
: undefined
}
height={
this.question.imageHeight
? this.question.imageHeight + "px"
: undefined
}
style={style}
/>
);
}
return (
<div key={key} className={itemClass}>
<label className={cssClasses.label}>
<input
style={{ display: "none" }}
className={cssClasses.itemControl}
id={id}
type={this.question.multiSelect ? "checkbox" : "radio"}
name={this.question.name + "_" + this.questionBase.id}
checked={isChecked}
value={item.value}
disabled={this.isDisplayMode || !item.isEnabled}
onChange={this.handleOnChange}
aria-label={this.question.locTitle.renderedHtml}
aria-invalid={this.question.errors.length > 0}
aria-describedby={
this.question.errors.length > 0
? this.question.id + "_errors"
: null
}
/>
<div>
{control}
{text}
</div>
</label>
</div>
);
}
}
ReactQuestionFactory.Instance.registerQuestion("imagepicker", (props) => {
return React.createElement(SurveyQuestionImagePicker, props);
});
| renderElement | identifier_name |
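For context, a survey definition that would exercise this component might look like the sketch below (property names follow the SurveyJS imagepicker question model used above; the values are made up):

const surveyJson = {
  questions: [{
    type: "imagepicker",
    name: "animal",
    multiSelect: true,
    showLabel: true,
    choices: [
      { value: "lion", imageLink: "https://example.com/lion.jpg" },
      { value: "panda", imageLink: "https://example.com/panda.jpg" }
    ]
  }]
};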
imagepicker.tsx | import * as React from "react";
import { SurveyQuestionElementBase } from "./reactquestion_element";
import { QuestionImagePickerModel } from "../question_imagepicker";
import { ItemValue } from "../itemvalue";
import { ReactQuestionFactory } from "./reactquestion_factory";
export class SurveyQuestionImagePicker extends SurveyQuestionElementBase {
constructor(props: any) {
super(props);
this.handleOnChange = this.handleOnChange.bind(this);
}
protected get question(): QuestionImagePickerModel |
handleOnChange(event: any) {
if (this.question.multiSelect) {
if (event.target.checked) {
this.question.value = this.question.value.concat(event.target.value);
} else {
var currValue = this.question.value;
currValue.splice(this.question.value.indexOf(event.target.value), 1);
this.question.value = currValue;
}
} else {
this.question.value = event.target.value;
}
this.setState({ value: this.question.value });
}
protected renderElement(): JSX.Element {
var cssClasses = this.question.cssClasses;
return (
<fieldset className={cssClasses.root}>
<legend aria-label={this.question.locTitle.renderedHtml} />
{this.getItems(cssClasses)}
</fieldset>
);
}
protected getItems(cssClasses: any): Array<any> {
var items = [];
for (var i = 0; i < this.question.visibleChoices.length; i++) {
var item = this.question.visibleChoices[i];
var key = "item" + i;
items.push(this.renderItem(key, item, cssClasses));
}
return items;
}
protected get textStyle(): any {
return { marginLeft: "3px", display: "inline", position: "static" };
}
protected renderItem(
key: string,
item: ItemValue,
cssClasses: any
): JSX.Element {
var isChecked = this.question.isItemSelected(item);
var id = this.question.inputId + "_" + item.value;
var itemClass = this.question.getItemClass(item);
var text = null;
if (this.question.showLabel) {
text = (
<span
title={item.text || item.value}
className={this.question.cssClasses.itemText}
>
{item.text || item.value}
</span>
);
}
var style: any = { objectFit: this.question.imageFit };
var control = null;
if (this.question.contentMode === "image") {
control = (
<img
className={cssClasses.image}
src={item["imageLink"]}
width={
this.question.imageWidth
? this.question.imageWidth + "px"
: undefined
}
height={
this.question.imageHeight
? this.question.imageHeight + "px"
: undefined
}
alt={item.text || item.value}
style={style}
/>
);
}
if (this.question.contentMode === "video") {
control = (
<embed
className={cssClasses.image}
src={item["imageLink"]}
width={
this.question.imageWidth
? this.question.imageWidth + "px"
: undefined
}
height={
this.question.imageHeight
? this.question.imageHeight + "px"
: undefined
}
style={style}
/>
);
}
return (
<div key={key} className={itemClass}>
<label className={cssClasses.label}>
<input
style={{ display: "none" }}
className={cssClasses.itemControl}
id={id}
type={this.question.multiSelect ? "checkbox" : "radio"}
name={this.question.name + "_" + this.questionBase.id}
checked={isChecked}
value={item.value}
disabled={this.isDisplayMode || !item.isEnabled}
onChange={this.handleOnChange}
aria-label={this.question.locTitle.renderedHtml}
aria-invalid={this.question.errors.length > 0}
aria-describedby={
this.question.errors.length > 0
? this.question.id + "_errors"
: null
}
/>
<div>
{control}
{text}
</div>
</label>
</div>
);
}
}
ReactQuestionFactory.Instance.registerQuestion("imagepicker", (props) => {
return React.createElement(SurveyQuestionImagePicker, props);
});
| {
return this.questionBase as QuestionImagePickerModel;
} | identifier_body |
settings.py | """
Django settings for djangoApp project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'r&j)3lay4i$rm44n%h)bsv_q(9ysqhl@7@aibjm2b=1)0fag9n'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = [] |
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'djangoApp.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'djangoApp.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/' | random_line_split |
|
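As the comments above warn, a checked-in SECRET_KEY and DEBUG = True are development-only settings; a common hardening sketch reads them from the environment instead (the variable names are a convention, not part of this project):

import os

SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY", "dev-only-insecure-key")
DEBUG = os.environ.get("DJANGO_DEBUG", "false").lower() == "true"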
get_profile_information.rs | //! `GET /_matrix/federation/*/query/profile`
//!
//! Endpoint to query profile information with a user id and optional field.
pub mod v1 {
//! `/v1/` ([spec])
//!
//! [spec]: https://spec.matrix.org/v1.2/server-server-api/#get_matrixfederationv1queryprofile
use ruma_common::{api::ruma_api, MxcUri, UserId};
use ruma_serde::StringEnum;
use crate::PrivOwnedStr;
ruma_api! {
metadata: {
description: "Get profile information, such as a display name or avatar, for a given user.",
name: "get_profile_information",
method: GET,
stable_path: "/_matrix/federation/v1/query/profile",
rate_limited: false,
authentication: ServerSignatures,
added: 1.0,
}
request: {
/// User ID to query.
#[ruma_api(query)]
pub user_id: &'a UserId,
/// Profile field to query.
#[serde(skip_serializing_if = "Option::is_none")]
#[ruma_api(query)]
pub field: Option<&'a ProfileField>,
}
#[derive(Default)]
response: {
/// Display name of the user.
#[serde(skip_serializing_if = "Option::is_none")]
pub displayname: Option<String>,
/// Avatar URL for the user's avatar.
///
/// If you activate the `compat` feature, this field being an empty string in JSON will result
/// in `None` here during deserialization.
#[serde(skip_serializing_if = "Option::is_none")]
#[cfg_attr(
feature = "compat",
serde(default, deserialize_with = "ruma_serde::empty_string_as_none")
)]
pub avatar_url: Option<Box<MxcUri>>,
/// The [BlurHash](https://blurha.sh) for the avatar pointed to by `avatar_url`.
///
/// This uses the unstable prefix in
/// [MSC2448](https://github.com/matrix-org/matrix-spec-proposals/pull/2448).
#[cfg(feature = "unstable-msc2448")]
#[serde(rename = "xyz.amorgan.blurhash", skip_serializing_if = "Option::is_none")]
pub blurhash: Option<String>,
}
}
impl<'a> Request<'a> {
/// Creates a new `Request` with the given user id.
pub fn new(user_id: &'a UserId) -> Self {
Self { user_id, field: None }
}
} | Default::default()
}
}
/// Profile fields to specify in query.
///
/// This type can hold an arbitrary string. To check for formats that are not available as a
/// documented variant here, use its string representation, obtained through `.as_str()`.
#[derive(Clone, Debug, PartialEq, Eq, StringEnum)]
#[non_exhaustive]
pub enum ProfileField {
/// Display name of the user.
#[ruma_enum(rename = "displayname")]
DisplayName,
/// Avatar URL for the user's avatar.
#[ruma_enum(rename = "avatar_url")]
AvatarUrl,
#[doc(hidden)]
_Custom(PrivOwnedStr),
}
impl ProfileField {
/// Creates a string slice from this `ProfileField`.
pub fn as_str(&self) -> &str {
self.as_ref()
}
}
} |
impl Response {
/// Creates an empty `Response`.
pub fn new() -> Self { | random_line_split |
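Constructing the request elsewhere would look roughly like this (a sketch; how the UserId is obtained is left to the caller):

// Sketch: ask only for the display name of a given user.
fn example(user_id: &UserId) {
    let mut request = Request::new(user_id);
    request.field = Some(&ProfileField::DisplayName);
    // `request` would then be sent via the federation client.
}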

lc804-unique-morse-code-words.py

# coding=utf-8
import unittest
"""804. Unique Morse Code Words
https://leetcode.com/problems/unique-morse-code-words/description/
International Morse Code defines a standard encoding where each letter is
mapped to a series of dots and dashes, as follows: `"a"` maps to `".-"`, `"b"`
maps to `"-..."`, `"c"` maps to `"-.-."`, and so on.
For convenience, the full table for the 26 letters of the English alphabet is
given below:
[".-","-...","-.-.","-..",".","..-.","--.","....","..",".---","-.-",".-..","--","-.","---",".--.","--.-",".-.","...","-","..-","...-",".--","-..-","-.--","--.."]
Now, given a list of words, each word can be written as a concatenation of the
Morse code of each letter. For example, "cab" can be written as "-.-.-....-",
(which is the concatenation "-.-." \+ "-..." \+ ".-"). We'll call such a
concatenation, the transformation of a word.
Return the number of different transformations among all words we have.
**Example:**
**Input:** words = ["gin", "zen", "gig", "msg"]
**Output:** 2
**Explanation:**
The transformation of each word is:
"gin" -> "--...-."
"zen" -> "--...-."
"gig" -> "--...--."
"msg" -> "--...--."
There are 2 different transformations, "--...-." and "--...--.".
**Note:**
* The length of `words` will be at most `100`.
* Each `words[i]` will have length in range `[1, 12]`.
* `words[i]` will only consist of lowercase letters.
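
Approach: translate each word letter-by-letter into Morse, collect the
resulting strings in a set, and return the set's size.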
Similar Questions:
"""
class Solution(object):
def uniqueMorseRepresentations(self, words):
"""
:type words: List[str]
:rtype: int
"""
self.CODE = [
".-", "-...", "-.-.", "-..", ".",
"..-.", "--.", "....", "..", ".---",
"-.-", ".-..", "--", "-.", "---",
".--.", "--.-", ".-.", "...", "-",
"..-", "...-", ".--", "-..-", "-.--", "--.."]
cache = {self._trans(i) for i in words}
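        # The set deduplicates identical Morse concatenations, so its size is
        # the number of distinct transformations.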
return len(cache)
def _trans(self, w):
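        """Concatenate the Morse code of each letter in `w`."""
        # ord(i) - ord('a') maps 'a'..'z' to indices 0..25 of CODE.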
return ''.join(self.CODE[ord(i) - ord('a')] for i in w)
class T(unittest.TestCase):
def test(self):
s = Solution()
self.assertEqual(s.uniqueMorseRepresentations(["gin", "zen", "gig", "msg"]), 2)
if __name__ == "__main__":
    unittest.main()