file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–12.1k) | suffix (large_string, lengths 0–12k) | middle (large_string, lengths 0–7.51k) | fim_type (large_string, 4 classes)
---|---|---|---|---
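Each row stores one fill-in-the-middle (FIM) sample: a source file is split into a prefix, a hidden middle, and a suffix, and fim_type records what kind of span was hidden (the four classes seen below are conditional_block, random_line_split, identifier_name, and identifier_body). A minimal sketch of how such a row could be assembled into a training prompt, assuming the usual PSM-style sentinel tokens (the exact token strings are an assumption, not part of this dump):

def to_psm_prompt(row):
    # <fim_prefix> P <fim_suffix> S <fim_middle> M
    return ("<fim_prefix>" + row["prefix"]
            + "<fim_suffix>" + row["suffix"]
            + "<fim_middle>" + row["middle"])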
ParamEffectsProcessedFCD.py

# Imports reconstructed from usage below; path, calcTime and BinarySearch are
# project-local helper modules that this dump does not include.
import os
import random
from pickle import load, dump


def readEdgeDump():
    """Get for each interval all edges with their corresponding speed."""
    edgeDumpDict = {}
    begin = False
    interval = 0
    inputFile = open(path.FQedgeDump, 'r')
    for line in inputFile:
        words = line.split('"')
        if not begin and words[0].find("<end>") != -1:
            words = words[0].split(">")
            interval = int(words[1][:-5])
            edgeDumpDict.setdefault(interval, [])
        elif words[0].find("<interval") != -1 and int(words[1]) >= simStartTime:
            interval = int(words[1])
            begin = True
        if begin and words[0].find("<edge id") != -1:
            edge = words[1]
            if edge[0] != ':':
                speed = float(words[13])
                entered = int(words[15])
                # if no vehicle drove off the edge, ignore the edge
                if entered == 0:
                    continue
                edgeDumpDict.setdefault(interval, []).append((edge, speed))
    inputFile.close()
    return edgeDumpDict
def readVtype():
    """Gets all necessary information about all vehicles."""
    vtypeDict = {}
    timestep = 0
    begin = False
    inputFile = open(path.FQvtype, 'r')
    for line in inputFile:
        words = line.split('"')
        if words[0].find("<timestep ") != -1 and int(words[1]) >= simStartTime:
            timestep = int(words[1])
            begin = True
        if begin and words[0].find("<vehicle id=") != -1:
            if words[3][0] != ':':  # skip inner edges
                edge = words[3][:-2]
                # remove the "/..." part of the edge id
                if edge.find("/") != -1:
                    edge = edge.split("/")[0]
                # tuple layout: (id, edge, speed, x, y), keyed by timestep
                vtypeDict.setdefault(timestep, []).append(
                    (words[1], edge, float(words[15]), words[13], words[11]))
    inputFile.close()
    return vtypeDict
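# Why the words[...] indices above line up: splitting an attribute list on '"'
# puts tag text at even indices and attribute values at odd ones. A sketch with
# a hypothetical vehicle line (the real dump carries more attributes, which is
# how speed ends up at words[15]; the exact attribute order is an assumption):
#   line = '<vehicle id="veh0" lane="edge12_0" .../>'
#   words = line.split('"')
#   words[1] == 'veh0'      # the id
#   words[3] == 'edge12_0'  # the lane; words[3][:-2] strips "_0" -> 'edge12'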
def readProcessedFCD():
    """Reads the processed FCD and creates a list of vtypeDict fakes which can be used similarly."""
    procFcdDict = {}
    pqDateDict = {}  # each date is assigned a (period, quota) tuple
    simDate = '2007-07-18 '
    day = 0
    # create keys for the procFcdDict
    for p in period:
        for q in quota:
            day += 86400
            date, time = calcTime.getDateFromDepart(day).split(" ")
            pqDateDict.setdefault(date, (p, q))
            procFcdDict.setdefault((p, q), {})
    inputFile = open(path.FQprocessedFCD, 'r')
    for line in inputFile:
        timestamp, edge, speed, cover, id = line.split('\t')
        date, time = calcTime.getNiceTimeLabel(timestamp).split(" ")
        # add values to the actual dict; speed is converted from km/h to m/s
        timestep = calcTime.getTimeInSecs(simDate + time)
        procFcdDict[pqDateDict[date]].setdefault(
            timestep, []).append((id, edge, float(speed) / 3.6))
    inputFile.close()
    return procFcdDict
def getVehicleList(vtypeDict):
    """Collects all vehicles used in the simulation."""
    vehSet = set()
    for timestepList in vtypeDict.values():
        for elm in timestepList:
            vehSet.add(elm[0])
    return list(vehSet)
def make(source, dependentOn, builder, buildNew=False, *builderParams):
    """Fills the target (a variable) with the information of source (a pickled var).
    It checks whether the pickle file is up to date compared to the dependentOn file.
    If not, the builder function is called.
    If buildNew is True the builder function is called anyway.
    """
    # check if pickle file exists
    if not buildNew and not os.path.exists(source):
        buildNew = True
    # check dates: load the pickle only if it is newer than its dependency
    if not buildNew and os.path.getmtime(source) > os.path.getmtime(dependentOn):
        print("load source: ", os.path.basename(source), "...", end=' ')
        target = load(open(source, 'rb'))
    else:
        print("build source: ", os.path.basename(source), "...", end=' ')
        target = builder(*builderParams)
        # pickle the freshly built target
        dump(target, open(source, 'wb'), 1)
    print("Done!")
    return target
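# A minimal usage sketch of the caching pattern (file names hypothetical):
# reparse edgedump.xml only when it is newer than its pickle, otherwise load
# the pickle. main() below uses exactly this pattern with the path module.
#
#   edgeDump = make("edgedump.pickle", "edgedump.xml", readEdgeDump)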
def chooseTaxis(vehList):
    """Chooses random vehicles from the vehicle list which should act as taxis."""
    # calc absolute amount of taxis
    taxiNo = int(round(quota * len(vehList) / 100))
    random.shuffle(vehList)
    return vehList[:taxiNo]
def reduceVtype(taxis):
    """Reduces the vtypeDict to the relevant information."""
    taxis.sort()  # sort it for binary search
    newVtypeDict = {}
    for timestep in vtypeDict:
        # keep only timesteps which are a multiple of the period
        if timestep % period == 0:
            newVtypeDict[timestep] = (
                [tup for tup in vtypeDict[timestep] if BinarySearch.isElmInList(taxis, tup[0])])
    return newVtypeDict
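# BinarySearch is a project-local module that this dump does not include; a
# minimal sketch of what isElmInList presumably does, written with the standard
# bisect module (a hypothetical reimplementation, not the project's code):
from bisect import bisect_left

def _isElmInList(sortedList, elm):
    i = bisect_left(sortedList, elm)
    return i != len(sortedList) and sortedList[i] == elm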
def writeRawFCD():
    """Creates a file in the raw-fcd-format from the chosen taxis."""
    global vehId, vehIdDict
    vehIdDict = {}
    vehId = 0
    day = 0

    def getVehId(orgId):
        """Creates new vehicle ids which consist only of numerics."""
        global vehId, vehIdDict
        value = vehIdDict.get(orgId, vehId)
        if value is vehId:
            vehIdDict[orgId] = vehId
            vehId = (vehId + 1) % 65500
        return value

    outputFile = open(path.FQrawFCD, 'w')
    for period, quota, vtypeDictR, taxiSum in generatePeriodQuotaSets():
        day += 86400
        # reset the dict so that every taxi (even if the vehicle is chosen
        # several times) gets its own id
        vehIdDict = {}
        sortedKeys = sorted(vtypeDictR.keys())
        for timestep in sortedKeys:
            taxiList = vtypeDictR[timestep]
            for tup in taxiList:  # all elements in this timestep
                # calc timestep -> for every period/quota set a new day
                time = timestep + day
                time = calcTime.getDateFromDepart(time)
                # output columns: veh_id, date, x and y (sliced to re-insert
                # the decimal point), status ("90"), speed from m/s to km/h
                outputFile.write(str(getVehId(tup[0])) + '\t' + time + '\t' + tup[3][0:2] + '.' + tup[3][2:7] + tup[3][8:] +
                                 '\t' + tup[4][0:2] + '.' + tup[4][2:7] + tup[4][8:] + '\t' + "90" + '\t' + str(int(round(tup[2] * 3.6))) + '\n')
    print(vehId)
    outputFile.close()
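# A sketch of one line written above (values hypothetical). Assuming the x/y
# strings carry a decimal comma at index 7 (e.g. '1150100,00'), the slicing
# re-inserts a dot after two digits and drops the comma, and the speed column
# is km/h:
#
#   12\t2007-07-19 08:15:00\t11.5010000\t48.1020000\t90\t50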
def createOutput():
    """Creates a file with a comparison of speeds for each edge
    between the taxis and the average speed from the current edge."""
    firstPeriod = True
    # get the number of distinct edges
    edgesNo = 0
    edgesSet = set()
    for timestep, taxiList in vtypeDict.items():
        for tup in taxiList:
            edgesSet.add(tup[1])
    edgesNo = len(edgesSet)
    outputFile = open(path.FQoutput, 'w')
    outputFile.write('<?xml version="1.0"?>\n')
    outputFile.write('<paramEffects aggregationInterval="%d" vehicles="%d" edges="%d">\n' % (
        aggInterval, vehSum, edgesNo))
    for period, quota, vtypeDictR, taxiSum in generatePeriodQuotaSets(True):
        if quota is None:
            if not firstPeriod:
                outputFile.write("\t</periods>\n")
            else:
                firstPeriod = False
            outputFile.write('\t<periods period="%d">\n' % (period))
        else:
            simpleTaxiMeanVList = [0, 1]
            simpleEdgeMeanVList = [0, 1]
            drivenEdgesSet = set()
            if len(vtypeDictR) == 0:  # if the processed FCD returns no values
                print("noData p", period, " q", quota)
                drivenEdgesSet.add(0)
            else:  # create mean from all taxi speed values
                for timestep, taxiList in vtypeDictR.items():
                    for tup in taxiList:  # all elements in this timestep
                        simpleTaxiMeanVList[0] += tup[2]
                        simpleTaxiMeanVList[1] += 1
                        drivenEdgesSet.add(tup[1])
    # (the remainder of createOutput is truncated in the source dump)
# --- ParamEffectsProcessedFCD.py, module globals ---
# NOTE: the dump resumes mid-statement; only the tail of the `period` list
# ("... 250, 300]") survives. It holds the aggregation periods in seconds and
# may be a single element or a whole list.
period = [250, 300]
# how many taxis, in percent of the total vehicles; a single element or a whole list
quota = [0.1, 0.2, 0.5, 1.0, 2.0, 5.0, 10.0, 20., 50.]
iteration = 2
vehId = 0
vehIdDict = {}
edgeDumpDict = None
vtypeDict = None
vehList = None
vehSum = None
procFcdDict = None
def main():
    global edgeDumpDict, vtypeDict, vehList, vehSum, period, quota, procFcdDict
    print("start program")
    edgeDumpDict = make(path.FQedgeDumpPickle, path.FQedgeDump, readEdgeDump)
    vtypeDict = make(path.FQvtypePickle, path.FQvtype, readVtype)
    vehList = make(
        path.FQvehPickle, path.FQvtypePickle, getVehicleList, False, vtypeDict)
    vehSum = len(vehList)
    if mode == U_FCD:
        print("load source: ", os.path.basename(
            path.FQprocessedFCD), "...", end=' ')
        procFcdDict = readProcessedFCD()
        print("Done!")
    orgPath = path.FQoutput
    if mode == W_FCD:
        orgPath = path.FQrawFCD
    orgPeriod = period
    orgQuota = quota
    for i in range(iteration):
        print("iteration: ", i)
        period = orgPeriod
        quota = orgQuota
        path.FQoutput = orgPath + \
            "interval900s_iteration" + str(i) + ".out.xml"
        path.FQrawFCD = orgPath + \
            "interval900s_iteration" + str(i) + ".out.dat"
        if mode == W_FCD:
            writeRawFCD()
        else:
            createOutput()
    print("end")
def generatePeriodQuotaSets(stopByPeriod=False):
    """Generates all period-quota-sets (with creation of new taxis for each set).
    You can iterate over the generator and get, for each step, the period and quota.
    If stopByPeriod is True it additionally yields once at the start of each
    period block (with quota set to None), not only once per quota.
    """
    global period, quota
    if type(period) != list:
        period = [period]
    if type(quota) != list:
        quota = [quota]
    pList = period
    qList = quota
    for period in pList:
        if stopByPeriod:
            yield (period, None, None, None)
        for quota in qList:
            print("create output for: period ", period, " quota ", quota)
            taxis = chooseTaxis(vehList)
            taxiSum = len(taxis)
            if mode == U_FCD:
                vtypeDictR = procFcdDict[(period, quota)]
            else:
                vtypeDictR = reduceVtype(taxis)
            del taxis
            yield (period, quota, vtypeDictR, taxiSum)
activemqartemisaddress_controller.go

// The head of this file (package clause and imports) is truncated in the dump;
// only the tail of the logger declaration survives. The declaration below is a
// reconstruction and assumes the usual controller-runtime logf alias:
var log = logf.Log.WithName("controller_v2alpha2activemqartemisaddress")

var namespacedNameToAddressName = make(map[types.NamespacedName]brokerv2alpha2.ActiveMQArtemisAddress)

// This channel is used to receive new ready pods
var C = make(chan types.NamespacedName)
/**
 * USER ACTION REQUIRED: This is a scaffold file intended for the user to modify with their own Controller
 * business logic. Delete these comments after modifying this file.
 */

// Add creates a new ActiveMQArtemisAddress Controller and adds it to the Manager. The Manager will set fields on the Controller
// and Start it when the Manager is Started.
func Add(mgr manager.Manager) error {
	return add(mgr, newReconciler(mgr))
}

// newReconciler returns a new reconcile.Reconciler
func newReconciler(mgr manager.Manager) reconcile.Reconciler {
	go setupAddressObserver(mgr, C)
	return &ReconcileActiveMQArtemisAddress{client: mgr.GetClient(), scheme: mgr.GetScheme()}
}
func setupAddressObserver(mgr manager.Manager, c chan types.NamespacedName) {
	log.Info("Setting up address observer")
	cfg, err := clientcmd.BuildConfigFromFlags("", "")
	if err != nil {
		// logr's Error takes key/value pairs after the message, so the
		// original "%s"-style format arguments are dropped here.
		log.Error(err, "Error building kubeconfig")
	}
	kubeClient, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Error(err, "Error building kubernetes clientset")
	}
	namespace, err := k8sutil.GetWatchNamespace()
	if err != nil {
		log.Error(err, "Failed to get watch namespace")
		return
	}
	observer := NewAddressObserver(kubeClient, namespace, mgr.GetClient())
	if err = observer.Run(C); err != nil {
		log.Error(err, "Error running controller")
	}
	log.Info("Finish setup address observer")
}
// add adds a new Controller to mgr with r as the reconcile.Reconciler
func add(mgr manager.Manager, r reconcile.Reconciler) error {
	// Create a new controller
	c, err := controller.New("v2alpha2activemqartemisaddress-controller", mgr, controller.Options{Reconciler: r})
	if err != nil {
		return err
	}
	// Watch for changes to primary resource ActiveMQArtemisAddress
	err = c.Watch(&source.Kind{Type: &brokerv2alpha2.ActiveMQArtemisAddress{}}, &handler.EnqueueRequestForObject{})
	if err != nil {
		return err
	}
	// Watch for changes to secondary resource Pods and requeue the owner ActiveMQArtemisAddress
	err = c.Watch(&source.Kind{Type: &corev1.Pod{}}, &handler.EnqueueRequestForOwner{
		IsController: true,
		OwnerType:    &brokerv2alpha2.ActiveMQArtemisAddress{},
	})
	if err != nil {
		return err
	}
	return nil
}
var _ reconcile.Reconciler = &ReconcileActiveMQArtemisAddress{}

// ReconcileActiveMQArtemisAddress reconciles a ActiveMQArtemisAddress object
type ReconcileActiveMQArtemisAddress struct {
	// This client, initialized using mgr.Client() above, is a split client
	// that reads objects from the cache and writes to the apiserver
	client client.Client
	scheme *runtime.Scheme
}

// Reconcile reads the state of the cluster for a ActiveMQArtemisAddress object and makes changes based on the state read
// and what is in the ActiveMQArtemisAddress.Spec
// Note:
// The Controller will requeue the Request to be processed again if the returned error is non-nil or
// Result.Requeue is true, otherwise upon completion it will remove the work from the queue.
func (r *ReconcileActiveMQArtemisAddress) Reconcile(request reconcile.Request) (reconcile.Result, error) {
	reqLogger := log.WithValues("Request.Namespace", request.Namespace, "Request.Name", request.Name)
	reqLogger.Info("Reconciling ActiveMQArtemisAddress")

	// Fetch the ActiveMQArtemisAddress instance
	instance := &brokerv2alpha2.ActiveMQArtemisAddress{}
	err := r.client.Get(context.TODO(), request.NamespacedName, instance)
	if err != nil {
		// Delete action: the CR is gone, so remove the queue from the broker if requested
		addressInstance, lookupSucceeded := namespacedNameToAddressName[request.NamespacedName]
		if lookupSucceeded {
			if addressInstance.Spec.RemoveFromBrokerOnDelete {
				err = deleteQueue(&addressInstance, request, r.client)
			} else {
				log.Info("Not to delete address", "address", addressInstance)
			}
			delete(namespacedNameToAddressName, request.NamespacedName)
		}
		if errors.IsNotFound(err) {
			// Request object not found, could have been deleted after reconcile request.
			// Owned objects are automatically garbage collected. For additional cleanup logic use finalizers.
			// Return and don't requeue
			return reconcile.Result{}, nil
		}
		log.Error(err, "Requeue the request for error")
		// Error reading the object - requeue the request.
		return reconcile.Result{}, err
	} else {
		err = createQueue(instance, request, r.client)
		if nil == err {
			namespacedNameToAddressName[request.NamespacedName] = *instance
		}
	}
	return reconcile.Result{}, nil
}
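// A minimal sketch of the custom resource this Reconcile loop reacts to,
// assuming the CRD's JSON field names mirror the Spec fields used above
// (apiVersion group and field casing are assumptions, not part of this dump):
//
//   apiVersion: broker.amq.io/v2alpha2
//   kind: ActiveMQArtemisAddress
//   metadata:
//     name: my-address
//   spec:
//     addressName: myAddress
//     queueName: myQueue
//     routingType: anycast
//     removeFromBrokerOnDelete: true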
func createQueue(instance *brokerv2alpha2.ActiveMQArtemisAddress, request reconcile.Request, client client.Client) error {
	reqLogger := log.WithValues("Request.Namespace", request.Namespace, "Request.Name", request.Name)
	reqLogger.Info("Creating ActiveMQArtemisAddress")

	var err error = nil
	artemisArray := getPodBrokers(instance, request, client)
	if nil != artemisArray {
		for _, a := range artemisArray {
			if nil == a {
				reqLogger.Info("Creating ActiveMQArtemisAddress artemisArray had a nil!")
				continue
			}
			// assign with "=" so the error propagates to the caller instead of
			// being shadowed by a loop-local ":=" declaration
			_, err = a.CreateQueue(instance.Spec.AddressName, instance.Spec.QueueName, instance.Spec.RoutingType)
			if nil != err {
				reqLogger.Info("Creating ActiveMQArtemisAddress error for " + instance.Spec.QueueName)
				break
			}
			reqLogger.Info("Created ActiveMQArtemisAddress for " + instance.Spec.QueueName)
		}
	}
	return err
}
func deleteQueue(instance *brokerv2alpha2.ActiveMQArtemisAddress, request reconcile.Request, client client.Client) error {
	reqLogger := log.WithValues("Request.Namespace", request.Namespace, "Request.Name", request.Name)
	reqLogger.Info("Deleting ActiveMQArtemisAddress")

	var err error = nil
	artemisArray := getPodBrokers(instance, request, client)
	if nil != artemisArray {
		for _, a := range artemisArray {
			// assign with "=" so the error propagates to the caller instead of
			// being shadowed by a loop-local ":=" declaration
			_, err = a.DeleteQueue(instance.Spec.QueueName)
			if nil != err {
				reqLogger.Info("Deleting ActiveMQArtemisAddress error for " + instance.Spec.QueueName)
				break
			}
			reqLogger.Info("Deleted ActiveMQArtemisAddress for " + instance.Spec.QueueName)
			reqLogger.Info("Checking parent address for bindings " + instance.Spec.AddressName)
			bindingsData, err := a.ListBindingsForAddress(instance.Spec.AddressName)
			if nil == err {
				if "" == bindingsData.Value {
					reqLogger.Info("No bindings found removing " + instance.Spec.AddressName)
					a.DeleteAddress(instance.Spec.AddressName)
				} else {
					reqLogger.Info("Bindings found, not removing " + instance.Spec.AddressName)
				}
			}
		}
	}
	return err
}
func getPodBrokers(instance *brokerv2alpha2.ActiveMQArtemisAddress, request reconcile.Request, client client.Client) []*mgmt.Artemis {
	reqLogger := log.WithValues("Request.Namespace", request.Namespace, "Request.Name", request.Name)
	reqLogger.Info("Getting Pod Brokers")

	var artemisArray []*mgmt.Artemis = nil
	var err error = nil

	ss.NameBuilder.Name()
	if err != nil {
		reqLogger.Error(err, "Failed to get the statefulset name")
	}

	// Check to see if the statefulset already exists
	ssNamespacedName := types.NamespacedName{
		Name:      ss.NameBuilder.Name(),
		Namespace: request.Namespace,
	}
	statefulset, err := ss.RetrieveStatefulSet(ss.NameBuilder.Name(), ssNamespacedName, client)
	if nil != err {
		reqLogger.Info("Statefulset: " + ssNamespacedName.Name + " not found")
	} else {
		reqLogger.Info("Statefulset: " + ssNamespacedName.Name + " found")
		pod := &corev1.Pod{}
		podNamespacedName := types.NamespacedName{
			Name:      statefulset.Name + "-0",
			Namespace: request.Namespace,
		}
		// For each of the replicas
		var i int = 0
		var replicas int = int(*statefulset.Spec.Replicas)
		artemisArray = make([]*mgmt.Artemis, 0, replicas)
		for i = 0; i < replicas; i++ {
			// (the loop body -- presumably building one management client per
			// broker pod -- is truncated in the source dump)
		}
	}
	return artemisArray
}
mod.rs

// The head of this file (imports and the Complex struct definition) is
// truncated in the dump; buildsine is reconstructed from the complete copy
// that appears later in the same dump.
pub fn buildsine(freq: f64, sps: f64, amp: f32) -> Option<Vec<Complex<f32>>> {
    // Do not build if too low in frequency.
    if freq.abs() < 500.0 {
        return Option::Some(vec![Complex { i: 1.0, q: 0.0 }]);
    }
    if freq * 4.0 > sps {
        return Option::None;
    }
    // How many of our smallest units of time represented
    // by a sample do we need for a full cycle of the
    // frequency.
    let timepersample = 1.0f64 / sps as f64;
    let units = ((1.0 / freq).abs() / timepersample).abs();
    // Try to find a multiple of units that is as close as possible
    // to a whole integer number of units.
    let mut low_diff = std::f64::MAX;
    let mut low_mul = 0usize;
    for x in 1..100000 {
        let m = units * x as f64;
        let diff = m - m.floor();
        if diff < low_diff {
            low_diff = diff;
            low_mul = x;
        }
    }
    let iunits = (units * low_mul as f64).floor() as usize;
    println!("pre-built cosine for freq({}) with units({}) and diff({})", freq, units, low_diff);
    let mut out: Vec<Complex<f32>> = Vec::new();
    for x in 0..iunits {
        let curtime = (x as f64) * timepersample;
        out.push(Complex {
            i: (curtime * freq * std::f64::consts::PI * 2.0).cos() as f32 * amp,
            q: (curtime * freq * std::f64::consts::PI * 2.0).sin() as f32 * amp,
        });
    }
    Option::Some(out)
}
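// A worked example of the search above (numbers hypothetical): at sps =
// 1,024,000 and freq = 10,000 Hz one cycle spans units = 102.4 samples, which
// is not a whole number, but 5 cycles span exactly 512 samples. The loop finds
// such a multiplier (low_mul = 5, low_diff = 0), so the table holds 512
// samples and can be replayed end-to-end without a phase seam.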
pub struct FMDemod {
    sps: f64,                  // input sample rate
    offset: f64,               // frequency offset of the channel of interest
    bw: f32,                   // channel bandwidth
    q0: usize,                 // input decimation counter
    q1: usize,                 // audio decimation counter
    li: f32,                   // previous sample, I component
    lq: f32,                   // previous sample, Q component
    ifs: Vec<Complex<f32>>,    // pre-built local-oscillator table from buildsine
    ifsndx: usize,             // index into ifs
    rsum: f32,                 // running sum of demodulated phase steps
    tapi: usize,               // FIR delay-line index
    tapvi: Vec<f32>,           // FIR delay line, I
    tapvq: Vec<f32>,           // FIR delay line, Q
    taps: Vec<f32>,            // FIR coefficients
    decim: usize,              // input decimation factor
    curslack: f32,             // accumulated fractional-decimation slack
    maxphase: f32,             // maximum plausible phase step (impulse limiter)
    slack: f32,                // fractional part of the audio decimation
    audiodecim: usize,         // audio decimation factor (ceil)
    pub sq: isize,             // squelch counter
    devsqlimit: isize,
    sindex: f64,
}
impl FMDemod {
    pub fn new(sps: f64, decim: usize, offset: f64, bw: f32, taps: Vec<f32>, devsqlimit: isize) -> FMDemod {
        let ifs = buildsine(offset, sps, 1.0).unwrap();

        let mut tapvi: Vec<f32> = Vec::new();
        let mut tapvq: Vec<f32> = Vec::new();
        for _ in 0..taps.len() {
            tapvi.push(0.0);
            tapvq.push(0.0);
        }

        // If the decimation is not set perfectly then we will have
        // a fractional part and we need to insert some padding when
        // it reaches a value representing a whole output sample.
        let actual_audio_decim = (sps / (decim as f64) / 16000.0) as f32;
        // Make sure that slack is not >= 1.0
        let slack = actual_audio_decim.fract();
        let pract_audio_decim = actual_audio_decim.ceil() as usize;

        let fmaxphaserot = ((std::f64::consts::PI * 2.0f64) / (sps / (decim as f64))) * bw as f64;

        println!("slack:{} pract_audio_decim:{} decim:{} actual_audio_decim:{}",
                 slack, pract_audio_decim, decim, actual_audio_decim);

        FMDemod {
            devsqlimit: devsqlimit,
            maxphase: fmaxphaserot as f32,
            audiodecim: pract_audio_decim,
            slack: slack,
            sps: sps,
            offset: offset,
            bw: bw,
            li: 0.0,
            lq: 0.0,
            q0: 0,
            q1: 0,
            ifs: ifs,
            ifsndx: 0,
            rsum: 0.0,
            tapi: 0,
            tapvi: tapvi,
            tapvq: tapvq,
            taps: taps,
            decim: decim,
            curslack: 0.0,
            sq: 0,
            sindex: 0.0,
        }
    }
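    // A worked example of the slack bookkeeping (numbers hypothetical): at
    // sps = 1,000,000 and decim = 10 the filtered stream runs at 100,000
    // samples/s, so one 16,000 Hz audio sample spans 6.25 input samples.
    // pract_audio_decim becomes 7 and slack = 0.25; every audio sample
    // accumulates 0.25 into curslack, and each time curslack reaches 1.0
    // work() skips one extra input sample to stay on rate.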
    pub fn work(&mut self, stream: &Vec<Complex<f32>>) -> Vec<f32> {
        let mut buf: Vec<f32> = Vec::with_capacity(stream.len() / self.decim / self.audiodecim);
        let mut lr: f32 = 0.0;
        for x in 0..stream.len() {
            let mut s = stream[x].clone();

            // Mix with the pre-built local oscillator to shift the channel
            // at `offset` down to baseband.
            s.mul(&self.ifs[self.ifsndx]);
            self.ifsndx = if self.ifsndx + 1 >= self.ifs.len() {
                0
            } else {
                self.ifsndx + 1
            };

            if self.q0 == self.decim {
                self.q0 = 0;

                if self.curslack >= 1.0 {
                    // Hopefully, the slack is < 1.0
                    self.curslack = self.curslack.fract();
                    self.li = s.i;
                    self.lq = s.q;
                    continue;
                }

                // FIR low-pass filter over the decimated samples.
                self.tapvi[self.tapi as usize] = s.i;
                self.tapvq[self.tapi as usize] = s.q;
                let mut si = 0.0f32;
                let mut sq = 0.0f32;
                for ti in 0..self.taps.len() {
                    let off = if ti > self.tapi { self.taps.len() - (ti - self.tapi) } else { self.tapi - ti };
                    si += self.tapvi[off] * self.taps[ti];
                    sq += self.tapvq[off] * self.taps[ti];
                }
                self.tapi += 1;
                if self.tapi >= self.taps.len() {
                    self.tapi = 0;
                }
                s.i = si;
                s.q = sq;

                // FM demodulation: the instantaneous frequency is the phase
                // difference between consecutive samples, folded back into
                // the [-pi, pi] range.
                let a = s.i.atan2(s.q);
                let b = self.li.atan2(self.lq);
                let mut r = a - b;
                if r > std::f32::consts::PI {
                    r = std::f32::consts::PI * 2.0 - r;
                }
                if r < -std::f32::consts::PI {
                    r = std::f32::consts::PI * 2.0 + r;
                }

                // This limits sharp impulses where spikes have slipped
                // through our taps filter.
                if r.abs() < self.maxphase {
                    self.rsum += r;
                    lr = r;
                    self.sq -= 1;
                    if self.sq < -300 {
                        self.sq = -300;
                    }
                } else {
                    self.sq += 1;
                    if self.sq > 3 {
                        self.sq = 3;
                    }
                }

                self.q1 += 1;
                if self.q1 == self.audiodecim {
                    self.q1 = 0;
                    // Track how much we are off on the audio output
                    // due to decimation of the input stream by a value
                    // that causes our audio decimation to have a fractional
                    // part.
                    self.curslack += self.slack;
                    // (the remainder of this branch -- emitting the averaged
                    // sample into `buf` -- is truncated in the source dump)
                }
            }
            // (the tail of the per-sample loop -- advancing q0 and saving the
            // previous sample into li/lq -- is truncated in the source dump)
        }
        buf
    }
}
pub struct Alsa {
    sps: u32,
    pcm: PCM<Prepared>,
}

impl Alsa {
    pub fn new(sps: u32) -> Alsa {
        let pcm = PCM::open("default", Stream::Playback, Mode::Blocking).unwrap();
        let pcm = pcm.set_parameters(Format::FloatLE, Access::Interleaved, 1, sps as usize).ok().unwrap();
        Alsa { sps: sps, pcm: pcm }
    }

    pub fn write(&mut self, buf: &Vec<f32>) {
        self.pcm.write_interleaved(&buf).unwrap();
    }
}
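// A worked example of the phase fold in work() above (numbers hypothetical):
// with a previous phase b = 3.0 rad and current phase a = -3.0 rad, the raw
// difference r = -6.0 rad falls below -pi, and 2*pi + r gives ~0.283 rad --
// the small forward step the signal actually took, rather than a huge
// backward jump.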
// The body below (a complex multiply) is taken from the dump's FIM sample;
// the surrounding impl header, and the Complex struct it belongs to, do not
// appear in the dump and are assumed here:
impl Complex<f32> {
    fn mul(&mut self, a: &Complex<f32>) {
        let i = self.i * a.i - self.q * a.q;
        let q = self.i * a.q + self.q * a.i;
        self.i = i;
        self.q = q;
    }
}
// Do not build if too low in frequency.
if freq.abs() < 500.0 {
return Option::Some(vec![Complex { i: 1.0, q: 0.0 } ]);
}
if freq * 4.0 > sps {
return Option::None;
}
// How many of our smallest units of time represented
// by a sample do we need for a full cycle of the
// frequency.
let timepersample = 1.0f64 / sps as f64;
let units = ((1.0 / freq).abs() / timepersample).abs();
//println!("timepersample:{} freqfullwave:{}",
// timepersample,
// 1.0 / freq
//);
// Try to find a multiple of units that is as close as possible
// to a whole integer number of units.
let mut low_diff = std::f64::MAX;
let mut low_mul = 0usize;
for x in 1..100000 {
let m = units * x as f64;
let diff = m - m.floor();
if diff < low_diff {
low_diff = diff;
low_mul = x;
}
}
let iunits = (units * low_mul as f64).floor() as usize;
println!("pre-built cosine for freq({}) with units({}) and diff({})", freq, units, low_diff);
let mut out: Vec<Complex<f32>> = Vec::new();
for x in 0..iunits {
let curtime = (x as f64) * timepersample;
out.push(Complex {
i: (curtime * freq * std::f64::consts::PI * 2.0).cos() as f32 * amp,
q: (curtime * freq * std::f64::consts::PI * 2.0).sin() as f32 * amp,
});
}
Option::Some(out)
}
pub struct FMDemod {
sps: f64,
offset: f64,
bw: f32,
q0: usize,
q1: usize,
li: f32,
lq: f32,
ifs: Vec<Complex<f32>>,
ifsndx: usize,
rsum: f32,
tapi: usize,
tapvi: Vec<f32>,
tapvq: Vec<f32>,
taps: Vec<f32>,
decim: usize,
curslack: f32,
maxphase: f32,
slack: f32,
audiodecim: usize,
pub sq: isize,
devsqlimit: isize,
sindex: f64,
}
impl FMDemod {
pub fn new(sps: f64, decim: usize, offset: f64, bw: f32, taps: Vec<f32>, devsqlimit: isize) -> FMDemod {
let ifs = buildsine(offset, sps, 1.0).unwrap();
let mut tapvi: Vec<f32> = Vec::new();
let mut tapvq: Vec<f32> = Vec::new();
for x in 0..taps.len() {
tapvi.push(0.0);
tapvq.push(0.0);
}
// If the decimation is not set perfectly then we will have
// a fractional part and we need to insert some padding when
// it reaches a value representing a whole output sample.
let actual_audio_decim = (sps / (decim as f64) / 16000.0) as f32;
// Make sure that slack is not >= 1.0
let slack = actual_audio_decim.fract();
let pract_audio_decim = actual_audio_decim.ceil() as usize;
let fmaxphaserot = ((std::f64::consts::PI * 2.0f64) / (sps / (decim as f64))) * bw as f64;
println!("slack:{} pract_audio_decim:{} decim:{} actual_audio_decim:{}",
slack, pract_audio_decim, decim, actual_audio_decim
);
FMDemod {
devsqlimit: devsqlimit,
maxphase: fmaxphaserot as f32,
audiodecim: pract_audio_decim,
slack: slack,
sps: sps,
offset: offset,
bw: bw,
li: 0.0,
lq: 0.0,
q0: 0,
q1: 0,
ifs: ifs,
ifsndx: 0,
rsum: 0.0,
tapi: 0,
tapvi: tapvi,
tapvq: tapvq,
taps: taps,
decim: decim,
curslack: 0.0,
sq: 0,
sindex: 0.0,
}
}
pub fn work(&mut self, stream: &Vec<Complex<f32>>) -> Vec<f32> {
let mut buf: Vec<f32> = Vec::with_capacity(stream.len() / self.decim / self.audiodecim);
let timepersample = 1.0f64 / self.sps as f64;
let mut lr: f32 = 0.0;
for x in 0..stream.len() {
let mut s = stream[x].clone();
//let ifc = Complex {
// i: (timepersample * self.sindex * self.offset * std::f64::consts::PI * 2.0).cos() as f32,
// q: (timepersample * self.sindex * self.offset * std::f64::consts::PI * 2.0).sin() as f32,
//};
//self.sindex += 1.0;
s.mul(&self.ifs[self.ifsndx]);
self.ifsndx = if self.ifsndx + 1 >= self.ifs.len() {
0
} else {
self.ifsndx + 1
};
if self.q0 == self.decim {
self.q0 = 0;
if self.curslack >= 1.0 {
// A whole sample of slack has accumulated: drop this sample and
// carry only the fractional remainder forward.
self.curslack = self.curslack.fract();
self.li = s.i;
self.lq = s.q;
continue;
}
self.tapvi[self.tapi as usize] = s.i;
self.tapvq[self.tapi as usize] = s.q;
let mut si = 0.0f32;
let mut sq = 0.0f32;
for ti in 0..self.taps.len() {
let off = if ti > self.tapi { self.taps.len() - (ti - self.tapi) } else { self.tapi - ti };
si += self.tapvi[off] * self.taps[ti];
sq += self.tapvq[off] * self.taps[ti];
}
self.tapi += 1;
if self.tapi >= self.taps.len() {
self.tapi = 0;
}
s.i = si;
s.q = sq;
let a = s.i.atan2(s.q);
let b = self.li.atan2(self.lq);
let mut r = a - b;
// Wrap the phase delta into (-PI, PI].
if r > std::f32::consts::PI {
r -= std::f32::consts::PI * 2.0;
}
if r < -std::f32::consts::PI {
r += std::f32::consts::PI * 2.0;
}
// This limits sharp impulses where spikes have slipped
// through our taps filter.
if r.abs() < self.maxphase {
self.rsum += r;
lr = r;
self.sq -= 1;
if self.sq < -300 {
self.sq = -300;
}
} else {
//self.rsum += lr;
self.sq += 1;
if self.sq > 3 {
self.sq = 3;
}
}
self.q1 += 1;
if self | buildsine | identifier_name |
|
mod.rs | sps, 1.0).unwrap();
// Pre-fill the FIR delay lines with zeros, one slot per tap.
let tapvi: Vec<f32> = vec![0.0; taps.len()];
let tapvq: Vec<f32> = vec![0.0; taps.len()];
// If the input rate is not an exact multiple of the audio rate, the audio
// decimation factor has a fractional part; the accumulated fraction is
// later paid back by skipping one output sample (see curslack in work()).
let actual_audio_decim = (sps / (decim as f64) / 16000.0) as f32;
// fract() keeps only the fractional part, so slack is always in [0, 1).
let slack = actual_audio_decim.fract();
let pract_audio_decim = actual_audio_decim.ceil() as usize;
let fmaxphaserot = ((std::f64::consts::PI * 2.0f64) / (sps / (decim as f64))) * bw as f64;
println!("slack:{} pract_audio_decim:{} decim:{} actual_audio_decim:{}",
slack, pract_audio_decim, decim, actual_audio_decim
);
FMDemod {
devsqlimit: devsqlimit,
maxphase: fmaxphaserot as f32,
audiodecim: pract_audio_decim,
slack: slack,
sps: sps,
offset: offset,
bw: bw,
li: 0.0,
lq: 0.0,
q0: 0,
q1: 0,
ifs: ifs,
ifsndx: 0,
rsum: 0.0,
tapi: 0,
tapvi: tapvi,
tapvq: tapvq,
taps: taps,
decim: decim,
curslack: 0.0,
sq: 0,
sindex: 0.0,
}
}
pub fn work(&mut self, stream: &Vec<Complex<f32>>) -> Vec<f32> {
let mut buf: Vec<f32> = Vec::with_capacity(stream.len() / self.decim / self.audiodecim);
let timepersample = 1.0f64 / self.sps as f64;
let mut lr: f32 = 0.0;
for x in 0..stream.len() {
let mut s = stream[x].clone();
//let ifc = Complex {
// i: (timepersample * self.sindex * self.offset * std::f64::consts::PI * 2.0).cos() as f32,
// q: (timepersample * self.sindex * self.offset * std::f64::consts::PI * 2.0).sin() as f32,
//};
//self.sindex += 1.0;
s.mul(&self.ifs[self.ifsndx]);
self.ifsndx = if self.ifsndx + 1 >= self.ifs.len() {
0
} else {
self.ifsndx + 1
};
if self.q0 == self.decim {
self.q0 = 0;
if self.curslack >= 1.0 {
// A whole sample of slack has accumulated: drop this sample and
// carry only the fractional remainder forward.
self.curslack = self.curslack.fract();
self.li = s.i;
self.lq = s.q;
continue;
}
self.tapvi[self.tapi as usize] = s.i;
self.tapvq[self.tapi as usize] = s.q;
let mut si = 0.0f32;
let mut sq = 0.0f32;
for ti in 0..self.taps.len() {
let off = if ti > self.tapi { self.taps.len() - (ti - self.tapi) } else { self.tapi - ti };
si += self.tapvi[off] * self.taps[ti];
sq += self.tapvq[off] * self.taps[ti];
}
self.tapi += 1;
if self.tapi >= self.taps.len() {
self.tapi = 0;
}
s.i = si;
s.q = sq;
let a = s.i.atan2(s.q);
let b = self.li.atan2(self.lq);
let mut r = a - b;
// Wrap the phase delta into (-PI, PI].
if r > std::f32::consts::PI {
r -= std::f32::consts::PI * 2.0;
}
if r < -std::f32::consts::PI {
r += std::f32::consts::PI * 2.0;
}
// This limits sharp impulses where spikes have slipped
// through our taps filter.
if r.abs() < self.maxphase {
self.rsum += r;
lr = r;
self.sq -= 1;
if self.sq < -300 {
self.sq = -300;
}
} else {
//self.rsum += lr;
self.sq += 1;
if self.sq > 3 {
self.sq = 3;
}
}
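// sq drifts negative while phase steps stay plausible and climbs when they
// exceed maxphase; a positive sq below mutes the output, pushing silence
// instead of the averaged phase delta.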
self.q1 += 1;
if self.q1 == self.audiodecim {
self.q1 = 0;
// Track how much we are off on the audio output
// due to decimation of the input stream by a value
// that causes our audio decimation to have a fractional
// part.
self.curslack += self.slack;
self.rsum /= self.audiodecim as f32;
if self.sq > 0 {
buf.push(0.0);
} else {
buf.push(self.rsum);
}
self.rsum = 0f32;
}
}
self.li = s.i;
self.lq = s.q;
self.q0 += 1;
}
// Return the buffer containing the demodulated data.
buf
}
}
#[inline]
fn u16tou8ale(v: u16) -> [u8; 2] {
[
v as u8,
(v >> 8) as u8,
]
}
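// These byte-order helpers could be replaced by the standard library's
// v.to_le_bytes(), which produces the same little-endian arrays.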
// little endian
#[inline]
fn u32tou8ale(v: u32) -> [u8; 4] {
[
v as u8,
(v >> 8) as u8,
(v >> 16) as u8,
(v >> 24) as u8,
]
}
pub fn wavei8write(path: String, sps: u32, buf: &Vec<f32>) {
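// Note: despite the i8 in the name, this writes 32-bit IEEE float samples,
// which is what the header below declares (format 3, 4 bytes per sample).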
use std::fs::File;
use std::io::Write;
let datatotalsize = buf.len() as u32 * 4;
let mut fd = File::create(path).unwrap();
fd.write("RIFF".as_bytes()); // 4
fd.write(&u32tou8ale((datatotalsize + 44) - 8)); // filesize - 8 // 4
fd.write("WAVE".as_bytes()); // // 4
fd.write("fmt ".as_bytes()); // <format marker> // 4
fd.write(&u32tou8ale(16)); // <format data length> // 4
fd.write(&u16tou8ale(3)); // format 3 = IEEE float, matching the f32 samples // 2
fd.write(&u16tou8ale(1)); // 1 channel // 2
fd.write(&u32tou8ale(sps)); // sample frequency/rate // 4
fd.write(&u32tou8ale(sps * 4)); // sps * bitsize * channels / 8 (byte rate) // 4
fd.write(&u16tou8ale(4)); // bitsize * channels / 8 (block-align) // 2
fd.write(&u16tou8ale(32)); // bits per sample // 2
fd.write("data".as_bytes()); // <data marker> // 4
fd.write(&u32tou8ale(datatotalsize)); // datasize = filesize - 44 // 4
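// Header size check: 4+4+4 + 4+4 + 2+2+4+4+2+2 + 4+4 = 44 bytes, matching
// the "+ 44" and "- 44" offsets used in the size fields above.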
| for x in 0..buf.len() {
fd.write_f32::<LittleEndian>(buf[x]);
}
//unsafe { | random_line_split |
|
main.rs | , is it worth
// keeping some scratch mem for this and running our own StrPool?
// TODO for lint violations of names, emit a refactor script
#[macro_use]
extern crate log;
extern crate getopts;
extern crate rustc;
extern crate rustc_driver;
extern crate syntax;
extern crate strings;
use rustc::session::Session;
use rustc::session::config::{self, Input};
use rustc_driver::{driver, CompilerCalls, Compilation};
use syntax::ast;
use syntax::codemap::CodeMap;
use syntax::diagnostics;
use syntax::visit;
use std::path::PathBuf;
use std::collections::HashMap;
use changes::ChangeSet;
use visitor::FmtVisitor;
mod changes;
mod visitor;
mod functions;
mod missed_spans;
mod lists;
mod utils;
mod types;
mod expr;
mod imports;
const IDEAL_WIDTH: usize = 80;
const LEEWAY: usize = 5;
const MAX_WIDTH: usize = 100;
const MIN_STRING: usize = 10;
const TAB_SPACES: usize = 4;
const FN_BRACE_STYLE: BraceStyle = BraceStyle::SameLineWhere;
const FN_RETURN_INDENT: ReturnIndent = ReturnIndent::WithArgs;
// When we get scoped annotations, we should have rustfmt::skip.
const SKIP_ANNOTATION: &'static str = "rustfmt_skip";
#[derive(Copy, Clone)]
pub enum | {
Overwrite,
// str is the extension of the new file
NewFile(&'static str),
// Write the output to stdout.
Display,
// Return the result as a mapping from filenames to StringBuffers.
Return(&'static Fn(HashMap<String, String>)),
}
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum BraceStyle {
AlwaysNextLine,
PreferSameLine,
// Prefer same line except where there is a where clause, in which case force
// the brace to the next line.
SameLineWhere,
}
// How to indent a function's return type.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum ReturnIndent {
// Aligned with the arguments
WithArgs,
// Aligned with the where clause
WithWhereClause,
}
// Formatting which depends on the AST.
fn fmt_ast<'a>(krate: &ast::Crate, codemap: &'a CodeMap) -> ChangeSet<'a> {
let mut visitor = FmtVisitor::from_codemap(codemap);
visit::walk_crate(&mut visitor, krate);
let files = codemap.files.borrow();
if let Some(last) = files.last() {
visitor.format_missing(last.end_pos);
}
visitor.changes
}
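// fmt_ast walks the crate once; anything the visitor cannot reach, such as
// comments and whitespace, is reconciled afterwards by format_missing.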
// Formatting done on a char by char or line by line basis.
// TODO warn on TODOs and FIXMEs without an issue number
// TODO warn on bad license
// TODO other stuff for parity with make tidy
fn fmt_lines(changes: &mut ChangeSet) {
let mut truncate_todo = Vec::new();
// Iterate over the chars in the change set.
for (f, text) in changes.text() {
let mut trims = vec![];
let mut last_wspace: Option<usize> = None;
let mut line_len = 0;
let mut cur_line = 1;
let mut newline_count = 0;
for (c, b) in text.chars() {
if c == '\n' { // TODO: test for \r too
// Check for (and record) trailing whitespace.
if let Some(lw) = last_wspace {
trims.push((cur_line, lw, b));
line_len -= b - lw;
}
// Check for any line width errors we couldn't correct.
if line_len > MAX_WIDTH {
// TODO store the error rather than reporting immediately.
println!("Rustfmt couldn't fix (sorry). {}:{}: line longer than {} characters",
f, cur_line, MAX_WIDTH);
}
line_len = 0;
cur_line += 1;
newline_count += 1;
last_wspace = None;
} else {
newline_count = 0;
line_len += 1;
if c.is_whitespace() {
if last_wspace.is_none() {
last_wspace = Some(b);
}
} else {
last_wspace = None;
}
}
}
if newline_count > 1 {
debug!("track truncate: {} {} {}", f, text.len, newline_count);
truncate_todo.push((f.to_string(), text.len - newline_count + 1))
}
for &(l, _, _) in trims.iter() {
// TODO store the error rather than reporting immediately.
println!("Rustfmt left trailing whitespace at {}:{} (sorry)", f, l);
}
}
for (f, l) in truncate_todo {
changes.get_mut(&f).truncate(l);
}
}
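// fmt_lines is lint-style cleanup: it reports overlong lines and trailing
// whitespace it could not fix, and the truncate pass above trims runs of
// blank lines left at the end of a file.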
struct RustFmtCalls {
input_path: Option<PathBuf>,
write_mode: WriteMode,
}
impl<'a> CompilerCalls<'a> for RustFmtCalls {
fn early_callback(&mut self,
_: &getopts::Matches,
_: &diagnostics::registry::Registry)
-> Compilation {
Compilation::Continue
}
fn some_input(&mut self,
input: Input,
input_path: Option<PathBuf>)
-> (Input, Option<PathBuf>) {
match input_path {
Some(ref ip) => self.input_path = Some(ip.clone()),
_ => {
// FIXME should handle string input and write to stdout or something
panic!("No input path");
}
}
(input, input_path)
}
fn no_input(&mut self,
_: &getopts::Matches,
_: &config::Options,
_: &Option<PathBuf>,
_: &Option<PathBuf>,
_: &diagnostics::registry::Registry)
-> Option<(Input, Option<PathBuf>)> {
panic!("No input supplied to RustFmt");
}
fn late_callback(&mut self,
_: &getopts::Matches,
_: &Session,
_: &Input,
_: &Option<PathBuf>,
_: &Option<PathBuf>)
-> Compilation {
Compilation::Continue
}
fn build_controller(&mut self, _: &Session) -> driver::CompileController<'a> {
let write_mode = self.write_mode;
let mut control = driver::CompileController::basic();
control.after_parse.stop = Compilation::Stop;
control.after_parse.callback = box move |state| {
let krate = state.krate.unwrap();
let codemap = state.session.codemap();
let mut changes = fmt_ast(krate, codemap);
// For some reason, the codemap does not include terminating newlines
// so we must add one on for each file. This is sad.
changes.append_newlines();
fmt_lines(&mut changes);
// FIXME(#5) Should be user specified whether to show or replace.
let result = changes.write_all_files(write_mode);
match result {
Err(msg) => println!("Error writing files: {}", msg),
Ok(result) => {
if let WriteMode::Return(callback) = write_mode {
callback(result);
}
}
}
};
control
}
}
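// Running as a rustc driver with Compilation::Stop after parsing means no
// macro expansion, name resolution, or codegen ever runs; rustfmt only
// needs the AST and the codemap.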
fn run(args: Vec<String>, write_mode: WriteMode) {
let mut call_ctxt = RustFmtCalls { input_path: None, write_mode: write_mode };
rustc_driver::run_compiler(&args, &mut call_ctxt);
}
#[cfg(not(test))]
fn main() {
let args: Vec<_> = std::env::args().collect();
//run(args, WriteMode::Display);
run(args, WriteMode::Overwrite);
std::env::set_exit_status(0);
// TODO unit tests
// let fmt = ListFormatting {
// tactic: ListTactic::Horizontal,
// separator: ",",
// trailing_separator: SeparatorTactic::Vertical,
// indent: 2,
// h_width: 80,
// v_width: 100,
// };
// let inputs = vec![(format!("foo"), String::new()),
// (format!("foo"), String::new()),
// (format!("foo"), String::new()),
// (format!("foo"), String::new()),
// (format!("foo"), String::new()),
// (format!("foo"), String::new()),
// (format!("foo"), String::new()),
// (format!("foo"), String::new())];
// let s = write_list(&inputs, &fmt);
// println!(" {}", s);
}
#[cfg(test)]
mod test {
use std::collections::HashMap;
use std::fs;
use std::io::Read;
use std::sync::atomic;
use super::*;
use super::run;
// For now, the only supported regression tests are idempotent tests - the input and
// output must match exactly.
// FIXME(#28) would be good to check for error messages and fail on them, or at least report.
#[test]
fn idempotent_tests() {
println!("Idempotent tests:");
FAILURES.store(0, atomic::Ordering::Relaxed);
// Get all files in the tests/idem directory
let files = fs::read_dir("tests/idem").unwrap();
// For each file, run rustfmt and collect the | WriteMode | identifier_name |
main.rs | , is it worth
// keeping some scratch mem for this and running our own StrPool?
// TODO for lint violations of names, emit a refactor script
#[macro_use]
extern crate log;
extern crate getopts;
extern crate rustc;
extern crate rustc_driver;
extern crate syntax;
extern crate strings;
use rustc::session::Session;
use rustc::session::config::{self, Input};
use rustc_driver::{driver, CompilerCalls, Compilation};
use syntax::ast;
use syntax::codemap::CodeMap;
use syntax::diagnostics;
use syntax::visit;
use std::path::PathBuf;
use std::collections::HashMap;
use changes::ChangeSet;
use visitor::FmtVisitor;
mod changes;
mod visitor;
mod functions;
mod missed_spans;
mod lists;
mod utils;
mod types;
mod expr;
mod imports;
const IDEAL_WIDTH: usize = 80;
const LEEWAY: usize = 5;
const MAX_WIDTH: usize = 100;
const MIN_STRING: usize = 10;
const TAB_SPACES: usize = 4;
const FN_BRACE_STYLE: BraceStyle = BraceStyle::SameLineWhere;
const FN_RETURN_INDENT: ReturnIndent = ReturnIndent::WithArgs;
// When we get scoped annotations, we should have rustfmt::skip.
const SKIP_ANNOTATION: &'static str = "rustfmt_skip";
#[derive(Copy, Clone)]
pub enum WriteMode {
Overwrite,
// str is the extension of the new file
NewFile(&'static str),
// Write the output to stdout.
Display,
// Return the result as a mapping from filenames to StringBuffers.
Return(&'static Fn(HashMap<String, String>)),
}
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum BraceStyle {
AlwaysNextLine,
PreferSameLine,
// Prefer same line except where there is a where clause, in which case force
// the brace to the next line.
SameLineWhere,
}
// How to indent a function's return type.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum ReturnIndent {
// Aligned with the arguments
WithArgs,
// Aligned with the where clause
WithWhereClause,
}
// Formatting which depends on the AST.
fn fmt_ast<'a>(krate: &ast::Crate, codemap: &'a CodeMap) -> ChangeSet<'a> {
let mut visitor = FmtVisitor::from_codemap(codemap);
visit::walk_crate(&mut visitor, krate);
let files = codemap.files.borrow();
if let Some(last) = files.last() {
visitor.format_missing(last.end_pos);
}
visitor.changes
}
// Formatting done on a char by char or line by line basis.
// TODO warn on TODOs and FIXMEs without an issue number
// TODO warn on bad license
// TODO other stuff for parity with make tidy
fn fmt_lines(changes: &mut ChangeSet) {
let mut truncate_todo = Vec::new();
// Iterate over the chars in the change set.
for (f, text) in changes.text() {
let mut trims = vec![];
let mut last_wspace: Option<usize> = None;
let mut line_len = 0;
let mut cur_line = 1;
let mut newline_count = 0;
for (c, b) in text.chars() {
if c == '\n' { // TODO: test for \r too
// Check for (and record) trailing whitespace.
if let Some(lw) = last_wspace {
trims.push((cur_line, lw, b));
line_len -= b - lw;
}
// Check for any line width errors we couldn't correct.
if line_len > MAX_WIDTH {
// TODO store the error rather than reporting immediately.
println!("Rustfmt couldn't fix (sorry). {}:{}: line longer than {} characters",
f, cur_line, MAX_WIDTH);
}
line_len = 0;
cur_line += 1;
newline_count += 1;
last_wspace = None;
} else {
newline_count = 0;
line_len += 1;
if c.is_whitespace() {
if last_wspace.is_none() {
last_wspace = Some(b);
}
} else {
last_wspace = None;
}
}
}
if newline_count > 1 {
debug!("track truncate: {} {} {}", f, text.len, newline_count);
truncate_todo.push((f.to_string(), text.len - newline_count + 1))
}
for &(l, _, _) in trims.iter() {
// TODO store the error rather than reporting immediately.
println!("Rustfmt left trailing whitespace at {}:{} (sorry)", f, l);
}
}
for (f, l) in truncate_todo {
changes.get_mut(&f).truncate(l);
}
}
struct RustFmtCalls {
input_path: Option<PathBuf>,
write_mode: WriteMode,
}
impl<'a> CompilerCalls<'a> for RustFmtCalls {
fn early_callback(&mut self,
_: &getopts::Matches,
_: &diagnostics::registry::Registry)
-> Compilation {
Compilation::Continue
}
fn some_input(&mut self,
input: Input,
input_path: Option<PathBuf>)
-> (Input, Option<PathBuf>) {
match input_path {
Some(ref ip) => self.input_path = Some(ip.clone()),
_ => {
// FIXME should handle string input and write to stdout or something
panic!("No input path");
}
}
(input, input_path)
}
fn no_input(&mut self,
_: &getopts::Matches,
_: &config::Options,
_: &Option<PathBuf>,
_: &Option<PathBuf>,
_: &diagnostics::registry::Registry)
-> Option<(Input, Option<PathBuf>)> {
panic!("No input supplied to RustFmt");
}
fn late_callback(&mut self,
_: &getopts::Matches,
_: &Session,
_: &Input,
_: &Option<PathBuf>,
_: &Option<PathBuf>)
-> Compilation {
Compilation::Continue
}
fn build_controller(&mut self, _: &Session) -> driver::CompileController<'a> {
let write_mode = self.write_mode;
let mut control = driver::CompileController::basic();
control.after_parse.stop = Compilation::Stop;
control.after_parse.callback = box move |state| {
let krate = state.krate.unwrap();
let codemap = state.session.codemap();
let mut changes = fmt_ast(krate, codemap);
// For some reason, the codemap does not include terminating newlines
// so we must add one on for each file. This is sad.
changes.append_newlines();
fmt_lines(&mut changes);
// FIXME(#5) Should be user specified whether to show or replace.
let result = changes.write_all_files(write_mode);
match result {
Err(msg) => println!("Error writing files: {}", msg),
Ok(result) => {
if let WriteMode::Return(callback) = write_mode {
callback(result);
}
}
}
};
control
}
}
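// WriteMode::Return appears to exist for the test harness: the callback
// receives the formatted text per file instead of touching the filesystem.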
fn run(args: Vec<String>, write_mode: WriteMode) {
let mut call_ctxt = RustFmtCalls { input_path: None, write_mode: write_mode };
rustc_driver::run_compiler(&args, &mut call_ctxt);
}
#[cfg(not(test))]
fn main() {
let args: Vec<_> = std::env::args().collect();
//run(args, WriteMode::Display);
run(args, WriteMode::Overwrite);
std::env::set_exit_status(0);
// TODO unit tests
// let fmt = ListFormatting {
// tactic: ListTactic::Horizontal,
// separator: ",",
// trailing_separator: SeparatorTactic::Vertical,
// indent: 2,
// h_width: 80,
// v_width: 100,
// };
// let inputs = vec![(format!("foo"), String::new()),
// (format!("foo"), String::new()),
// (format!("foo"), String::new()),
// (format!("foo"), String::new()),
// (format!("foo"), String::new()),
// (format!("foo"), String::new()),
// (format!("foo"), String::new()),
// (format!("foo"), String::new())];
// let s = write_list(&inputs, &fmt);
// println!(" {}", s);
} | #[cfg(test)]
mod test {
use std::collections::HashMap;
use std::fs;
use std::io::Read;
use std::sync::atomic;
use super::*;
use super::run;
// For now, the only supported regression tests are idempotent tests - the input and
// output must match exactly.
// FIXME(#28) would be good to check for error messages and fail on them, or at least report.
#[test]
fn idempotent_tests() {
println!("Idempotent tests:");
FAILURES.store(0, atomic::Ordering::Relaxed);
// Get all files in the tests/idem directory
let files = fs::read_dir("tests/idem").unwrap();
// For each file, run rustfmt and collect the | random_line_split |
|
main.rs | ::var("USER").unwrap();
format!("postgres://{}@%2Frun%2Fpostgresql/pagefeed", user)
})
}
fn handle_request(req: &mut fastcgi::Request) -> Result<(), PagefeedError> {
let url = get_url(req)?;
let pathinfo = get_pathinfo(req);
let slug = pathinfo.trim_matches('/');
let mut w = io::BufWriter::new(req.stdout());
if slug.is_empty() {
handle_opml_request(&url, &mut w)
} else {
handle_feed_request(slug, &mut w)
}
}
fn handle_opml_request<W: Write>(url: &str, out: &mut W) -> Result<(), PagefeedError> {
let mut conn = database_connection()?;
let mut trans = conn.transaction()?;
let pages = get_enabled_pages(&mut trans)?;
trans.commit()?;
out.write_all(b"Content-Type: application/xml\n\n")?;
build_opml(url, &pages, out)?;
Ok(())
}
fn handle_feed_request<W: Write>(slug: &str, out: &mut W) -> Result<(), PagefeedError> {
let mut conn = database_connection()?;
let mut trans = conn.transaction()?;
let page = get_page(&mut trans, slug)?;
let page = page
.map(|page| refresh_page(&mut trans, page))
.transpose()?;
trans.commit()?;
match page {
None => {
out.write_all(b"Status: 404 Not Found\n\n")?;
Ok(())
}
Some(page) => {
let feed = build_feed(&page);
out.write_all(b"Content-Type: application/rss+xml\n\n")?;
feed.write_to(out)?;
Ok(())
}
}
}
fn get_url(req: &fastcgi::Request) -> Result<String, PagefeedError> {
use std::io::{Error, ErrorKind};
let https = match req.param("HTTPS") {
Some(ref s) => s == "on",
_ => false,
};
let server_addr = req
.param("SERVER_ADDR")
.ok_or_else(|| Error::new(ErrorKind::Other, "SERVER_ADDR unset"))?;
let server_port = req
.param("SERVER_PORT")
.ok_or_else(|| Error::new(ErrorKind::Other, "SERVER_PORT unset"))?
.parse::<u16>()
.map_err(|_| Error::new(ErrorKind::Other, "SERVER_PORT invalid"))?;
let mut script_name = req
.param("SCRIPT_NAME")
.ok_or_else(|| Error::new(ErrorKind::Other, "SCRIPT_NAME unset"))?;
if !script_name.starts_with('/') {
script_name.insert(0, '/')
}
if !script_name.ends_with('/') {
script_name.push('/')
}
Ok(match (https, server_port) {
(false, 80) => format!("http://{}{}", server_addr, script_name),
(false, _) => format!("http://{}:{}{}", server_addr, server_port, script_name),
(true, 443) => format!("https://{}{}", server_addr, script_name),
(true, _) => format!("https://{}:{}{}", server_addr, server_port, script_name),
})
}
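// Default ports (80/443) are collapsed out of the base URL above so feed
// links stay canonical behind a plain FastCGI deployment.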
fn get_pathinfo(req: &fastcgi::Request) -> String {
req.param("PATH_INFO").unwrap_or_default()
}
// ----------------------------------------------------------------------------
#[derive(Debug)]
enum PagefeedError {
Io(io::Error),
Postgres(postgres::error::Error),
QuickXml(quick_xml::de::DeError),
Regex(regex::Error),
Reqwest(reqwest::Error),
Rss(rss::Error),
}
impl From<io::Error> for PagefeedError {
fn from(err: io::Error) -> PagefeedError {
PagefeedError::Io(err)
}
}
impl From<postgres::error::Error> for PagefeedError {
fn from(err: postgres::error::Error) -> PagefeedError {
PagefeedError::Postgres(err)
}
}
impl From<regex::Error> for PagefeedError {
fn from(err: regex::Error) -> PagefeedError {
PagefeedError::Regex(err)
}
}
impl From<reqwest::Error> for PagefeedError {
fn from(err: reqwest::Error) -> PagefeedError {
PagefeedError::Reqwest(err)
}
}
impl From<rss::Error> for PagefeedError {
fn from(err: rss::Error) -> PagefeedError {
PagefeedError::Rss(err)
}
}
impl From<quick_xml::de::DeError> for PagefeedError {
fn from(err: quick_xml::de::DeError) -> PagefeedError {
PagefeedError::QuickXml(err)
}
}
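// These hand-written From impls stand in for a derive; a crate such as
// thiserror could generate them via #[from], at the cost of a dependency.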
// ----------------------------------------------------------------------------
fn build_feed(page: &Page) -> rss::Channel {
let mut items = vec![];
if page.last_modified.is_some() {
let guid = rss::GuidBuilder::default()
.value(format!("{}", page.item_id.unwrap().urn()))
.permalink(false)
.build();
let item = rss::ItemBuilder::default()
.title(page.name.to_owned())
.description(describe_page_status(page))
.link(page.url.to_owned())
.pub_date(page.last_modified.unwrap().to_rfc2822())
.guid(guid)
.build();
items.push(item);
}
rss::ChannelBuilder::default()
.title(page.name.to_owned())
.link(page.url.to_owned())
.items(items)
.build()
}
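// Each feed carries at most one item: the page's current state. item_id
// appears to be regenerated when the page changes (not shown here), so a
// change surfaces as a fresh item rather than an edit of an old one.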
fn describe_page_status(page: &Page) -> String {
page.last_error.as_ref().map_or_else(
|| format!("{} was updated.", page.name),
|err| format!("Error while checking {}: {}", page.name, err),
)
}
fn | <W: Write>(url: &str, pages: &[Page], out: &mut W) -> Result<(), PagefeedError> {
#[derive(serde::Serialize)]
#[serde(rename = "opml")]
struct Opml<'a> {
version: &'a str,
head: Head,
body: Body<'a>,
}
#[derive(serde::Serialize)]
struct Head {}
#[derive(serde::Serialize)]
struct Body<'a> {
outline: Vec<Outline<'a>>,
}
#[derive(serde::Serialize)]
struct Outline<'a> {
#[serde(rename = "type")]
typ: &'a str,
text: String,
#[serde(rename = "xmlUrl")]
xml_url: String,
#[serde(rename = "htmlUrl")]
html_url: &'a str,
}
write!(out, "{}", quick_xml::se::to_string(
&Opml {
version: "2.0",
head: Head {},
body: Body {
outline: pages
.iter()
.map(|page| Outline {
typ: "rss",
text: htmlescape::encode_minimal(&page.name),
xml_url: format!("{}{}", url, page.slug),
html_url: &page.url,
})
.collect(),
},
},
)?)?;
Ok(())
}
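// OPML is the import/export format feed readers understand; each outline
// entry pairs the page's human URL (htmlUrl) with its feed URL (xmlUrl).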
// ----------------------------------------------------------------------------
#[derive(Clone)]
enum PageStatus {
Unmodified,
Modified {
body_hash: Vec<u8>,
etag: Option<String>,
},
FetchError(String),
}
fn refresh_page(
conn: &mut postgres::Transaction,
page: Page,
) -> Result<Page, postgres::error::Error> {
if !page_needs_checking(&page) {
return Ok(page);
}
let status = check_page(&page);
match status {
PageStatus::Unmodified => update_page_unchanged(conn, &page)?,
PageStatus::Modified {
ref body_hash,
ref etag,
} => update_page_changed(conn, &page, etag, body_hash)?,
PageStatus::FetchError(ref error) => update_page_error(conn, &page, error)?,
}
get_page(conn, &page.slug)
.transpose()
.expect("page disappeared??")
}
fn page_needs_checking(page: &Page) -> bool {
chrono::Utc::now() >= page.next_check
}
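// next_check is computed in SQL (see the queries below) as the later of
// last_checked + check_interval and last_modified + cooldown, so a page is
// fetched at most once per interval and backs off after a change.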
fn check_page(page: &Page) -> PageStatus {
use reqwest::header;
use reqwest::StatusCode;
let client = reqwest::blocking::Client::new();
let mut request = client
.get(&page.url)
.header(header::USER_AGENT, "Mozilla/5.0");
if let Some(ref etag) = page.http_etag {
request = request.header(header::IF_NONE_MATCH, etag.to_string());
}
let status = request
.send()
.map_err(PagefeedError::from)
.and_then(|mut response| {
if response.status() == StatusCode::NOT_MODIFIED {
Ok(PageStatus::Unmodified)
} else {
let etag = response
.headers()
.get(header::ETAG)
.and_then(|x| x.to_str().ok())
.map(str::to_string);
let body_hash = hash(page, &mut response)?;
Ok(PageStatus::Modified { body_hash, etag })
}
})
.unwrap_or_else(|err| PageStatus::FetchError(format!("{:?}", err)));
match status {
PageStatus::Modified { ref body_hash, .. }
if Some(body_hash) == page | build_opml | identifier_name |
main.rs | (false, _) => format!("http://{}:{}{}", server_addr, server_port, script_name),
(true, 443) => format!("https://{}{}", server_addr, script_name),
(true, _) => format!("https://{}:{}{}", server_addr, server_port, script_name),
})
}
fn get_pathinfo(req: &fastcgi::Request) -> String {
req.param("PATH_INFO").unwrap_or_default()
}
// ----------------------------------------------------------------------------
#[derive(Debug)]
enum PagefeedError {
Io(io::Error),
Postgres(postgres::error::Error),
QuickXml(quick_xml::de::DeError),
Regex(regex::Error),
Reqwest(reqwest::Error),
Rss(rss::Error),
}
impl From<io::Error> for PagefeedError {
fn from(err: io::Error) -> PagefeedError {
PagefeedError::Io(err)
}
}
impl From<postgres::error::Error> for PagefeedError {
fn from(err: postgres::error::Error) -> PagefeedError {
PagefeedError::Postgres(err)
}
}
impl From<regex::Error> for PagefeedError {
fn from(err: regex::Error) -> PagefeedError {
PagefeedError::Regex(err)
}
}
impl From<reqwest::Error> for PagefeedError {
fn from(err: reqwest::Error) -> PagefeedError {
PagefeedError::Reqwest(err)
}
}
impl From<rss::Error> for PagefeedError {
fn from(err: rss::Error) -> PagefeedError {
PagefeedError::Rss(err)
}
}
impl From<quick_xml::de::DeError> for PagefeedError {
fn from(err: quick_xml::de::DeError) -> PagefeedError {
PagefeedError::QuickXml(err)
}
}
// ----------------------------------------------------------------------------
fn build_feed(page: &Page) -> rss::Channel {
let mut items = vec![];
if page.last_modified.is_some() {
let guid = rss::GuidBuilder::default()
.value(format!("{}", page.item_id.unwrap().urn()))
.permalink(false)
.build();
let item = rss::ItemBuilder::default()
.title(page.name.to_owned())
.description(describe_page_status(page))
.link(page.url.to_owned())
.pub_date(page.last_modified.unwrap().to_rfc2822())
.guid(guid)
.build();
items.push(item);
}
rss::ChannelBuilder::default()
.title(page.name.to_owned())
.link(page.url.to_owned())
.items(items)
.build()
}
fn describe_page_status(page: &Page) -> String {
page.last_error.as_ref().map_or_else(
|| format!("{} was updated.", page.name),
|err| format!("Error while checking {}: {}", page.name, err),
)
}
fn build_opml<W: Write>(url: &str, pages: &[Page], out: &mut W) -> Result<(), PagefeedError> {
#[derive(serde::Serialize)]
#[serde(rename = "opml")]
struct Opml<'a> {
version: &'a str,
head: Head,
body: Body<'a>,
}
#[derive(serde::Serialize)]
struct Head {}
#[derive(serde::Serialize)]
struct Body<'a> {
outline: Vec<Outline<'a>>,
}
#[derive(serde::Serialize)]
struct Outline<'a> {
#[serde(rename = "type")]
typ: &'a str,
text: String,
#[serde(rename = "xmlUrl")]
xml_url: String,
#[serde(rename = "htmlUrl")]
html_url: &'a str,
}
write!(out, "{}", quick_xml::se::to_string(
&Opml {
version: "2.0",
head: Head {},
body: Body {
outline: pages
.iter()
.map(|page| Outline {
typ: "rss",
text: htmlescape::encode_minimal(&page.name),
xml_url: format!("{}{}", url, page.slug),
html_url: &page.url,
})
.collect(),
},
},
)?)?;
Ok(())
}
// ----------------------------------------------------------------------------
#[derive(Clone)]
enum PageStatus {
Unmodified,
Modified {
body_hash: Vec<u8>,
etag: Option<String>,
},
FetchError(String),
}
fn refresh_page(
conn: &mut postgres::Transaction,
page: Page,
) -> Result<Page, postgres::error::Error> {
if !page_needs_checking(&page) {
return Ok(page);
}
let status = check_page(&page);
match status {
PageStatus::Unmodified => update_page_unchanged(conn, &page)?,
PageStatus::Modified {
ref body_hash,
ref etag,
} => update_page_changed(conn, &page, etag, body_hash)?,
PageStatus::FetchError(ref error) => update_page_error(conn, &page, error)?,
}
get_page(conn, &page.slug)
.transpose()
.expect("page disappeared??")
}
fn page_needs_checking(page: &Page) -> bool {
chrono::Utc::now() >= page.next_check
}
fn check_page(page: &Page) -> PageStatus {
use reqwest::header;
use reqwest::StatusCode;
let client = reqwest::blocking::Client::new();
let mut request = client
.get(&page.url)
.header(header::USER_AGENT, "Mozilla/5.0");
if let Some(ref etag) = page.http_etag {
request = request.header(header::IF_NONE_MATCH, etag.to_string());
}
let status = request
.send()
.map_err(PagefeedError::from)
.and_then(|mut response| {
if response.status() == StatusCode::NOT_MODIFIED {
Ok(PageStatus::Unmodified)
} else {
let etag = response
.headers()
.get(header::ETAG)
.and_then(|x| x.to_str().ok())
.map(str::to_string);
let body_hash = hash(page, &mut response)?;
Ok(PageStatus::Modified { body_hash, etag })
}
})
.unwrap_or_else(|err| PageStatus::FetchError(format!("{:?}", err)));
match status {
PageStatus::Modified { ref body_hash, .. }
if Some(body_hash) == page.http_body_hash.as_ref() =>
{
PageStatus::Unmodified
}
PageStatus::FetchError(ref error) if Some(error) == page.last_error.as_ref() => {
PageStatus::Unmodified
}
_ => status,
}
}
// ----------------------------------------------------------------------------
fn hash(page: &Page, r: &mut dyn io::Read) -> Result<Vec<u8>, PagefeedError> {
let mut buf = Vec::new();
r.read_to_end(&mut buf)?;
if let Some(delete_regex) = page.delete_regex.as_ref() {
let re = regex::bytes::Regex::new(delete_regex)?;
buf = re.replace_all(&buf, &b""[..]).into_owned();
}
use tiny_keccak::{Hasher, Sha3};
let mut sha3 = Sha3::v256();
sha3.update(&buf);
let mut res: [u8; 32] = [0; 32];
sha3.finalize(&mut res);
Ok(res.to_vec())
}
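// delete_regex strips volatile fragments (timestamps, tokens, counters)
// before hashing so cosmetic churn does not register as a change; SHA3-256
// is an arbitrary but stable digest choice here.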
// ----------------------------------------------------------------------------
fn get_enabled_pages(
conn: &mut postgres::Transaction,
) -> Result<Vec<Page>, postgres::error::Error> {
let query = "
select *,
greatest(
last_checked + check_interval,
last_modified + cooldown,
to_timestamp(0)
) as next_check
from pages
where enabled
";
conn.query(query, &[])
.map(|rows| rows.iter().map(instantiate_page).collect())
}
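// Both page queries materialize next_check in SQL so the scheduling rule
// lives in one place, next to the data it reads.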
fn get_page(
conn: &mut postgres::Transaction,
slug: &str,
) -> Result<Option<Page>, postgres::error::Error> {
let query = "
select *,
greatest(
last_checked + check_interval,
last_modified + cooldown,
to_timestamp(0)
) as next_check
from pages
where enabled and slug = $1
";
conn.query(query, &[&slug])
.map(|rows| rows.get(0).map(instantiate_page))
}
fn instantiate_page(row: &postgres::row::Row) -> Page {
Page {
slug: row.get("slug"),
name: row.get("name"),
url: row.get("url"),
//enabled: row.get("enabled"),
delete_regex: row.get("delete_regex"),
next_check: row.get("next_check"),
//last_checked: row.get("last_checked"),
last_modified: row.get("last_modified"),
last_error: row.get("last_error"),
item_id: row.get("item_id"),
http_etag: row.get("http_etag"),
http_body_hash: row.get("http_body_hash"),
}
}
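// The commented-out fields document columns that `select *` still returns
// but the program currently ignores.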
fn update_page_unchanged(
conn: &mut postgres::Transaction,
page: &Page,
) -> Result<(), postgres::error::Error> | {
let query = "
update pages
set last_checked = current_timestamp
where slug = $1
";
conn.execute(query, &[&page.slug])?;
Ok(())
} | identifier_body |
|
main.rs | ::var("USER").unwrap();
format!("postgres://{}@%2Frun%2Fpostgresql/pagefeed", user)
})
}
fn handle_request(req: &mut fastcgi::Request) -> Result<(), PagefeedError> {
let url = get_url(req)?;
let pathinfo = get_pathinfo(req);
let slug = pathinfo.trim_matches('/');
let mut w = io::BufWriter::new(req.stdout());
if slug.is_empty() {
handle_opml_request(&url, &mut w)
} else {
handle_feed_request(slug, &mut w)
}
}
fn handle_opml_request<W: Write>(url: &str, out: &mut W) -> Result<(), PagefeedError> {
let mut conn = database_connection()?;
let mut trans = conn.transaction()?;
let pages = get_enabled_pages(&mut trans)?;
trans.commit()?;
out.write_all(b"Content-Type: application/xml\n\n")?;
build_opml(url, &pages, out)?;
Ok(())
}
fn handle_feed_request<W: Write>(slug: &str, out: &mut W) -> Result<(), PagefeedError> {
let mut conn = database_connection()?;
let mut trans = conn.transaction()?;
let page = get_page(&mut trans, slug)?;
let page = page
.map(|page| refresh_page(&mut trans, page))
.transpose()?;
trans.commit()?;
match page {
None => {
out.write_all(b"Status: 404 Not Found\n\n")?;
Ok(())
}
Some(page) => {
let feed = build_feed(&page);
out.write_all(b"Content-Type: application/rss+xml\n\n")?;
feed.write_to(out)?;
Ok(())
}
}
}
fn get_url(req: &fastcgi::Request) -> Result<String, PagefeedError> {
use std::io::{Error, ErrorKind};
let https = match req.param("HTTPS") {
Some(ref s) => s == "on",
_ => false,
};
let server_addr = req
.param("SERVER_ADDR")
.ok_or_else(|| Error::new(ErrorKind::Other, "SERVER_ADDR unset"))?;
let server_port = req
.param("SERVER_PORT")
.ok_or_else(|| Error::new(ErrorKind::Other, "SERVER_PORT unset"))?
.parse::<u16>()
.map_err(|_| Error::new(ErrorKind::Other, "SERVER_PORT invalid"))?;
let mut script_name = req
.param("SCRIPT_NAME")
.ok_or_else(|| Error::new(ErrorKind::Other, "SCRIPT_NAME unset"))?;
if !script_name.starts_with('/') {
script_name.insert(0, '/')
}
if !script_name.ends_with('/') {
script_name.push('/')
}
Ok(match (https, server_port) {
(false, 80) => format!("http://{}{}", server_addr, script_name),
(false, _) => format!("http://{}:{}{}", server_addr, server_port, script_name),
(true, 443) => format!("https://{}{}", server_addr, script_name),
(true, _) => format!("https://{}:{}{}", server_addr, server_port, script_name),
})
}
fn get_pathinfo(req: &fastcgi::Request) -> String {
req.param("PATH_INFO").unwrap_or_default()
}
// ----------------------------------------------------------------------------
#[derive(Debug)]
enum PagefeedError {
Io(io::Error),
Postgres(postgres::error::Error),
QuickXml(quick_xml::de::DeError),
Regex(regex::Error),
Reqwest(reqwest::Error),
Rss(rss::Error),
}
impl From<io::Error> for PagefeedError {
fn from(err: io::Error) -> PagefeedError {
PagefeedError::Io(err)
}
}
impl From<postgres::error::Error> for PagefeedError {
fn from(err: postgres::error::Error) -> PagefeedError {
PagefeedError::Postgres(err)
}
}
impl From<regex::Error> for PagefeedError {
fn from(err: regex::Error) -> PagefeedError {
PagefeedError::Regex(err)
}
}
impl From<reqwest::Error> for PagefeedError {
fn from(err: reqwest::Error) -> PagefeedError {
PagefeedError::Reqwest(err)
}
}
impl From<rss::Error> for PagefeedError {
fn from(err: rss::Error) -> PagefeedError {
PagefeedError::Rss(err)
}
}
impl From<quick_xml::de::DeError> for PagefeedError {
fn from(err: quick_xml::de::DeError) -> PagefeedError {
PagefeedError::QuickXml(err)
}
}
// ----------------------------------------------------------------------------
fn build_feed(page: &Page) -> rss::Channel {
let mut items = vec![];
if page.last_modified.is_some() {
let guid = rss::GuidBuilder::default()
.value(format!("{}", page.item_id.unwrap().urn()))
.permalink(false)
.build();
let item = rss::ItemBuilder::default()
.title(page.name.to_owned())
.description(describe_page_status(page))
.link(page.url.to_owned())
.pub_date(page.last_modified.unwrap().to_rfc2822())
.guid(guid)
.build();
items.push(item);
}
rss::ChannelBuilder::default()
.title(page.name.to_owned())
.link(page.url.to_owned())
.items(items)
.build()
} | fn describe_page_status(page: &Page) -> String {
page.last_error.as_ref().map_or_else(
|| format!("{} was updated.", page.name),
|err| format!("Error while checking {}: {}", page.name, err),
)
}
fn build_opml<W: Write>(url: &str, pages: &[Page], out: &mut W) -> Result<(), PagefeedError> {
#[derive(serde::Serialize)]
#[serde(rename = "opml")]
struct Opml<'a> {
version: &'a str,
head: Head,
body: Body<'a>,
}
#[derive(serde::Serialize)]
struct Head {}
#[derive(serde::Serialize)]
struct Body<'a> {
outline: Vec<Outline<'a>>,
}
#[derive(serde::Serialize)]
struct Outline<'a> {
#[serde(rename = "type")]
typ: &'a str,
text: String,
#[serde(rename = "xmlUrl")]
xml_url: String,
#[serde(rename = "htmlUrl")]
html_url: &'a str,
}
write!(out, "{}", quick_xml::se::to_string(
&Opml {
version: "2.0",
head: Head {},
body: Body {
outline: pages
.iter()
.map(|page| Outline {
typ: "rss",
text: htmlescape::encode_minimal(&page.name),
xml_url: format!("{}{}", url, page.slug),
html_url: &page.url,
})
.collect(),
},
},
)?)?;
Ok(())
}
// ----------------------------------------------------------------------------
#[derive(Clone)]
enum PageStatus {
Unmodified,
Modified {
body_hash: Vec<u8>,
etag: Option<String>,
},
FetchError(String),
}
fn refresh_page(
conn: &mut postgres::Transaction,
page: Page,
) -> Result<Page, postgres::error::Error> {
if !page_needs_checking(&page) {
return Ok(page);
}
let status = check_page(&page);
match status {
PageStatus::Unmodified => update_page_unchanged(conn, &page)?,
PageStatus::Modified {
ref body_hash,
ref etag,
} => update_page_changed(conn, &page, etag, body_hash)?,
PageStatus::FetchError(ref error) => update_page_error(conn, &page, error)?,
}
get_page(conn, &page.slug)
.transpose()
.expect("page disappeared??")
}
fn page_needs_checking(page: &Page) -> bool {
chrono::Utc::now() >= page.next_check
}
fn check_page(page: &Page) -> PageStatus {
use reqwest::header;
use reqwest::StatusCode;
let client = reqwest::blocking::Client::new();
let mut request = client
.get(&page.url)
.header(header::USER_AGENT, "Mozilla/5.0");
if let Some(ref etag) = page.http_etag {
request = request.header(header::IF_NONE_MATCH, etag.to_string());
}
let status = request
.send()
.map_err(PagefeedError::from)
.and_then(|mut response| {
if response.status() == StatusCode::NOT_MODIFIED {
Ok(PageStatus::Unmodified)
} else {
let etag = response
.headers()
.get(header::ETAG)
.and_then(|x| x.to_str().ok())
.map(str::to_string);
let body_hash = hash(page, &mut response)?;
Ok(PageStatus::Modified { body_hash, etag })
}
})
.unwrap_or_else(|err| PageStatus::FetchError(format!("{:?}", err)));
match status {
PageStatus::Modified { ref body_hash, .. }
if Some(body_hash) == page.http | random_line_split |
|
main.rs | ::var("USER").unwrap();
format!("postgres://{}@%2Frun%2Fpostgresql/pagefeed", user)
})
}
fn handle_request(req: &mut fastcgi::Request) -> Result<(), PagefeedError> {
let url = get_url(req)?;
let pathinfo = get_pathinfo(req);
let slug = pathinfo.trim_matches('/');
let mut w = io::BufWriter::new(req.stdout());
if slug.is_empty() | else {
handle_feed_request(slug, &mut w)
}
}
fn handle_opml_request<W: Write>(url: &str, out: &mut W) -> Result<(), PagefeedError> {
let mut conn = database_connection()?;
let mut trans = conn.transaction()?;
let pages = get_enabled_pages(&mut trans)?;
trans.commit()?;
out.write_all(b"Content-Type: application/xml\n\n")?;
build_opml(url, &pages, out)?;
Ok(())
}
fn handle_feed_request<W: Write>(slug: &str, out: &mut W) -> Result<(), PagefeedError> {
let mut conn = database_connection()?;
let mut trans = conn.transaction()?;
let page = get_page(&mut trans, slug)?;
let page = page
.map(|page| refresh_page(&mut trans, page))
.transpose()?;
trans.commit()?;
match page {
None => {
out.write_all(b"Status: 404 Not Found\n\n")?;
Ok(())
}
Some(page) => {
let feed = build_feed(&page);
out.write_all(b"Content-Type: application/rss+xml\n\n")?;
feed.write_to(out)?;
Ok(())
}
}
}
fn get_url(req: &fastcgi::Request) -> Result<String, PagefeedError> {
use std::io::{Error, ErrorKind};
let https = match req.param("HTTPS") {
Some(ref s) => s == "on",
_ => false,
};
let server_addr = req
.param("SERVER_ADDR")
.ok_or_else(|| Error::new(ErrorKind::Other, "SERVER_ADDR unset"))?;
let server_port = req
.param("SERVER_PORT")
.ok_or_else(|| Error::new(ErrorKind::Other, "SERVER_PORT unset"))?
.parse::<u16>()
.map_err(|_| Error::new(ErrorKind::Other, "SERVER_PORT invalid"))?;
let mut script_name = req
.param("SCRIPT_NAME")
.ok_or_else(|| Error::new(ErrorKind::Other, "SCRIPT_NAME unset"))?;
if !script_name.starts_with('/') {
script_name.insert(0, '/')
}
if !script_name.ends_with('/') {
script_name.push('/')
}
Ok(match (https, server_port) {
(false, 80) => format!("http://{}{}", server_addr, script_name),
(false, _) => format!("http://{}:{}{}", server_addr, server_port, script_name),
(true, 443) => format!("https://{}{}", server_addr, script_name),
(true, _) => format!("https://{}:{}{}", server_addr, server_port, script_name),
})
}
fn get_pathinfo(req: &fastcgi::Request) -> String {
req.param("PATH_INFO").unwrap_or_default()
}
// ----------------------------------------------------------------------------
#[derive(Debug)]
enum PagefeedError {
Io(io::Error),
Postgres(postgres::error::Error),
QuickXml(quick_xml::de::DeError),
Regex(regex::Error),
Reqwest(reqwest::Error),
Rss(rss::Error),
}
impl From<io::Error> for PagefeedError {
fn from(err: io::Error) -> PagefeedError {
PagefeedError::Io(err)
}
}
impl From<postgres::error::Error> for PagefeedError {
fn from(err: postgres::error::Error) -> PagefeedError {
PagefeedError::Postgres(err)
}
}
impl From<regex::Error> for PagefeedError {
fn from(err: regex::Error) -> PagefeedError {
PagefeedError::Regex(err)
}
}
impl From<reqwest::Error> for PagefeedError {
fn from(err: reqwest::Error) -> PagefeedError {
PagefeedError::Reqwest(err)
}
}
impl From<rss::Error> for PagefeedError {
fn from(err: rss::Error) -> PagefeedError {
PagefeedError::Rss(err)
}
}
impl From<quick_xml::de::DeError> for PagefeedError {
fn from(err: quick_xml::de::DeError) -> PagefeedError {
PagefeedError::QuickXml(err)
}
}
// ----------------------------------------------------------------------------
fn build_feed(page: &Page) -> rss::Channel {
let mut items = vec![];
if page.last_modified.is_some() {
let guid = rss::GuidBuilder::default()
.value(format!("{}", page.item_id.unwrap().urn()))
.permalink(false)
.build();
let item = rss::ItemBuilder::default()
.title(page.name.to_owned())
.description(describe_page_status(page))
.link(page.url.to_owned())
.pub_date(page.last_modified.unwrap().to_rfc2822())
.guid(guid)
.build();
items.push(item);
}
rss::ChannelBuilder::default()
.title(page.name.to_owned())
.link(page.url.to_owned())
.items(items)
.build()
}
fn describe_page_status(page: &Page) -> String {
page.last_error.as_ref().map_or_else(
|| format!("{} was updated.", page.name),
|err| format!("Error while checking {}: {}", page.name, err),
)
}
fn build_opml<W: Write>(url: &str, pages: &[Page], out: &mut W) -> Result<(), PagefeedError> {
#[derive(serde::Serialize)]
#[serde(rename = "opml")]
struct Opml<'a> {
version: &'a str,
head: Head,
body: Body<'a>,
}
#[derive(serde::Serialize)]
struct Head {}
#[derive(serde::Serialize)]
struct Body<'a> {
outline: Vec<Outline<'a>>,
}
#[derive(serde::Serialize)]
struct Outline<'a> {
#[serde(rename = "type")]
typ: &'a str,
text: String,
#[serde(rename = "xmlUrl")]
xml_url: String,
#[serde(rename = "htmlUrl")]
html_url: &'a str,
}
write!(out, "{}", quick_xml::se::to_string(
&Opml {
version: "2.0",
head: Head {},
body: Body {
outline: pages
.iter()
.map(|page| Outline {
typ: "rss",
text: htmlescape::encode_minimal(&page.name),
xml_url: format!("{}{}", url, page.slug),
html_url: &page.url,
})
.collect(),
},
},
)?)?;
Ok(())
}
// ----------------------------------------------------------------------------
#[derive(Clone)]
enum PageStatus {
Unmodified,
Modified {
body_hash: Vec<u8>,
etag: Option<String>,
},
FetchError(String),
}
fn refresh_page(
conn: &mut postgres::Transaction,
page: Page,
) -> Result<Page, postgres::error::Error> {
if !page_needs_checking(&page) {
return Ok(page);
}
let status = check_page(&page);
match status {
PageStatus::Unmodified => update_page_unchanged(conn, &page)?,
PageStatus::Modified {
ref body_hash,
ref etag,
} => update_page_changed(conn, &page, etag, body_hash)?,
PageStatus::FetchError(ref error) => update_page_error(conn, &page, error)?,
}
get_page(conn, &page.slug)
.transpose()
.expect("page disappeared??")
}
fn page_needs_checking(page: &Page) -> bool {
chrono::Utc::now() >= page.next_check
}
fn check_page(page: &Page) -> PageStatus {
use reqwest::header;
use reqwest::StatusCode;
let client = reqwest::blocking::Client::new();
let mut request = client
.get(&page.url)
.header(header::USER_AGENT, "Mozilla/5.0");
if let Some(ref etag) = page.http_etag {
request = request.header(header::IF_NONE_MATCH, etag.to_string());
}
let status = request
.send()
.map_err(PagefeedError::from)
.and_then(|mut response| {
if response.status() == StatusCode::NOT_MODIFIED {
Ok(PageStatus::Unmodified)
} else {
let etag = response
.headers()
.get(header::ETAG)
.and_then(|x| x.to_str().ok())
.map(str::to_string);
let body_hash = hash(page, &mut response)?;
Ok(PageStatus::Modified { body_hash, etag })
}
})
.unwrap_or_else(|err| PageStatus::FetchError(format!("{:?}", err)));
match status {
PageStatus::Modified { ref body_hash, .. }
if Some(body_hash) == page | {
handle_opml_request(&url, &mut w)
} | conditional_block |
start.go | interface {
CheckRequirements() error
}
//go:generate mockgen -package mocks -destination mocks/cache.go code.cloudfoundry.org/cfdev/cmd/start Cache
type Cache interface {
Sync(resource.Catalog) error
}
//go:generate mockgen -package mocks -destination mocks/cfdevd.go code.cloudfoundry.org/cfdev/cmd/start CFDevD
type CFDevD interface {
Install() error
}
//go:generate mockgen -package mocks -destination mocks/vpnkit.go code.cloudfoundry.org/cfdev/cmd/start VpnKit
type VpnKit interface {
Start() error
Stop() error
Watch(chan string)
}
//go:generate mockgen -package mocks -destination mocks/analyticsd.go code.cloudfoundry.org/cfdev/cmd/start AnalyticsD
type AnalyticsD interface {
Start() error
Stop() error
IsRunning() (bool, error)
}
//go:generate mockgen -package mocks -destination mocks/hypervisor.go code.cloudfoundry.org/cfdev/cmd/start Hypervisor
type Hypervisor interface {
CreateVM(vm hypervisor.VM) error
Start(vmName string) error
Stop(vmName string) error
IsRunning(vmName string) (bool, error)
}
//go:generate mockgen -package mocks -destination mocks/provision.go code.cloudfoundry.org/cfdev/cmd/start Provisioner
type Provisioner interface {
Ping() error
DeployBosh() error
DeployCloudFoundry([]string) error
GetServices() ([]provision.Service, string, error)
WhiteListServices(string, []provision.Service) ([]provision.Service, error)
DeployServices(provision.UI, []provision.Service) error
ReportProgress(provision.UI, string)
}
//go:generate mockgen -package mocks -destination mocks/isoreader.go code.cloudfoundry.org/cfdev/cmd/start IsoReader
type IsoReader interface {
Read(isoPath string) (iso.Metadata, error)
}
//go:generate mockgen -package mocks -destination mocks/stop.go code.cloudfoundry.org/cfdev/cmd/start Stop
type Stop interface {
RunE(cmd *cobra.Command, args []string) error
}
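// Each //go:generate line above produces a gomock double for its interface;
// running `go generate ./...` refreshes them after a signature change.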
type Args struct {
Registries string
DeploySingleService string
DepsIsoPath string
NoProvision bool
Cpus int
Mem int
}
type Start struct {
Exit chan struct{}
LocalExit chan string
UI UI
Config config.Config
IsoReader IsoReader
Analytics AnalyticsClient
AnalyticsToggle Toggle
HostNet HostNet
Host Host
Cache Cache
CFDevD CFDevD
VpnKit VpnKit
AnalyticsD AnalyticsD
Hypervisor Hypervisor
Provisioner Provisioner
Stop Stop
Profiler SystemProfiler
}
const compatibilityVersion = "v2"
const defaultMemory = 4192
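// defaultMemory is in MB; it is presumably the fallback used by
// allocateMemory (below) when neither the flag nor the .dev file sets one.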
func (s *Start) | () *cobra.Command {
args := Args{}
cmd := &cobra.Command{
Use: "start",
RunE: func(_ *cobra.Command, _ []string) error {
if err := s.Execute(args); err != nil {
return errors.SafeWrap(err, "cf dev start")
}
return nil
},
}
pf := cmd.PersistentFlags()
pf.StringVarP(&args.DepsIsoPath, "file", "f", "", "path to .dev file containing bosh & cf bits")
pf.StringVarP(&args.Registries, "registries", "r", "", "docker registries that skip ssl validation - i.e. host:port,host2:port2")
pf.IntVarP(&args.Cpus, "cpus", "c", 4, "cpus to allocate to vm")
pf.IntVarP(&args.Mem, "memory", "m", 0, "memory to allocate to vm in MB")
pf.BoolVarP(&args.NoProvision, "no-provision", "n", false, "start vm but do not provision")
pf.StringVarP(&args.DeploySingleService, "white-listed-services", "s", "", "list of supported services to deploy")
pf.MarkHidden("no-provision")
return cmd
}
func (s *Start) Execute(args Args) error {
go func() {
select {
case <-s.Exit:
// no-op
case name := <-s.LocalExit:
s.UI.Say("ERROR: %s has stopped", name)
}
s.Hypervisor.Stop("cfdev")
s.VpnKit.Stop()
os.Exit(128)
}()
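// Exit code 128 mirrors the shell convention for signal-style termination;
// the cleanup above stops the VM and VPNKit before the process dies.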
depsIsoName := "cf"
depsIsoPath := filepath.Join(s.Config.CacheDir, "cf-deps.iso")
if args.DepsIsoPath != "" {
depsIsoName = filepath.Base(args.DepsIsoPath)
var err error
depsIsoPath, err = filepath.Abs(args.DepsIsoPath)
if err != nil {
return errors.SafeWrap(err, "determining absolute path to deps iso")
}
if _, err := os.Stat(depsIsoPath); os.IsNotExist(err) {
return fmt.Errorf("no file found at: %s", depsIsoPath)
}
s.Config.Dependencies.Remove("cf-deps.iso")
}
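// With a custom .dev file, the bundled cf-deps.iso entry is dropped from
// the catalog so the cache sync below does not fetch bits that will never
// be mounted.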
s.AnalyticsToggle.SetProp("type", depsIsoName)
aMem, err := s.Profiler.GetAvailableMemory()
if err != nil {
fmt.Printf("AVAILABLE MEMORY ERROR: %v\n", err)
}
tMem, err := s.Profiler.GetTotalMemory()
if err != nil {
fmt.Printf("TOTAL MEMORY ERROR: %v\n", err)
}
if err := s.Host.CheckRequirements(); err != nil {
return err
}
if running, err := s.Hypervisor.IsRunning("cfdev"); err != nil {
return errors.SafeWrap(err, "is running")
} else if running {
s.UI.Say("CF Dev is already running...")
s.Analytics.Event(cfanalytics.START_END, map[string]interface{}{"alreadyrunning": true})
return nil
}
if err := s.Stop.RunE(nil, nil); err != nil {
return errors.SafeWrap(err, "stopping cfdev")
}
if err := env.SetupHomeDir(s.Config); err != nil {
return errors.SafeWrap(err, "setting up cfdev home dir")
}
if cfdevd := s.Config.Dependencies.Lookup("cfdevd"); cfdevd != nil {
s.UI.Say("Downloading Network Helper...")
if err := s.Cache.Sync(resource.Catalog{
Items: []resource.Item{*cfdevd},
}); err != nil {
return errors.SafeWrap(err, "Unable to download network helper")
}
s.Config.Dependencies.Remove("cfdevd")
}
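// cfdevd (the privileged network helper) is synced on its own and then
// removed from the catalog so the main resource sync below skips it.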
if err := s.osSpecificSetup(); err != nil {
return err
}
if err := s.HostNet.AddLoopbackAliases(s.Config.BoshDirectorIP, s.Config.CFRouterIP); err != nil {
return errors.SafeWrap(err, "adding aliases")
}
registries, err := s.parseDockerRegistriesFlag(args.Registries)
if err != nil {
return errors.SafeWrap(err, "Unable to parse docker registries")
}
s.UI.Say("Downloading Resources...")
if err := s.Cache.Sync(s.Config.Dependencies); err != nil {
return errors.SafeWrap(err, "Unable to sync assets")
}
isoConfig, err := s.IsoReader.Read(depsIsoPath)
if err != nil {
return errors.SafeWrap(err, fmt.Sprintf("%s is not compatible with CF Dev. Please use a compatible file.", depsIsoName))
}
if isoConfig.Version != compatibilityVersion {
return fmt.Errorf("%s is not compatible with CF Dev. Please use a compatible file", depsIsoName)
}
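// compatibilityVersion gates the .dev file format, not the CLI release:
// any iso whose embedded metadata reports a different version is rejected.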
s.Analytics.PromptOptInIfNeeded(isoConfig.AnalyticsMessage)
s.Analytics.Event(cfanalytics.START_BEGIN, map[string]interface{}{
"total memory": tMem,
"available memory": aMem,
})
if args.DeploySingleService != "" {
if !s.isServiceSupported(args.DeploySingleService, isoConfig.Services) {
// err is nil here (the last check succeeded); SafeWrap is kept only to
// build a reportable message.
return errors.SafeWrap(nil, fmt.Sprintf("Service: '%v' is not supported", args.DeploySingleService))
}
s.Analytics.Event(cfanalytics.SELECTED_SERVICE, map[string]interface{}{"services_requested": args.DeploySingleService})
}
memoryToAllocate, err := s.allocateMemory(isoConfig, args.Mem)
if err != nil {
return err
}
s.UI.Say("Creating the VM...")
if err := s.Hypervisor.CreateVM(hypervisor.VM{
Name: "cfdev",
CPUs: args.Cpus,
MemoryMB: memoryToAllocate,
DepsIso: depsIsoPath,
}); err != nil {
return errors.SafeWrap(err, "creating the vm")
}
s.UI.Say("Starting VPNKit...")
if err := s.VpnKit.Start(); err != nil {
return errors.SafeWrap(err, "starting vpnkit")
}
s.VpnKit.Watch(s.LocalExit)
s.UI.Say("Starting the VM...")
if err := s.Hypervisor.Start("cfdev"); err != nil {
return errors.SafeWrap(err, "starting the vm")
}
| Cmd | identifier_name |
start.go | Host interface {
CheckRequirements() error
}
//go:generate mockgen -package mocks -destination mocks/cache.go code.cloudfoundry.org/cfdev/cmd/start Cache
type Cache interface {
Sync(resource.Catalog) error
}
//go:generate mockgen -package mocks -destination mocks/cfdevd.go code.cloudfoundry.org/cfdev/cmd/start CFDevD
type CFDevD interface {
Install() error
}
//go:generate mockgen -package mocks -destination mocks/vpnkit.go code.cloudfoundry.org/cfdev/cmd/start VpnKit
type VpnKit interface {
Start() error
Stop() error
Watch(chan string)
}
//go:generate mockgen -package mocks -destination mocks/analyticsd.go code.cloudfoundry.org/cfdev/cmd/start AnalyticsD
type AnalyticsD interface {
Start() error
Stop() error
IsRunning() (bool, error)
}
//go:generate mockgen -package mocks -destination mocks/hypervisor.go code.cloudfoundry.org/cfdev/cmd/start Hypervisor
type Hypervisor interface {
CreateVM(vm hypervisor.VM) error
Start(vmName string) error
Stop(vmName string) error
IsRunning(vmName string) (bool, error)
}
//go:generate mockgen -package mocks -destination mocks/provision.go code.cloudfoundry.org/cfdev/cmd/start Provisioner
type Provisioner interface {
Ping() error
DeployBosh() error
DeployCloudFoundry([]string) error
GetServices() ([]provision.Service, string, error)
WhiteListServices(string, []provision.Service) ([]provision.Service, error)
DeployServices(provision.UI, []provision.Service) error
ReportProgress(provision.UI, string)
}
//go:generate mockgen -package mocks -destination mocks/isoreader.go code.cloudfoundry.org/cfdev/cmd/start IsoReader
type IsoReader interface {
Read(isoPath string) (iso.Metadata, error)
}
//go:generate mockgen -package mocks -destination mocks/stop.go code.cloudfoundry.org/cfdev/cmd/start Stop
type Stop interface {
RunE(cmd *cobra.Command, args []string) error
}
type Args struct {
Registries string
DeploySingleService string
DepsIsoPath string | NoProvision bool
Cpus int
Mem int
}
type Start struct {
Exit chan struct{}
LocalExit chan string
UI UI
Config config.Config
IsoReader IsoReader
Analytics AnalyticsClient
AnalyticsToggle Toggle
HostNet HostNet
Host Host
Cache Cache
CFDevD CFDevD
VpnKit VpnKit
AnalyticsD AnalyticsD
Hypervisor Hypervisor
Provisioner Provisioner
Stop Stop
Profiler SystemProfiler
}
const compatibilityVersion = "v2"
const defaultMemory = 4192
func (s *Start) Cmd() *cobra.Command {
args := Args{}
cmd := &cobra.Command{
Use: "start",
RunE: func(_ *cobra.Command, _ []string) error {
if err := s.Execute(args); err != nil {
return errors.SafeWrap(err, "cf dev start")
}
return nil
},
}
pf := cmd.PersistentFlags()
pf.StringVarP(&args.DepsIsoPath, "file", "f", "", "path to .dev file containing bosh & cf bits")
pf.StringVarP(&args.Registries, "registries", "r", "", "docker registries that skip ssl validation - i.e. host:port,host2:port2")
pf.IntVarP(&args.Cpus, "cpus", "c", 4, "cpus to allocate to vm")
pf.IntVarP(&args.Mem, "memory", "m", 0, "memory to allocate to vm in MB")
pf.BoolVarP(&args.NoProvision, "no-provision", "n", false, "start vm but do not provision")
pf.StringVarP(&args.DeploySingleService, "white-listed-services", "s", "", "list of supported services to deploy")
pf.MarkHidden("no-provision")
return cmd
}
func (s *Start) Execute(args Args) error {
go func() {
select {
case <-s.Exit:
// no-op
case name := <-s.LocalExit:
s.UI.Say("ERROR: %s has stopped", name)
}
s.Hypervisor.Stop("cfdev")
s.VpnKit.Stop()
os.Exit(128)
}()
depsIsoName := "cf"
depsIsoPath := filepath.Join(s.Config.CacheDir, "cf-deps.iso")
if args.DepsIsoPath != "" {
depsIsoName = filepath.Base(args.DepsIsoPath)
var err error
depsIsoPath, err = filepath.Abs(args.DepsIsoPath)
if err != nil {
return errors.SafeWrap(err, "determining absolute path to deps iso")
}
if _, err := os.Stat(depsIsoPath); os.IsNotExist(err) {
return fmt.Errorf("no file found at: %s", depsIsoPath)
}
s.Config.Dependencies.Remove("cf-deps.iso")
}
s.AnalyticsToggle.SetProp("type", depsIsoName)
aMem, err := s.Profiler.GetAvailableMemory()
if err != nil {
fmt.Printf("AVAILABLE MEMORY ERROR: %v\n", err)
}
tMem, err := s.Profiler.GetTotalMemory()
if err != nil {
fmt.Printf("TOTAL MEMORY ERROR: %v\n", err)
}
if err := s.Host.CheckRequirements(); err != nil {
return err
}
if running, err := s.Hypervisor.IsRunning("cfdev"); err != nil {
return errors.SafeWrap(err, "is running")
} else if running {
s.UI.Say("CF Dev is already running...")
s.Analytics.Event(cfanalytics.START_END, map[string]interface{}{"alreadyrunning": true})
return nil
}
if err := s.Stop.RunE(nil, nil); err != nil {
return errors.SafeWrap(err, "stopping cfdev")
}
if err := env.SetupHomeDir(s.Config); err != nil {
return errors.SafeWrap(err, "setting up cfdev home dir")
}
if cfdevd := s.Config.Dependencies.Lookup("cfdevd"); cfdevd != nil {
s.UI.Say("Downloading Network Helper...")
if err := s.Cache.Sync(resource.Catalog{
Items: []resource.Item{*cfdevd},
}); err != nil {
return errors.SafeWrap(err, "Unable to download network helper")
}
s.Config.Dependencies.Remove("cfdevd")
}
if err := s.osSpecificSetup(); err != nil {
return err
}
if err := s.HostNet.AddLoopbackAliases(s.Config.BoshDirectorIP, s.Config.CFRouterIP); err != nil {
return errors.SafeWrap(err, "adding aliases")
}
registries, err := s.parseDockerRegistriesFlag(args.Registries)
if err != nil {
return errors.SafeWrap(err, "Unable to parse docker registries")
}
s.UI.Say("Downloading Resources...")
if err := s.Cache.Sync(s.Config.Dependencies); err != nil {
return errors.SafeWrap(err, "Unable to sync assets")
}
isoConfig, err := s.IsoReader.Read(depsIsoPath)
if err != nil {
return errors.SafeWrap(err, fmt.Sprintf("%s is not compatible with CF Dev. Please use a compatible file.", depsIsoName))
}
if isoConfig.Version != compatibilityVersion {
return fmt.Errorf("%s is not compatible with CF Dev. Please use a compatible file", depsIsoName)
}
s.Analytics.PromptOptInIfNeeded(isoConfig.AnalyticsMessage)
s.Analytics.Event(cfanalytics.START_BEGIN, map[string]interface{}{
"total memory": tMem,
"available memory": aMem,
})
if args.DeploySingleService != "" {
if !s.isServiceSupported(args.DeploySingleService, isoConfig.Services) {
// err is stale (nil) at this point; build a fresh error instead of wrapping it
return fmt.Errorf("Service: '%v' is not supported", args.DeploySingleService)
}
s.Analytics.Event(cfanalytics.SELECTED_SERVICE, map[string]interface{}{"services_requested": args.DeploySingleService})
}
memoryToAllocate, err := s.allocateMemory(isoConfig, args.Mem)
if err != nil {
return err
}
s.UI.Say("Creating the VM...")
if err := s.Hypervisor.CreateVM(hypervisor.VM{
Name: "cfdev",
CPUs: args.Cpus,
MemoryMB: memoryToAllocate,
DepsIso: depsIsoPath,
}); err != nil {
return errors.SafeWrap(err, "creating the vm")
}
s.UI.Say("Starting VPNKit...")
if err := s.VpnKit.Start(); err != nil {
return errors.SafeWrap(err, "starting vpnkit")
}
s.VpnKit.Watch(s.LocalExit)
s.UI.Say("Starting the VM...")
if err := s.Hypervisor.Start("cfdev"); err != nil {
return errors.SafeWrap(err, "starting the vm")
}
s | random_line_split |
|
start.go | f.BoolVarP(&args.NoProvision, "no-provision", "n", false, "start vm but do not provision")
pf.StringVarP(&args.DeploySingleService, "white-listed-services", "s", "", "list of supported services to deploy")
pf.MarkHidden("no-provision")
return cmd
}
func (s *Start) Execute(args Args) error {
go func() {
select {
case <-s.Exit:
// no-op
case name := <-s.LocalExit:
s.UI.Say("ERROR: %s has stopped", name)
}
s.Hypervisor.Stop("cfdev")
s.VpnKit.Stop()
os.Exit(128)
}()
depsIsoName := "cf"
depsIsoPath := filepath.Join(s.Config.CacheDir, "cf-deps.iso")
if args.DepsIsoPath != "" {
depsIsoName = filepath.Base(args.DepsIsoPath)
var err error
depsIsoPath, err = filepath.Abs(args.DepsIsoPath)
if err != nil {
return errors.SafeWrap(err, "determining absolute path to deps iso")
}
if _, err := os.Stat(depsIsoPath); os.IsNotExist(err) {
return fmt.Errorf("no file found at: %s", depsIsoPath)
}
s.Config.Dependencies.Remove("cf-deps.iso")
}
s.AnalyticsToggle.SetProp("type", depsIsoName)
aMem, err := s.Profiler.GetAvailableMemory()
if err != nil {
fmt.Printf("AVAILABLE MEMORY ERROR: %v\n", err)
}
tMem, err := s.Profiler.GetTotalMemory()
if err != nil {
fmt.Printf("TOTAL MEMORY ERROR: %v\n", err)
}
if err := s.Host.CheckRequirements(); err != nil {
return err
}
if running, err := s.Hypervisor.IsRunning("cfdev"); err != nil {
return errors.SafeWrap(err, "is running")
} else if running {
s.UI.Say("CF Dev is already running...")
s.Analytics.Event(cfanalytics.START_END, map[string]interface{}{"alreadyrunning": true})
return nil
}
if err := s.Stop.RunE(nil, nil); err != nil {
return errors.SafeWrap(err, "stopping cfdev")
}
if err := env.SetupHomeDir(s.Config); err != nil {
return errors.SafeWrap(err, "setting up cfdev home dir")
}
if cfdevd := s.Config.Dependencies.Lookup("cfdevd"); cfdevd != nil {
s.UI.Say("Downloading Network Helper...")
if err := s.Cache.Sync(resource.Catalog{
Items: []resource.Item{*cfdevd},
}); err != nil {
return errors.SafeWrap(err, "Unable to download network helper")
}
s.Config.Dependencies.Remove("cfdevd")
}
if err := s.osSpecificSetup(); err != nil {
return err
}
if err := s.HostNet.AddLoopbackAliases(s.Config.BoshDirectorIP, s.Config.CFRouterIP); err != nil {
return errors.SafeWrap(err, "adding aliases")
}
registries, err := s.parseDockerRegistriesFlag(args.Registries)
if err != nil {
return errors.SafeWrap(err, "Unable to parse docker registries")
}
s.UI.Say("Downloading Resources...")
if err := s.Cache.Sync(s.Config.Dependencies); err != nil {
return errors.SafeWrap(err, "Unable to sync assets")
}
isoConfig, err := s.IsoReader.Read(depsIsoPath)
if err != nil {
return errors.SafeWrap(err, fmt.Sprintf("%s is not compatible with CF Dev. Please use a compatible file.", depsIsoName))
}
if isoConfig.Version != compatibilityVersion {
return fmt.Errorf("%s is not compatible with CF Dev. Please use a compatible file", depsIsoName)
}
s.Analytics.PromptOptInIfNeeded(isoConfig.AnalyticsMessage)
s.Analytics.Event(cfanalytics.START_BEGIN, map[string]interface{}{
"total memory": tMem,
"available memory": aMem,
})
if args.DeploySingleService != "" {
if !s.isServiceSupported(args.DeploySingleService, isoConfig.Services) {
return fmt.Errorf("Service: '%v' is not supported", args.DeploySingleService)
}
s.Analytics.Event(cfanalytics.SELECTED_SERVICE, map[string]interface{}{"services_requested": args.DeploySingleService})
}
memoryToAllocate, err := s.allocateMemory(isoConfig, args.Mem)
if err != nil {
return err
}
s.UI.Say("Creating the VM...")
if err := s.Hypervisor.CreateVM(hypervisor.VM{
Name: "cfdev",
CPUs: args.Cpus,
MemoryMB: memoryToAllocate,
DepsIso: depsIsoPath,
}); err != nil {
return errors.SafeWrap(err, "creating the vm")
}
s.UI.Say("Starting VPNKit...")
if err := s.VpnKit.Start(); err != nil {
return errors.SafeWrap(err, "starting vpnkit")
}
s.VpnKit.Watch(s.LocalExit)
s.UI.Say("Starting the VM...")
if err := s.Hypervisor.Start("cfdev"); err != nil {
return errors.SafeWrap(err, "starting the vm")
}
s.UI.Say("Waiting for Garden...")
s.waitForGarden()
if args.NoProvision {
s.UI.Say("VM will not be provisioned because '-n' (no-provision) flag was specified.")
return nil
}
if err := s.provision(isoConfig, registries, args.DeploySingleService); err != nil {
return err
}
if s.AnalyticsToggle.Enabled() {
if err := s.AnalyticsD.Start(); err != nil {
s.UI.Say("WARNING: analyticsd failed to start: %v", err)
}
}
s.Analytics.Event(cfanalytics.START_END)
return nil
}
func (s *Start) provision(isoConfig iso.Metadata, registries []string, deploySingleService string) error {
s.UI.Say("Deploying the BOSH Director...")
if err := s.Provisioner.DeployBosh(); err != nil {
return errors.SafeWrap(err, "Failed to deploy the BOSH Director")
}
s.UI.Say("Deploying CF...")
s.Provisioner.ReportProgress(s.UI, "cf")
if err := s.Provisioner.DeployCloudFoundry(registries); err != nil {
return errors.SafeWrap(err, "Failed to deploy Cloud Foundry")
}
services, err := s.Provisioner.WhiteListServices(deploySingleService, isoConfig.Services)
if err != nil {
return errors.SafeWrap(err, "Failed to whitelist services")
}
if err := s.Provisioner.DeployServices(s.UI, services); err != nil {
return errors.SafeWrap(err, "Failed to deploy services")
}
if isoConfig.Message != "" {
t := template.Must(template.New("message").Parse(isoConfig.Message))
err := t.Execute(s.UI.Writer(), map[string]string{"SYSTEM_DOMAIN": "dev.cfdev.sh"})
if err != nil {
return errors.SafeWrap(err, "Failed to print deps file provided message")
}
}
return nil
}
func (s *Start) waitForGarden() {
for {
if err := s.Provisioner.Ping(); err == nil {
return
}
time.Sleep(time.Second)
}
}
func (s *Start) parseDockerRegistriesFlag(flag string) ([]string, error) {
if flag == "" {
return nil, nil
}
values := strings.Split(flag, ",")
registries := make([]string, 0, len(values))
for _, value := range values {
// Including the // will cause url.Parse to validate 'value' as a host:port
u, err := url.Parse("//" + value)
if err != nil {
// Grab the more succinct error message
if urlErr, ok := err.(*url.Error); ok {
err = urlErr.Err
}
return nil, fmt.Errorf("'%v' - %v", value, err)
}
registries = append(registries, u.Host)
}
return registries, nil
}
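// Hedged standalone sketch (not part of start.go) of the "//"+value trick used
// above: url.Parse then validates each comma-separated entry as a host:port
// pair. The sample inputs below are assumptions for illustration only.
func registryParseExample() {
	for _, value := range []string{"registry.local:5000", "bad host:5000"} {
		if u, err := url.Parse("//" + value); err != nil {
			fmt.Printf("%q rejected: %v\n", value, err) // e.g. invalid character in host name
		} else {
			fmt.Printf("%q accepted, host=%q\n", value, u.Host)
		}
	}
}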
func (s *Start) isServiceSupported(service string, services []provision.Service) bool {
if strings.ToLower(service) == "all" || strings.ToLower(service) == "none" {
return true
}
for _, s := range services {
if strings.EqualFold(s.Flagname, service) {
return true
}
}
return false
}
func (s *Start) allocateMemory(isoConfig iso.Metadata, requestedMem int) (int, error) | {
baseMem := defaultMemory
if isoConfig.DefaultMemory > 0 {
baseMem = isoConfig.DefaultMemory
}
availableMem, err := s.Profiler.GetAvailableMemory()
if err != nil {
return 0, errors.SafeWrap(err, "error retrieving available system memory")
}
customMemProvided := requestedMem > 0
if customMemProvided {
if requestedMem >= baseMem {
if availableMem >= uint64(requestedMem) {
return requestedMem, nil
}
if availableMem < uint64(requestedMem) {
s.UI.Say("WARNING: This machine may not have enough available RAM to run with what is specified.") | identifier_body |
|
start.go | //go:generate mockgen -package mocks -destination mocks/stop.go code.cloudfoundry.org/cfdev/cmd/start Stop
type Stop interface {
RunE(cmd *cobra.Command, args []string) error
}
type Args struct {
Registries string
DeploySingleService string
DepsIsoPath string
NoProvision bool
Cpus int
Mem int
}
type Start struct {
Exit chan struct{}
LocalExit chan string
UI UI
Config config.Config
IsoReader IsoReader
Analytics AnalyticsClient
AnalyticsToggle Toggle
HostNet HostNet
Host Host
Cache Cache
CFDevD CFDevD
VpnKit VpnKit
AnalyticsD AnalyticsD
Hypervisor Hypervisor
Provisioner Provisioner
Stop Stop
Profiler SystemProfiler
}
const compatibilityVersion = "v2"
const defaultMemory = 4192
func (s *Start) Cmd() *cobra.Command {
args := Args{}
cmd := &cobra.Command{
Use: "start",
RunE: func(_ *cobra.Command, _ []string) error {
if err := s.Execute(args); err != nil {
return errors.SafeWrap(err, "cf dev start")
}
return nil
},
}
pf := cmd.PersistentFlags()
pf.StringVarP(&args.DepsIsoPath, "file", "f", "", "path to .dev file containing bosh & cf bits")
pf.StringVarP(&args.Registries, "registries", "r", "", "docker registries that skip ssl validation - ie. host:port,host2:port2")
pf.IntVarP(&args.Cpus, "cpus", "c", 4, "cpus to allocate to vm")
pf.IntVarP(&args.Mem, "memory", "m", 0, "memory to allocate to vm in MB")
pf.BoolVarP(&args.NoProvision, "no-provision", "n", false, "start vm but do not provision")
pf.StringVarP(&args.DeploySingleService, "white-listed-services", "s", "", "list of supported services to deploy")
pf.MarkHidden("no-provision")
return cmd
}
func (s *Start) Execute(args Args) error {
go func() {
select {
case <-s.Exit:
// no-op
case name := <-s.LocalExit:
s.UI.Say("ERROR: %s has stopped", name)
}
s.Hypervisor.Stop("cfdev")
s.VpnKit.Stop()
os.Exit(128)
}()
depsIsoName := "cf"
depsIsoPath := filepath.Join(s.Config.CacheDir, "cf-deps.iso")
if args.DepsIsoPath != "" {
depsIsoName = filepath.Base(args.DepsIsoPath)
var err error
depsIsoPath, err = filepath.Abs(args.DepsIsoPath)
if err != nil {
return errors.SafeWrap(err, "determining absolute path to deps iso")
}
if _, err := os.Stat(depsIsoPath); os.IsNotExist(err) {
return fmt.Errorf("no file found at: %s", depsIsoPath)
}
s.Config.Dependencies.Remove("cf-deps.iso")
}
s.AnalyticsToggle.SetProp("type", depsIsoName)
aMem, err := s.Profiler.GetAvailableMemory()
if err != nil {
fmt.Printf("AVAILABLE MEMORY ERROR: %v\n", err)
}
tMem, err := s.Profiler.GetTotalMemory()
if err != nil {
fmt.Printf("TOTAL MEMORY ERROR: %v\n", err)
}
if err := s.Host.CheckRequirements(); err != nil {
return err
}
if running, err := s.Hypervisor.IsRunning("cfdev"); err != nil {
return errors.SafeWrap(err, "is running")
} else if running {
s.UI.Say("CF Dev is already running...")
s.Analytics.Event(cfanalytics.START_END, map[string]interface{}{"alreadyrunning": true})
return nil
}
if err := s.Stop.RunE(nil, nil); err != nil {
return errors.SafeWrap(err, "stopping cfdev")
}
if err := env.SetupHomeDir(s.Config); err != nil {
return errors.SafeWrap(err, "setting up cfdev home dir")
}
if cfdevd := s.Config.Dependencies.Lookup("cfdevd"); cfdevd != nil {
s.UI.Say("Downloading Network Helper...")
if err := s.Cache.Sync(resource.Catalog{
Items: []resource.Item{*cfdevd},
}); err != nil {
return errors.SafeWrap(err, "Unable to download network helper")
}
s.Config.Dependencies.Remove("cfdevd")
}
if err := s.osSpecificSetup(); err != nil {
return err
}
if err := s.HostNet.AddLoopbackAliases(s.Config.BoshDirectorIP, s.Config.CFRouterIP); err != nil {
return errors.SafeWrap(err, "adding aliases")
}
registries, err := s.parseDockerRegistriesFlag(args.Registries)
if err != nil {
return errors.SafeWrap(err, "Unable to parse docker registries")
}
s.UI.Say("Downloading Resources...")
if err := s.Cache.Sync(s.Config.Dependencies); err != nil {
return errors.SafeWrap(err, "Unable to sync assets")
}
isoConfig, err := s.IsoReader.Read(depsIsoPath)
if err != nil {
return errors.SafeWrap(err, fmt.Sprintf("%s is not compatible with CF Dev. Please use a compatible file.", depsIsoName))
}
if isoConfig.Version != compatibilityVersion {
return fmt.Errorf("%s is not compatible with CF Dev. Please use a compatible file", depsIsoName)
}
s.Analytics.PromptOptInIfNeeded(isoConfig.AnalyticsMessage)
s.Analytics.Event(cfanalytics.START_BEGIN, map[string]interface{}{
"total memory": tMem,
"available memory": aMem,
})
if args.DeploySingleService != "" {
if !s.isServiceSupported(args.DeploySingleService, isoConfig.Services) {
return fmt.Errorf("Service: '%v' is not supported", args.DeploySingleService)
}
s.Analytics.Event(cfanalytics.SELECTED_SERVICE, map[string]interface{}{"services_requested": args.DeploySingleService})
}
memoryToAllocate, err := s.allocateMemory(isoConfig, args.Mem)
if err != nil {
return err
}
s.UI.Say("Creating the VM...")
if err := s.Hypervisor.CreateVM(hypervisor.VM{
Name: "cfdev",
CPUs: args.Cpus,
MemoryMB: memoryToAllocate,
DepsIso: depsIsoPath,
}); err != nil {
return errors.SafeWrap(err, "creating the vm")
}
s.UI.Say("Starting VPNKit...")
if err := s.VpnKit.Start(); err != nil {
return errors.SafeWrap(err, "starting vpnkit")
}
s.VpnKit.Watch(s.LocalExit)
s.UI.Say("Starting the VM...")
if err := s.Hypervisor.Start("cfdev"); err != nil {
return errors.SafeWrap(err, "starting the vm")
}
s.UI.Say("Waiting for Garden...")
s.waitForGarden()
if args.NoProvision {
s.UI.Say("VM will not be provisioned because '-n' (no-provision) flag was specified.")
return nil
}
if err := s.provision(isoConfig, registries, args.DeploySingleService); err != nil {
return err
}
if s.AnalyticsToggle.Enabled() {
if err := s.AnalyticsD.Start(); err != nil {
s.UI.Say("WARNING: analyticsd failed to start: %v", err)
}
}
s.Analytics.Event(cfanalytics.START_END)
return nil
}
func (s *Start) provision(isoConfig iso.Metadata, registries []string, deploySingleService string) error {
s.UI.Say("Deploying the BOSH Director...")
if err := s.Provisioner.DeployBosh(); err != nil {
return errors.SafeWrap(err, "Failed to deploy the BOSH Director")
}
s.UI.Say("Deploying CF...")
s.Provisioner.ReportProgress(s.UI, "cf")
if err := s.Provisioner.DeployCloudFoundry(registries); err != nil {
return errors.SafeWrap(err, "Failed to deploy Cloud Foundry")
}
services, err := s.Provisioner.WhiteListServices(deploySingleService, isoConfig.Services)
if err != nil {
return errors.SafeWrap(err, "Failed to whitelist services")
}
if err := s.Provisioner.DeployServices(s.UI, services); err != nil {
return errors.SafeWrap(err, "Failed to deploy services")
}
if isoConfig.Message != "" {
t := template.Must(template.New("message").Parse(isoConfig.Message))
err := t.Execute(s.UI.Writer(), map[string]string{"SYSTEM_DOMAIN": "dev.cfdev.sh"})
if err != nil {
return errors.SafeWrap(err, "Failed to print deps file provided message")
}
}
return nil
}
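// Hedged sketch of the deps-file message rendering in provision() above: the
// iso's Message is a text/template with SYSTEM_DOMAIN as its only variable.
// The sample message string and the os.Stdout writer are assumptions.
func renderMessageExample() error {
	msg := "CF Dev is ready; log in at https://login.{{.SYSTEM_DOMAIN}}" // hypothetical message
	t := template.Must(template.New("message").Parse(msg))
	return t.Execute(os.Stdout, map[string]string{"SYSTEM_DOMAIN": "dev.cfdev.sh"})
}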
func (s *Start) waitForGarden() {
for | {
if err := s.Provisioner.Ping(); err == nil {
return
}
time.Sleep(time.Second)
} | conditional_block |
|
Embedding.py | at_reduce=True):
if rat_reduce:
g = gcd(a,b)
a //= g
b //= g
self.numerator = a
self.b = b
def pairwise_check(self,other):
if type(other) in [int,np.int32,np.int64]:
other = Rational(other,1,rat_reduce=False)
elif type(other) in [float,np.float64]:
other = continued_frac_approx_convergents(other)[-1]
return other
def __add__(self, other):
other = self.pairwise_check(other)
return Rational(self.numerator*other.b+other.numerator*self.b,self.b*other.b,rat_reduce=True)
def __neg__(self):
return Rational(-self.numerator,self.b)
def __sub__(self, other):
return self+(-other)
def __mul__(self, other):
other = self.pairwise_check(other)
return Rational(self.numerator*other.numerator,self.b*other.b,rat_reduce=True)
def __abs__(self):
return Rational(abs(self.numerator),abs(self.b))
def __repr__(self):
if self.b == 0:
return ("-" if self.numerator < 0 else "")+"INF"
return "{}/{}".format(self.numerator,self.b)
"""
called with the unary "~" operator
"""
def __invert__(self):
return Rational(self.b,self.numerator)
def __truediv__(self, other):
return self * (~other)
def __floordiv__(self, other):
return self / other
def __le__(self, other):
other = self.pairwise_check(other)
diff = self - other
return diff.numerator <= 0
def __eq__(self, other):
other = self.pairwise_check(other)
return (self.numerator == other.numerator) and (self.b == other.b)
def __lt__(self, other):
return (self <= other) and (self != other)
def __gt__(self, other):
return not (self <= other)
def __ge__(self, other):
return not (self < other)
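# Hedged sanity sketch of the operator overloads above; the expected values
# follow directly from the definitions (not from any shipped tests).
def _rational_examples():
    half, third = Rational(1, 2), Rational(1, 3)
    assert repr(half + third) == "5/6"     # (1*3 + 1*2)/(2*3), then reduced
    assert repr(~(half + third)) == "6/5"  # unary ~ inverts
    assert repr(Rational(6, 4)) == "3/2"   # gcd reduction in __init__
    assert (half - third) > 0              # comparisons route through __sub__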
class n_adic(Rational):
"""
d = None is taken to mean d = -inf (denom = 0 or x = inf)
"""
def __init__(self,a:int,n: int,d,n_adic_reduce=True):
assert ((d is None) or (type(d) == int))
if n_adic_reduce:
while (d is not None) and (d > 0) and ((a%n) == 0):
a //= n
d -= 1
self.numerator = a
self.n = n
self.exp = d
if d is None:
super().__init__(a,0,rat_reduce=False)
else:
super().__init__(a,n**d,rat_reduce=False)
def __add__(self, other):
other = self.pairwise_check(other)
if self.exp is None:
if (other.exp is None) and (((self.numerator < 0) and (other.numerator > 0)) or ((self.numerator > 0) and (other.numerator < 0))):
raise ZeroDivisionError("INF + (-INF) is indeterminate")
return self
if other.exp is None:
return other
if self.exp > other.exp:
oscale = self.n**(self.exp-other.exp)
sscale = 1
elif other.exp > self.exp:
oscale = 1
sscale = self.n**(other.exp-self.exp)
else:
oscale = sscale = 1
return n_adic(self.numerator*sscale+other.numerator*oscale,self.n,max(self.exp,other.exp),n_adic_reduce=True)
def __mul__(self, other):
other = self.pairwise_check(other)
#(a/n^d)*(b/n^e) = (ab)/(n^(d+e))
return n_adic(self.numerator*other.numerator,self.n,self.exp+other.exp,n_adic_reduce=True)
def __neg__(self):
return n_adic(-self.numerator,self.n,self.exp,n_adic_reduce=False)
def __eq__(self, other):
return (self.n == other.n) and (self.numerator == other.numerator) and (self.exp == other.exp)
def __abs__(self):
return n_adic(abs(self.numerator),self.n,self.exp,n_adic_reduce=False)
def pairwise_check(self,other):
if type(other) in [int,np.int32,np.int64]:
other = n_adic(int(other),self.n,0)
elif type(other) in [float,np.float64]:
other = continued_frac_nadic_approx(other,self.n)
return other
def children(self):
chdenom = self.exp+1
chld = []
for chnum in range(self.n*(self.numerator-1)+1,self.n*self.numerator):
chld.append(n_adic(chnum,self.n,chdenom,n_adic_reduce=False))
return chld
def is_ancestor_of(self, other):
if self == other:
return True
if self.exp >= other.exp:
return False
#just need to check the main condition now
scale = self.n**(other.exp-self.exp)
rbound = scale*self.numerator
if other.numerator >= rbound:
return False
lbound = rbound-scale
return other.numerator >= lbound
'''
this version is ancestor-weighted ONLY (i.e. not descendant-weighted)
'''
def dist_to(self, other):
raw_dist = abs(self-other)
if self.is_ancestor_of(other):
return raw_dist
else:
return raw_dist+n_adic(1,self.n,0,n_adic_reduce=False)
'''
this is the descendant-ancestor weighted metric
'''
def dist_to_DA(self, other):#FIXME which parts of this fail to match the progress report? (the report is correct)
if self == other: #both descendant and ancestor case
return -n_adic(1,self.n,None,n_adic_reduce=False) #take this to mean (negative) infinity
s_anc_o = self.is_ancestor_of(other)
o_anc_s = other.is_ancestor_of(self)
#we know that not both of these ^^ are true at this point
if s_anc_o:
return n_adic(self.exp-other.exp,self.n,0,n_adic_reduce=False) #-(e-d)#FIXME this part of the distance metric is wrong
elif o_anc_s:
return n_adic(other.exp-self.exp,self.n,0,n_adic_reduce=False) #-(d-e)
else:
raw_dist = abs(self-other)
return raw_dist
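# Hedged walk-through of the n-adic tree metric above on the ternary tree;
# treat the values as a sketch derived from the definitions, not a test suite.
def _nadic_examples():
    root = n_adic(1, 3, 0)                  # address 1/1, depth 0
    kids = root.children()                  # [1/3, 2/3]
    assert all(root.is_ancestor_of(k) for k in kids)
    assert repr(root.dist_to_DA(kids[0])) == "-1/1"    # ancestor case: -(e-d)
    assert repr(kids[0].dist_to_DA(kids[1])) == "1/3"  # siblings: plain |x-y|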
def lnat_inner(x, max_depth, depth=0):
s = ''
if depth > max_depth:
return s
s += '\t'*depth
s += '[.'+str(x)
s += '\n'
#print all children
for ch in x.children():
s += lnat_inner(ch, max_depth, depth=depth+1)
s += '\t'*depth
s += ']\n'
return s
def eval_convergents(cidxs:List[int]):
res = Rational(cidxs[-1],1)
for cidx in cidxs[-2::-1]:
res = ~res
res += cidx
return res
def continued_frac_convergents(r_inp:Rational) -> List[Rational]:
#TODO is there a faster way than just brute forcing the actual convergents?
r = abs(r_inp)
i = r.numerator//r.b
cidxs = [i]
convs = [Rational(i,1)]
rem = r - i
while rem.numerator > 1:
i = rem.b//rem.numerator
rem = Rational(rem.b%rem.numerator,rem.numerator)
cidxs.append(i)
conv = eval_convergents(cidxs)
convs.append(conv)
convs.append(r)
return convs
def continued_frac_approx_convergents(x:Union[float,np.float64],w=100) -> List[Rational]:
if not np.isfinite(x):
return [Rational(int(np.sign(x)),0)]
#first generate a totally brain-dead guess (i.e. <integer part of x> + <rational part of x>*2^w / 2^w
i = int(x)
ratxnum = int((x-i)*(2**w))
if ratxnum == 0:
return [Rational(i,1)]
rat = Rational(ratxnum,1<<w) + i
convs = continued_frac_convergents(rat)
return convs
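# Hedged usage sketch: pi is first truncated to a width-100 dyadic rational,
# then its exact convergents are produced; the early entries should match the
# familiar sequence (3/1, 22/7, 333/106, 355/113, ...). Output is illustrative.
def _convergent_example():
    for c in continued_frac_approx_convergents(float(np.pi))[:4]:
        print(c)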
'''
w is the truncation width for the rational part of x
'''
def continued_frac_nadic_approx(x:Union[float,np.float64],n:int,w=100) -> n_adic:
convs = continued_frac_approx_convergents(x,w)
if convs[0].b == 1:
if len(convs) != 1:
convs = convs[1:]#drop things like /1 and what not
if convs[0].b == 0:
| return n_adic(convs[0].numerator,n,0) | conditional_block |
|
Embedding.py | 0:
current = q.pop(0)
chl = self.__gac__(current.address)#don't be too picky about types with this one
ai = 0
for v in current.neighbors:
if v.address is None:
if ai >= len(chl):
raise ValueError("The graph contains nodes of higher degree than the tree allows. Increase the tree branching parameter")
v.address = chl[ai]
v.adist = self.dist
ai += 1
q.append(v)
for v in G:
if (v.address is None) or (v.adist is None):
raise ValueError("Graph is not connected! Embedding incomplete.")
return G
def hyper_dist(a:complex,b:complex):
return np.arccosh(1 + (2*(abs(a-b)**2))/((1 - abs(a)**2)*(1 - abs(b)**2)))#FIXME why is this taking so long? Maybe we should precalculate/cache all neighbor distances
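# Hedged numeric check of hyper_dist: along a diameter of the Poincare disk the
# distance from 0 to r is log((1+r)/(1-r)), so d(0, 0.5) should equal log(3).
def _hyper_dist_example():
    assert abs(hyper_dist(0j, 0.5 + 0j) - np.log(3.0)) < 1e-12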
class Isometry:
def __init__(self,rotation,translation):
self.rot = rotation
self.trans = translation
def __repr__(self):
return 'ISO r = {}; t = {}'.format(self.rot,self.trans)
'''
arg 0 is an isometry (pair of complex numbers), arg 1 is a single complex number
'''
def evaluate(self,arg):
|
def cross(self,l):
return Isometry((self.rot * l.rot + l.rot * self.trans * np.conj(l.trans)) / (self.rot * l.trans * np.conj(self.trans) + 1),
(self.rot * l.trans + self.trans) / (self.rot * l.trans * np.conj(self.trans) + 1))
def inv(self):
a = Isometry(np.conj(self.rot),0)
b = Isometry(1,-self.trans)
return a.cross(b)
'''
algorithm 1
'''
def define_generators(q):
generators = []
rot_isom = Isometry(np.e ** (1j * (2 * np.pi / q)),0)
trans_isom = Isometry(1,np.tanh(np.arccosh(1 / (np.sin(np.pi / q))))).cross(Isometry(-1,0))
for i in range(q):
#for some reason doing it the way their pseudocode says to doesn't work because of this R^i thing
#it only affects the zeroth generator (and therefore only the root)
rot_isom_i = Isometry((np.array([rot_isom.rot,rot_isom.trans]) ** complex(i))[0],0j)
generators.append(rot_isom_i.cross(trans_isom).cross(rot_isom_i.inv()))
return generators
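# Hedged illustration of the construction above: each generator is a rotation-
# conjugated translation, so every generator should carry the origin to a point
# at the same radius (the daughter circle of the tiling).
def _generator_example(q=6):
    radii = [abs(g.evaluate(0)) for g in define_generators(q)]
    assert max(radii) - min(radii) < 1e-9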
class Hyperbolic_Embedder(TreeEmbedder):
def __init__(self,q):
super().__init__('hyperbolic',({complex,np.complex128},{int,np.int64,np.int32},Isometry),{float,np.float64})#TODO double check typing here
self.q = q
'''
algorithm 3
'''
def calculate_daughter_coords(self,adr):
coords,idx,isom = adr
generators = define_generators(self.q)
d_coords = []
for i in range(self.q):
didx = (idx+i)%self.q
disom = isom.cross(generators[didx])
dcoord = disom.evaluate(0)
d_coords.append((dcoord,didx,disom))
return d_coords
def __dist__(self,adrx,adry):
coordx,_,_ = adrx
coordy,_,_ = adry
return hyper_dist(coordx,coordy)
def __gac__(self,adr):
return self.calculate_daughter_coords(adr)
def __grt__(self):
rcoords = 0 + 0j
ridx = 0
risom = Isometry(1 + 0j,0 + 0j)
return (rcoords,ridx,risom)
class NAdic_Embedder(TreeEmbedder):
def __init__(self,n):
super().__init__("n-adic",n_adic,n_adic)
if (type(n) != int) or (n < 2):
raise TypeError("n-adic embedding requires n to be an integer greater than 1")
self.n = n
def __dist__(self,adrx,adry):
return adrx.dist_to_DA(adry)
def __gac__(self,adr):
return adr.children()
def __grt__(self):
return n_adic(1,self.n,0,n_adic_reduce=False)
def long_divide(a,b):
q = a//b
adiffr0 = a - q*b
adiff0 = abs(adiffr0)
adiffr1 = adiffr0 - b
adiff1 = abs(adiffr1)
if adiff0 < adiff1:
return q,adiffr0
else:
return q+1,adiffr1
def ext_eucl_int(a:int,b:int,gcd_only=False):
if a == 0:
return b
if b == 0:
return a
carda = (1,0)
cardb = (0,1)
q,r = long_divide(a,b)
cardc = (carda[0] - (q*cardb[0]),carda[1] - (q*cardb[1]))
carda = cardb
cardb = cardc
a = b
b = r
while r != 0:
q, r = long_divide(a, b)
cardc = (carda[0]-(q*cardb[0]), carda[1]-(q*cardb[1]))
carda = cardb
cardb = cardc
a = b
b = r
if a < 0:
a = -a
carda = (-carda[0],-carda[1])
if gcd_only:
return a
else:
return a,carda
def gcd(a:int,b:int):
return ext_eucl_int(a,b,gcd_only=True)
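# Hedged Bezout check: the exact coefficients depend on the nearest-quotient
# long_divide above, but they must always satisfy a*x + b*y == gcd(a, b).
def _bezout_example(a=240, b=46):
    g, (x, y) = ext_eucl_int(a, b)
    assert g == 2 and a * x + b * y == g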
class Rational:
"""
a rational number a/b where a and b are coprime* integers (b = 0 is thought of as being infinite)
* iff rat_reduce is set to true
"""
def __init__(self,a:int,b:int,rat_reduce=True):
if rat_reduce:
g = gcd(a,b)
a //= g
b //= g
self.numerator = a
self.b = b
def pairwise_check(self,other):
if type(other) in [int,np.int32,np.int64]:
other = Rational(other,1,rat_reduce=False)
elif type(other) in [float,np.float64]:
other = continued_frac_approx_convergents(other)[-1]
return other
def __add__(self, other):
other = self.pairwise_check(other)
return Rational(self.numerator*other.b+other.numerator*self.b,self.b*other.b,rat_reduce=True)
def __neg__(self):
return Rational(-self.numerator,self.b)
def __sub__(self, other):
return self+(-other)
def __mul__(self, other):
other = self.pairwise_check(other)
return Rational(self.numerator*other.numerator,self.b*other.b,rat_reduce=True)
def __abs__(self):
return Rational(abs(self.numerator),abs(self.b))
def __repr__(self):
if self.b == 0:
return ("-" if self.numerator < 0 else "")+"INF"
return "{}/{}".format(self.numerator,self.b)
"""
called with the unary "~" operator
"""
def __invert__(self):
return Rational(self.b,self.numerator)
def __truediv__(self, other):
return self * (~other)
def __floordiv__(self, other):
return self / other
def __le__(self, other):
other = self.pairwise_check(other)
diff = self - other
return diff.numerator <= 0
def __eq__(self, other):
other = self.pairwise_check(other)
return (self.numerator == other.numerator) and (self.b == other.b)
def __lt__(self, other):
return (self <= other) and (self != other)
def __gt__(self, other):
return not (self <= other)
def __ge__(self, other):
return not (self < other)
class n_adic(Rational):
"""
d = None is taken to mean d = -inf (denom = 0 or x = inf)
"""
def __init__(self,a:int,n: int,d,n_adic_reduce=True):
assert ((d is None) or (type(d) == int))
if n_adic_reduce:
while (d is not None) and (d > 0) and ((a%n) == 0):
a //= n
d -= 1
self.numerator = a
self.n = n
self.exp = d
if d is None:
super().__init__(a,0,rat_reduce=False)
else:
super().__init__(a,n**d,rat_reduce=False | return (self.rot * arg + self.trans) / (1 + np.conj(self.trans) * self.rot * arg) | identifier_body |
Embedding.py | (self,etype,atype,dtype):
super().__init__(etype,atype,dtype)
def get_adr_child(self,adr,i):
self.atype_check(adr)
ch = self.__gchld__(adr,i)
self.atype_check(ch)
return ch
def get_root(self):
r = self.__grt__()
self.atype_check(r)
return r
@abstractmethod
def __gchld__(self,adr,i):
pass
@abstractmethod
def __grt__(self):
pass
def address_graph(self,G:List[TNNode],root_idx=None):
if root_idx is None:
root_idx = random.randint(0,len(G)-1)#random unless otherwise specified
root_adr = self.get_root()
#bfs on G to assign the addresses
q = [G[root_idx]]
G[root_idx].address = root_adr
G[root_idx].adist = self.dist
#we'll use the assignment of an address as a seen flag
while len(q) > 0:
current = q.pop(0)
ai = 0
for v in current.neighbors:
if v.address is None:
v.address = self.__gchld__(current.address,ai)
v.adist = self.dist
ai += 1
q.append(v)
for v in G:
if (v.address is None) or (v.adist is None):
raise ValueError("Graph is not connected! Embedding incomplete.")
return G
class TreeEmbedder(Embedder,ABC):
"""
this is specifically for tree-like embeddings
"""
def __init__(self, etype, atype, dtype):
super().__init__(etype,atype,dtype)
def get_adr_children(self,adr):
self.atype_check(adr)
chls = self.__gac__(adr)
for ch in chls:
self.atype_check(ch)
return chls.copy()
@abstractmethod
def __gac__(self,adr):
pass
def get_root(self):
r = self.__grt__()
self.atype_check(r)
return r
@abstractmethod
def __grt__(self):
pass
def address_graph(self,G:List[TNNode],root_idx=None):
if root_idx is None:
root_idx = random.randint(0,len(G)-1)#random unless otherwise specified
root_adr = self.get_root()
#bfs on G to assign the addresses
q = [G[root_idx]]
G[root_idx].address = root_adr
G[root_idx].adist = self.dist
#we'll use the assignment of an address as a seen flag
while len(q) > 0:
current = q.pop(0)
chl = self.__gac__(current.address)#don't be too picky about types with this one
ai = 0
for v in current.neighbors:
if v.address is None:
if ai >= len(chl):
raise ValueError("The graph contains nodes of higher degree than the tree allows. Increase the tree branching parameter")
v.address = chl[ai]
v.adist = self.dist
ai += 1
q.append(v)
for v in G:
if (v.address is None) or (v.adist is None):
raise ValueError("Graph is not connected! Embedding incomplete.")
return G
def hyper_dist(a:complex,b:complex):
return np.arccosh(1 + (2*(abs(a-b)**2))/((1 - abs(a)**2)*(1 - abs(b)**2)))#FIXME why is this taking so long? Maybe we should precalculate/cache all neighbor distances
class Isometry:
def __init__(self,rotation,translation):
self.rot = rotation
self.trans = translation
def __repr__(self):
return 'ISO r = {}; t = {}'.format(self.rot,self.trans)
'''
arg 0 is an isometry (pair of complex numbers), arg 1 is a single complex number
'''
def evaluate(self,arg):
return (self.rot * arg + self.trans) / (1 + np.conj(self.trans) * self.rot * arg)
def cross(self,l):
return Isometry((self.rot * l.rot + l.rot * self.trans * np.conj(l.trans)) / (self.rot * l.trans * np.conj(self.trans) + 1),
(self.rot * l.trans + self.trans) / (self.rot * l.trans * np.conj(self.trans) + 1))
def inv(self):
a = Isometry(np.conj(self.rot),0)
b = Isometry(1,-self.trans)
return a.cross(b)
'''
algorithm 1
'''
def define_generators(q):
generators = []
rot_isom = Isometry(np.e ** (1j * (2 * np.pi / q)),0)
trans_isom = Isometry(1,np.tanh(np.arccosh(1 / (np.sin(np.pi / q))))).cross(Isometry(-1,0))
for i in range(q):
#for some reason doing it the way their pseudocode says to doesn't work because of this R^i thing
#it only affects the zeroth generator (and therefore only the root)
rot_isom_i = Isometry((np.array([rot_isom.rot,rot_isom.trans]) ** complex(i))[0],0j)
generators.append(rot_isom_i.cross(trans_isom).cross(rot_isom_i.inv()))
return generators
class Hyperbolic_Embedder(TreeEmbedder):
def __init__(self,q):
super().__init__('hyperbolic',({complex,np.complex128},{int,np.int64,np.int32},Isometry),{float,np.float64})#TODO double check typing here
self.q = q
'''
algorithm 3
'''
def calculate_daughter_coords(self,adr):
coords,idx,isom = adr
generators = define_generators(self.q)
d_coords = []
for i in range(self.q):
didx = (idx+i)%self.q
disom = isom.cross(generators[didx])
dcoord = disom.evaluate(0)
d_coords.append((dcoord,didx,disom))
return d_coords
def __dist__(self,adrx,adry):
coordx,_,_ = adrx
coordy,_,_ = adry
return hyper_dist(coordx,coordy)
def __gac__(self,adr):
return self.calculate_daughter_coords(adr)
def __grt__(self):
rcoords = 0 + 0j
ridx = 0
risom = Isometry(1 + 0j,0 + 0j)
return (rcoords,ridx,risom)
class NAdic_Embedder(TreeEmbedder):
def __init__(self,n):
super().__init__("n-adic",n_adic,n_adic)
if (type(n) != int) or (n < 2):
raise TypeError("n-adic embedding requires n to be an integer greater than 1")
self.n = n
def __dist__(self,adrx,adry):
return adrx.dist_to_DA(adry)
def __gac__(self,adr):
return adr.children()
def __grt__(self):
return n_adic(1,self.n,0,n_adic_reduce=False)
def long_divide(a,b):
q = a//b
adiffr0 = a - q*b
adiff0 = abs(adiffr0)
adiffr1 = adiffr0 - b
adiff1 = abs(adiffr1)
if adiff0 < adiff1:
return q,adiffr0
else:
return q+1,adiffr1
def ext_eucl_int(a:int,b:int,gcd_only=False):
if a == 0:
return b
if b == 0:
return a
carda = (1,0)
cardb = (0,1)
q,r = long_divide(a,b)
cardc = (carda[0] - (q*cardb[0]),carda[1] - (q*cardb[1]))
carda = cardb
cardb = cardc
a = b
b = r
while r != 0:
q, r = long_divide(a, b)
cardc = (carda[0]-(q*cardb[0]), carda[1]-(q*cardb[1]))
carda = cardb
cardb = cardc
a = b
b = r
if a < 0:
a = -a
carda = (-carda[0],-carda[1])
if gcd_only:
return a
else:
return a,carda
def gcd(a:int,b:int):
return ext_eucl_int(a,b,gcd_only=True)
class Rational:
"""
a rational number a/b where a and b are coprime* integers (b = 0 is thought of as being infinite)
* iff rat_reduce is set to true
"""
def __init__(self,a:int,b:int,rat_reduce=True):
if rat_reduce:
g = gcd(a,b)
a //= | __init__ | identifier_name |
|
Embedding.py | 0:
current = q.pop(0)
chl = self.__gac__(current.address)#don't be too picky about types with this one
ai = 0
for v in current.neighbors:
if v.address is None:
if ai >= len(chl):
raise ValueError("The graph contains nodes of higher degree than the tree allows. Increase the tree branching parameter")
v.address = chl[ai]
v.adist = self.dist
ai += 1
q.append(v)
for v in G:
if (v.address is None) or (v.adist is None):
raise ValueError("Graph is not connected! Embedding incomplete.")
return G
def hyper_dist(a:complex,b:complex):
return np.arccosh(1 + (2*(abs(a-b)**2))/((1 - abs(a)**2)*(1 - abs(b)**2)))#FIXME why is this taking so long? Maybe we should precalculate/cache all neighbor distances
class Isometry:
def __init__(self,rotation,translation):
self.rot = rotation
self.trans = translation
def __repr__(self):
return 'ISO r = {}; t = {}'.format(self.rot,self.trans)
'''
arg 0 is an isometry (pair of complex numbers), arg 1 is a single complex number
'''
def evaluate(self,arg):
return (self.rot * arg + self.trans) / (1 + np.conj(self.trans) * self.rot * arg)
def cross(self,l):
return Isometry((self.rot * l.rot + l.rot * self.trans * np.conj(l.trans)) / (self.rot * l.trans * np.conj(self.trans) + 1),
(self.rot * l.trans + self.trans) / (self.rot * l.trans * np.conj(self.trans) + 1))
def inv(self):
a = Isometry(np.conj(self.rot),0)
b = Isometry(1,-self.trans)
return a.cross(b)
'''
algorithm 1
'''
def define_generators(q):
generators = []
rot_isom = Isometry(np.e ** (1j * (2 * np.pi / q)),0)
trans_isom = Isometry(1,np.tanh(np.arccosh(1 / (np.sin(np.pi / q))))).cross(Isometry(-1,0))
for i in range(q):
#for some reason doing it the way their pseudocode says to doesn't work because of this R^i thing
#it only affects the zeroth generator (and therefore only the root)
rot_isom_i = Isometry((np.array([rot_isom.rot,rot_isom.trans]) ** complex(i))[0],0j)
generators.append(rot_isom_i.cross(trans_isom).cross(rot_isom_i.inv()))
return generators
class Hyperbolic_Embedder(TreeEmbedder):
def __init__(self,q):
super().__init__('hyperbolic',({complex,np.complex128},{int,np.int64,np.int32},Isometry),{float,np.float64})#TODO double check typing here
self.q = q
'''
algorithm 3
'''
def calculate_daughter_coords(self,adr): | for i in range(self.q):
didx = (idx+i)%self.q
disom = isom.cross(generators[didx])
dcoord = disom.evaluate(0)
d_coords.append((dcoord,didx,disom))
return d_coords
def __dist__(self,adrx,adry):
coordx,_,_ = adrx
coordy,_,_ = adry
return hyper_dist(coordx,coordy)
def __gac__(self,adr):
return self.calculate_daughter_coords(adr)
def __grt__(self):
rcoords = 0 + 0j
ridx = 0
risom = Isometry(1 + 0j,0 + 0j)
return (rcoords,ridx,risom)
class NAdic_Embedder(TreeEmbedder):
def __init__(self,n):
super().__init__("n-adic",n_adic,n_adic)
if (type(n) != int) or (n < 2):
raise TypeError("n-adic embedding requires n to be an integer greater than 1")
self.n = n
def __dist__(self,adrx,adry):
return adrx.dist_to_DA(adry)
def __gac__(self,adr):
return adr.children()
def __grt__(self):
return n_adic(1,self.n,0,n_adic_reduce=False)
def long_divide(a,b):
q = a//b
adiffr0 = a - q*b
adiff0 = abs(adiffr0)
adiffr1 = adiffr0 - b
adiff1 = abs(adiffr1)
if adiff0 < adiff1:
return q,adiffr0
else:
return q+1,adiffr1
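# Hedged examples of nearest-quotient division: the remainder returned is the
# signed residue of smallest magnitude, so it can be negative.
def _long_divide_examples():
    assert long_divide(7, 3) == (2, 1)    # 7 = 2*3 + 1
    assert long_divide(8, 3) == (3, -1)   # 8 = 3*3 - 1: nearer multiple wins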
def ext_eucl_int(a:int,b:int,gcd_only=False):
if a == 0:
return b
if b == 0:
return a
carda = (1,0)
cardb = (0,1)
q,r = long_divide(a,b)
cardc = (carda[0] - (q*cardb[0]),carda[1] - (q*cardb[1]))
carda = cardb
cardb = cardc
a = b
b = r
while r != 0:
q, r = long_divide(a, b)
cardc = (carda[0]-(q*cardb[0]), carda[1]-(q*cardb[1]))
carda = cardb
cardb = cardc
a = b
b = r
if a < 0:
a = -a
carda = (-carda[0],-carda[1])
if gcd_only:
return a
else:
return a,carda
def gcd(a:int,b:int):
return ext_eucl_int(a,b,gcd_only=True)
class Rational:
"""
a rational number a/b where a and b are coprime* integers (b = 0 is thought of as being infinite)
* iff rat_reduce is set to true
"""
def __init__(self,a:int,b:int,rat_reduce=True):
if rat_reduce:
g = gcd(a,b)
a //= g
b //= g
self.numerator = a
self.b = b
def pairwise_check(self,other):
if type(other) in [int,np.int32,np.int64]:
other = Rational(other,1,rat_reduce=False)
elif type(other) in [float,np.float64]:
other = continued_frac_approx_convergents(other)[-1]
return other
def __add__(self, other):
other = self.pairwise_check(other)
return Rational(self.numerator*other.b+other.numerator*self.b,self.b*other.b,rat_reduce=True)
def __neg__(self):
return Rational(-self.numerator,self.b)
def __sub__(self, other):
return self+(-other)
def __mul__(self, other):
other = self.pairwise_check(other)
return Rational(self.numerator*other.numerator,self.b*other.b,rat_reduce=True)
def __abs__(self):
return Rational(abs(self.numerator),abs(self.b))
def __repr__(self):
if self.b == 0:
return ("-" if self.numerator < 0 else "")+"INF"
return "{}/{}".format(self.numerator,self.b)
"""
called with the unary "~" operator
"""
def __invert__(self):
return Rational(self.b,self.numerator)
def __truediv__(self, other):
return self * (~other)
def __floordiv__(self, other):
return self / other
def __le__(self, other):
other = self.pairwise_check(other)
diff = self - other
return diff.numerator <= 0
def __eq__(self, other):
other = self.pairwise_check(other)
return (self.numerator == other.numerator) and (self.b == other.b)
def __lt__(self, other):
return (self <= other) and (self != other)
def __gt__(self, other):
return not (self <= other)
def __ge__(self, other):
return not (self < other)
class n_adic(Rational):
"""
d = None is taken to mean d = -inf (denom = 0 or x = inf)
"""
def __init__(self,a:int,n: int,d,n_adic_reduce=True):
assert ((d is None) or (type(d) == int))
if n_adic_reduce:
while (d is not None) and (d > 0) and ((a%n) == 0):
a //= n
d -= 1
self.numerator = a
self.n = n
self.exp = d
if d is None:
super().__init__(a,0,rat_reduce=False)
else:
super().__init__(a,n**d,rat_reduce=False)
| coords,idx,isom = adr
generators = define_generators(self.q)
d_coords = [] | random_line_split |
etcdstatedriver.go | encapsulates the etcd endpoints used to communicate
// with it.
type EtcdStateDriverConfig struct {
Etcd struct {
Machines []string
}
}
// EtcdStateDriver implements the StateDriver interface for an etcd based distributed
// key-value store used to store config and runtime state for the netplugin. | // KeysAPI client.KeysAPI
Client *client.Client
}
// Init the driver with a core.Config.
func (d *EtcdStateDriver) Init(instInfo *core.InstanceInfo) error {
var err error
var endpoint *url.URL
if instInfo == nil || len(instInfo.DbURL) == 0 {
return errors.New("no etcd config found")
}
tlsInfo := transport.TLSInfo{
CertFile: instInfo.DbTLSCert,
KeyFile: instInfo.DbTLSKey,
TrustedCAFile: instInfo.DbTLSCa,
}
tlsConfig, err := tlsInfo.ClientConfig()
if err != nil {
log.Fatalf("error tlsInfo Format. Err: %v", err)
}
if len(instInfo.DbTLSCert) == 0 && len(instInfo.DbTLSKey) == 0 && len(instInfo.DbTLSCa) == 0 {
tlsConfig = nil
}
for _, dburl := range instInfo.DbURL {
endpoint, err = url.Parse(dburl)
if err != nil {
return err
}
if endpoint.Scheme == "etcd" {
if len(instInfo.DbTLSCert) == 0 && len(instInfo.DbTLSKey) == 0 && len(instInfo.DbTLSCa) == 0 {
endpoint.Scheme = "http"
} else {
endpoint.Scheme = "https"
}
} else if endpoint.Scheme != "http" && endpoint.Scheme != "https" {
return core.Errorf("invalid etcd URL scheme %q", endpoint.Scheme)
}
}
cfg := client.Config{
Endpoints: instInfo.DbURL,
TLS: tlsConfig,
}
d.Client, err = client.New(cfg)
if err != nil {
log.Fatalf("error creating etcd client. Err: %v", err)
}
return nil
}
// Deinit closes the underlying etcd client.
func (d *EtcdStateDriver) Deinit() {
d.Client.Close()
}
// Write state to key with value.
func (d *EtcdStateDriver) Write(key string, value []byte) error {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
var err error
for i := 0; i < maxEtcdRetries; i++ {
_, err = d.Client.KV.Put(ctx, key, string(value[:]))
if err != nil && err.Error() == client.ErrNoAvailableEndpoints.Error() {
// Retry after a delay
time.Sleep(time.Second)
continue
}
// when err == nil or anything other than connection refused
return err
}
return err
}
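// Hedged usage sketch for the retrying Write above. The key layout and the use
// of encoding/json (assumed imported) are illustrative only.
func writeExample(d *EtcdStateDriver) error {
	payload, err := json.Marshal(map[string]string{"id": "net-1"})
	if err != nil {
		return err
	}
	return d.Write("/contiv.io/state/nets/net-1", payload) // hypothetical key
}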
// Read state from key.
func (d *EtcdStateDriver) Read(key string) ([]byte, error) {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
var err error
var resp *client.GetResponse
for i := 0; i < maxEtcdRetries; i++ {
// etcd3 uses quorum for reads by default
resp, err = d.Client.KV.Get(ctx, key)
log.Debugf("etcd read for key %q: err=%+v", key, err)
if err != nil {
if err.Error() == client.ErrNoAvailableEndpoints.Error() {
// Retry after a delay
time.Sleep(time.Second)
continue
}
if resp != nil && len(resp.Kvs) != 0 {
return []byte(resp.Kvs[0].Value), nil
}
return []byte{}, fmt.Errorf("error reading from etcd")
}
if resp.Count == 0 {
return []byte{}, core.Errorf("key not found")
}
return resp.Kvs[0].Value, err
}
return []byte{}, err
}
// ReadAll state from baseKey.
func (d *EtcdStateDriver) ReadAll(baseKey string) ([][]byte, error) {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
var err error
var resp *client.GetResponse
for i := 0; i < maxEtcdRetries; i++ {
// etcd uses quorum for reads by default
resp, err = d.Client.KV.Get(ctx, baseKey, client.WithPrefix(), client.WithSort(client.SortByKey, client.SortAscend))
if err != nil {
if err.Error() == client.ErrNoAvailableEndpoints.Error() {
// Retry after a delay
time.Sleep(time.Second)
continue
}
}
if resp.Count == 0 {
return [][]byte{}, core.Errorf("key not found")
}
values := [][]byte{}
for _, node := range resp.Kvs {
values = append(values, []byte(node.Value))
}
return values, nil
}
return [][]byte{}, err
}
func (d *EtcdStateDriver) channelEtcdEvents(watcher client.WatchChan, rsps chan [2][]byte) {
for resp := range watcher {
for _, ev := range resp.Events {
// fmt.Printf("%s %q : %q\n", ev.Type, ev.Kv.Key, ev.Kv.Value)
rsp := [2][]byte{nil, nil}
eventStr := "create"
if string(ev.Kv.Value) != "" {
rsp[0] = ev.Kv.Value
}
if ev.PrevKv != nil && string(ev.PrevKv.Value) != "" {
rsp[1] = ev.PrevKv.Value
if string(ev.Kv.Value) != "" {
eventStr = "modify"
} else {
eventStr = "delete"
}
}
log.Debugf("Received %q for key: %s", eventStr, ev.Kv.Key)
//channel the translated response
rsps <- rsp
}
}
}
// WatchAll state transitions from baseKey
func (d *EtcdStateDriver) WatchAll(baseKey string, rsps chan [2][]byte) error {
watcher := d.Client.Watch(context.Background(), baseKey, client.WithPrefix())
go d.channelEtcdEvents(watcher, rsps)
return nil
}
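// Hedged consumer sketch for WatchAll: each event is a [2][]byte pair where
// rsp[0] is the new value (nil on delete) and rsp[1] the previous value (nil
// on create). The key prefix below is an assumption.
func watchExample(d *EtcdStateDriver) error {
	rsps := make(chan [2][]byte)
	if err := d.WatchAll("/contiv.io/state", rsps); err != nil {
		return err
	}
	go func() {
		for rsp := range rsps {
			log.Infof("watch event: new=%q old=%q", rsp[0], rsp[1])
		}
	}()
	return nil
}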
// ClearState removes key from etcd
func (d *EtcdStateDriver) ClearState(key string) error {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
_, err := d.Client.KV.Delete(ctx, key)
return err
}
// ReadState reads key into a core.State with the unmarshaling function.
func (d *EtcdStateDriver) ReadState(key string, value core.State,
unmarshal func([]byte, interface{}) error) error {
encodedState, err := d.Read(key)
if err != nil {
return err
}
return unmarshal(encodedState, value)
}
// readAllStateCommon reads and unmarshals (given a function) all state into a
// list of core.State objects.
// XXX: move this to some common file
func readAllStateCommon(d core.StateDriver, baseKey string, sType core.State,
unmarshal func([]byte, interface{}) error) ([]core.State, error) {
stateType := reflect.TypeOf(sType)
sliceType := reflect.SliceOf(stateType)
values := reflect.MakeSlice(sliceType, 0, 1)
byteValues, err := d.ReadAll(baseKey)
if err != nil {
return nil, err
}
for _, byteValue := range byteValues {
value := reflect.New(stateType)
err = unmarshal(byteValue, value.Interface())
if err != nil {
return nil, err
}
values = reflect.Append(values, value.Elem())
}
stateValues := []core.State{}
for i := 0; i < values.Len(); i++ {
// sanity checks
if !values.Index(i).Elem().FieldByName("CommonState").IsValid() {
return nil, core.Errorf("The state structure %v is missing core.CommonState",
stateType)
}
//the following works as every core.State is expected to embed core.CommonState struct
values.Index(i).Elem().FieldByName("CommonState").FieldByName("StateDriver").Set(reflect.ValueOf(d))
stateValue := values.Index(i).Interface().(core.State)
stateValues = append(stateValues, stateValue)
}
return stateValues, nil
}
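// Hedged sketch of the contract readAllStateCommon depends on: every concrete
// state type must embed core.CommonState so the reflection above can inject
// the StateDriver. The extra field is illustrative.
type exampleNetState struct {
	core.CommonState
	Name string `json:"name"`
}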
// ReadAllState Reads all the state from baseKey and returns a list of core.State.
func (d *EtcdStateDriver) ReadAllState(baseKey string, sType core.State,
unmarshal func([]byte, interface{}) error) ([]core.State, error) {
return readAllStateCommon(d, baseKey, sType, unmarshal)
}
// channelStateEvents watches for updates(created, modify, delete) to a state of
// specified type and unmarshals (given a function) all changes and puts them on
// channel of core.WatchState objects.
// XXX: move this to some common file
func channelStateEvents(d core.StateDriver, s | type EtcdStateDriver struct {
// Client client.Client | random_line_split |
etcdstatedriver.go | ulates the etcd endpoints used to communicate
// with it.
type EtcdStateDriverConfig struct {
Etcd struct {
Machines []string
}
}
// EtcdStateDriver implements the StateDriver interface for an etcd based distributed
// key-value store used to store config and runtime state for the netplugin.
type EtcdStateDriver struct {
// Client client.Client
// KeysAPI client.KeysAPI
Client *client.Client
}
// Init the driver with a core.Config.
func (d *EtcdStateDriver) Init(instInfo *core.InstanceInfo) error {
var err error
var endpoint *url.URL
if instInfo == nil || len(instInfo.DbURL) == 0 {
return errors.New("no etcd config found")
}
tlsInfo := transport.TLSInfo{
CertFile: instInfo.DbTLSCert,
KeyFile: instInfo.DbTLSKey,
TrustedCAFile: instInfo.DbTLSCa,
}
tlsConfig, err := tlsInfo.ClientConfig()
if err != nil {
log.Fatalf("error tlsInfo Format. Err: %v", err)
}
if len(instInfo.DbTLSCert) == 0 && len(instInfo.DbTLSKey) == 0 && len(instInfo.DbTLSCa) == 0 {
tlsConfig = nil
}
for _, dburl := range instInfo.DbURL {
endpoint, err = url.Parse(dburl)
if err != nil {
return err
}
if endpoint.Scheme == "etcd" {
if len(instInfo.DbTLSCert) == 0 && len(instInfo.DbTLSKey) == 0 && len(instInfo.DbTLSCa) == 0 {
endpoint.Scheme = "http"
} else {
endpoint.Scheme = "https"
}
} else if endpoint.Scheme != "http" && endpoint.Scheme != "https" |
}
cfg := client.Config{
Endpoints: instInfo.DbURL,
TLS: tlsConfig,
}
d.Client, err = client.New(cfg)
if err != nil {
log.Fatalf("error creating etcd client. Err: %v", err)
}
return nil
}
// Deinit is currently a no-op.
func (d *EtcdStateDriver) Deinit() {
d.Client.Close()
}
// Write state to key with value.
func (d *EtcdStateDriver) Write(key string, value []byte) error {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
var err error
for i := 0; i < maxEtcdRetries; i++ {
_, err = d.Client.KV.Put(ctx, key, string(value[:]))
if err != nil && err.Error() == client.ErrNoAvailableEndpoints.Error() {
// Retry after a delay
time.Sleep(time.Second)
continue
}
// when err == nil or anything other than connection refused
return err
}
return err
}
// Read state from key.
func (d *EtcdStateDriver) Read(key string) ([]byte, error) {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
var err error
var resp *client.GetResponse
for i := 0; i < maxEtcdRetries; i++ {
// etcd3 uses quorum for reads by default
resp, err = d.Client.KV.Get(ctx, key)
log.Debugf("etcd read for key %q: err=%+v", key, err)
if err != nil {
if err.Error() == client.ErrNoAvailableEndpoints.Error() {
// Retry after a delay
time.Sleep(time.Second)
continue
}
if resp != nil && len(resp.Kvs) != 0 {
return []byte(resp.Kvs[0].Value), nil
}
return []byte{}, fmt.Errorf("error reading from etcd")
}
if resp.Count == 0 {
return []byte{}, core.Errorf("key not found")
}
return resp.Kvs[0].Value, err
}
return []byte{}, err
}
// ReadAll state from baseKey.
func (d *EtcdStateDriver) ReadAll(baseKey string) ([][]byte, error) {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
var err error
var resp *client.GetResponse
for i := 0; i < maxEtcdRetries; i++ {
// etcd uses quorum for reads by default
resp, err = d.Client.KV.Get(ctx, baseKey, client.WithPrefix(), client.WithSort(client.SortByKey, client.SortAscend))
if err != nil {
if err.Error() == client.ErrNoAvailableEndpoints.Error() {
// Retry after a delay
time.Sleep(time.Second)
continue
}
}
if resp.Count == 0 {
return [][]byte{}, core.Errorf("key not found")
}
values := [][]byte{}
for _, node := range resp.Kvs {
values = append(values, []byte(node.Value))
}
return values, nil
}
return [][]byte{}, err
}
func (d *EtcdStateDriver) channelEtcdEvents(watcher client.WatchChan, rsps chan [2][]byte) {
for resp := range watcher {
for _, ev := range resp.Events {
// fmt.Printf("%s %q : %q\n", ev.Type, ev.Kv.Key, ev.Kv.Value)
rsp := [2][]byte{nil, nil}
eventStr := "create"
if string(ev.Kv.Value) != "" {
rsp[0] = ev.Kv.Value
}
if ev.PrevKv != nil && string(ev.PrevKv.Value) != "" {
rsp[1] = ev.PrevKv.Value
if string(ev.Kv.Value) != "" {
eventStr = "modify"
} else {
eventStr = "delete"
}
}
log.Debugf("Received %q for key: %s", eventStr, ev.Kv.Key)
//channel the translated response
rsps <- rsp
}
}
}
// WatchAll state transitions from baseKey
func (d *EtcdStateDriver) WatchAll(baseKey string, rsps chan [2][]byte) error {
watcher := d.Client.Watch(context.Background(), baseKey, client.WithPrefix())
go d.channelEtcdEvents(watcher, rsps)
return nil
}
// ClearState removes key from etcd
func (d *EtcdStateDriver) ClearState(key string) error {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
_, err := d.Client.KV.Delete(ctx, key)
return err
}
// ReadState reads key into a core.State with the unmarshaling function.
func (d *EtcdStateDriver) ReadState(key string, value core.State,
unmarshal func([]byte, interface{}) error) error {
encodedState, err := d.Read(key)
if err != nil {
return err
}
return unmarshal(encodedState, value)
}
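// Hedged pairing of ReadState with encoding/json (assumed imported); the state
// type is the illustrative exampleNetState embedding core.CommonState.
func readStateExample(d *EtcdStateDriver, key string) (*exampleNetState, error) {
	s := &exampleNetState{}
	if err := d.ReadState(key, s, json.Unmarshal); err != nil {
		return nil, err
	}
	return s, nil
}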
// readAllStateCommon reads and unmarshals (given a function) all state into a
// list of core.State objects.
// XXX: move this to some common file
func readAllStateCommon(d core.StateDriver, baseKey string, sType core.State,
unmarshal func([]byte, interface{}) error) ([]core.State, error) {
stateType := reflect.TypeOf(sType)
sliceType := reflect.SliceOf(stateType)
values := reflect.MakeSlice(sliceType, 0, 1)
byteValues, err := d.ReadAll(baseKey)
if err != nil {
return nil, err
}
for _, byteValue := range byteValues {
value := reflect.New(stateType)
err = unmarshal(byteValue, value.Interface())
if err != nil {
return nil, err
}
values = reflect.Append(values, value.Elem())
}
stateValues := []core.State{}
for i := 0; i < values.Len(); i++ {
// sanity checks
if !values.Index(i).Elem().FieldByName("CommonState").IsValid() {
return nil, core.Errorf("The state structure %v is missing core.CommonState",
stateType)
}
//the following works as every core.State is expected to embed core.CommonState struct
values.Index(i).Elem().FieldByName("CommonState").FieldByName("StateDriver").Set(reflect.ValueOf(d))
stateValue := values.Index(i).Interface().(core.State)
stateValues = append(stateValues, stateValue)
}
return stateValues, nil
}
// ReadAllState Reads all the state from baseKey and returns a list of core.State.
func (d *EtcdStateDriver) ReadAllState(baseKey string, sType core.State,
unmarshal func([]byte, interface{}) error) ([]core.State, error) {
return readAllStateCommon(d, baseKey, sType, unmarshal)
}
// channelStateEvents watches for updates(created, modify, delete) to a state of
// specified type and unmarshals (given a function) all changes and puts them on
// channel of core.WatchState objects.
// XXX: move this to some common file
func channelStateEvents(d core.StateDriver | {
return core.Errorf("invalid etcd URL scheme %q", endpoint.Scheme)
} | conditional_block |
etcdstatedriver.go | ulates the etcd endpoints used to communicate
// with it.
type EtcdStateDriverConfig struct {
Etcd struct {
Machines []string
}
}
// EtcdStateDriver implements the StateDriver interface for an etcd based distributed
// key-value store used to store config and runtime state for the netplugin.
type EtcdStateDriver struct {
// Client client.Client
// KeysAPI client.KeysAPI
Client *client.Client
}
// Init the driver with a core.Config.
func (d *EtcdStateDriver) Init(instInfo *core.InstanceInfo) error {
var err error
var endpoint *url.URL
if instInfo == nil || len(instInfo.DbURL) == 0 {
return errors.New("no etcd config found")
}
tlsInfo := transport.TLSInfo{
CertFile: instInfo.DbTLSCert,
KeyFile: instInfo.DbTLSKey,
TrustedCAFile: instInfo.DbTLSCa,
}
tlsConfig, err := tlsInfo.ClientConfig()
if err != nil {
log.Fatalf("error tlsInfo Format. Err: %v", err)
}
if len(instInfo.DbTLSCert) == 0 && len(instInfo.DbTLSKey) == 0 && len(instInfo.DbTLSCa) == 0 {
tlsConfig = nil
}
endpoints := make([]string, 0, len(instInfo.DbURL))
for _, dburl := range instInfo.DbURL {
endpoint, err = url.Parse(dburl)
if err != nil {
return err
}
if endpoint.Scheme == "etcd" {
if len(instInfo.DbTLSCert) == 0 && len(instInfo.DbTLSKey) == 0 && len(instInfo.DbTLSCa) == 0 {
endpoint.Scheme = "http"
} else {
endpoint.Scheme = "https"
}
} else if endpoint.Scheme != "http" && endpoint.Scheme != "https" {
return core.Errorf("invalid etcd URL scheme %q", endpoint.Scheme)
}
// keep the rewritten endpoint; previously the scheme rewrite was discarded
endpoints = append(endpoints, endpoint.String())
}
cfg := client.Config{
Endpoints: endpoints,
TLS:       tlsConfig,
}
d.Client, err = client.New(cfg)
if err != nil {
log.Fatalf("error creating etcd client. Err: %v", err)
}
return nil
}
// Deinit closes the etcd client.
func (d *EtcdStateDriver) Deinit() {
d.Client.Close()
}
// Write state to key with value.
func (d *EtcdStateDriver) Write(key string, value []byte) error {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
var err error
for i := 0; i < maxEtcdRetries; i++ {
_, err = d.Client.KV.Put(ctx, key, string(value[:]))
if err != nil && err.Error() == client.ErrNoAvailableEndpoints.Error() {
// Retry after a delay
time.Sleep(time.Second)
continue
}
// when err == nil or anything other than connection refused
return err
}
return err
}
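// [sketch] Writing a struct as JSON through Write; illustrative only and
// assumes encoding/json is imported in this file.
func exampleWriteJSON(d *EtcdStateDriver, key string, v interface{}) error {
data, err := json.Marshal(v)
if err != nil {
return err
}
return d.Write(key, data)
}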
// Read state from key.
func (d *EtcdStateDriver) Read(key string) ([]byte, error) {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
var err error
var resp *client.GetResponse
for i := 0; i < maxEtcdRetries; i++ {
// etcd3 uses quorum for reads by default
resp, err = d.Client.KV.Get(ctx, key)
log.Infof("EtcdStateDriver_Read_Err:%+v", err)
if err != nil {
if err.Error() == client.ErrNoAvailableEndpoints.Error() {
// Retry after a delay
time.Sleep(time.Second)
continue
}
if resp != nil && len(resp.Kvs) != 0 {
return []byte(resp.Kvs[0].Value), nil
}
return []byte{}, fmt.Errorf("error reading from etcd")
}
if resp.Count == 0 {
return []byte{}, core.Errorf("key not found")
}
return resp.Kvs[0].Value, err
}
return []byte{}, err
}
// ReadAll state from baseKey.
func (d *EtcdStateDriver) ReadAll(baseKey string) ([][]byte, error) {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
var err error
var resp *client.GetResponse
for i := 0; i < maxEtcdRetries; i++ {
// etcd uses quorum for reads by default
resp, err = d.Client.KV.Get(ctx, baseKey, client.WithPrefix(), client.WithSort(client.SortByKey, client.SortAscend))
if err != nil {
if err.Error() == client.ErrNoAvailableEndpoints.Error() {
// Retry after a delay
time.Sleep(time.Second)
continue
}
// non-retryable error: return instead of dereferencing a nil resp below
return nil, err
}
if resp.Count == 0 {
return [][]byte{}, core.Errorf("key not found")
}
values := [][]byte{}
for _, node := range resp.Kvs {
values = append(values, []byte(node.Value))
}
return values, nil
}
return [][]byte{}, err
}
func (d *EtcdStateDriver) channelEtcdEvents(watcher client.WatchChan, rsps chan [2][]byte) {
for resp := range watcher {
for _, ev := range resp.Events {
// fmt.Printf("%s %q : %q\n", ev.Type, ev.Kv.Key, ev.Kv.Value)
rsp := [2][]byte{nil, nil}
eventStr := "create"
if string(ev.Kv.Value) != "" {
rsp[0] = ev.Kv.Value
}
if ev.PrevKv != nil && string(ev.PrevKv.Value) != "" {
rsp[1] = ev.PrevKv.Value
if string(ev.Kv.Value) != "" {
eventStr = "modify"
} else {
eventStr = "delete"
}
}
log.Debugf("Received %q for key: %s", eventStr, ev.Kv.Key)
//channel the translated response
rsps <- rsp
}
}
}
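// [sketch] The event translation above, restated in isolation: given the new
// and previous values of a key, classify the event the same way
// channelEtcdEvents does. Illustrative only.
func classifyEvent(newVal, prevVal []byte) string {
switch {
case len(prevVal) == 0:
return "create"
case len(newVal) != 0:
return "modify"
default:
return "delete"
}
}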
// WatchAll watches for state transitions under baseKey.
func (d *EtcdStateDriver) WatchAll(baseKey string, rsps chan [2][]byte) error |
// ClearState removes key from etcd
func (d *EtcdStateDriver) ClearState(key string) error {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
_, err := d.Client.KV.Delete(ctx, key)
return err
}
// ReadState reads key into a core.State with the unmarshaling function.
func (d *EtcdStateDriver) ReadState(key string, value core.State,
unmarshal func([]byte, interface{}) error) error {
encodedState, err := d.Read(key)
if err != nil {
return err
}
return unmarshal(encodedState, value)
}
// readAllStateCommon reads and unmarshals (given a function) all state into a
// list of core.State objects.
// XXX: move this to some common file
func readAllStateCommon(d core.StateDriver, baseKey string, sType core.State,
unmarshal func([]byte, interface{}) error) ([]core.State, error) {
stateType := reflect.TypeOf(sType)
sliceType := reflect.SliceOf(stateType)
values := reflect.MakeSlice(sliceType, 0, 1)
byteValues, err := d.ReadAll(baseKey)
if err != nil {
return nil, err
}
for _, byteValue := range byteValues {
value := reflect.New(stateType)
err = unmarshal(byteValue, value.Interface())
if err != nil {
return nil, err
}
values = reflect.Append(values, value.Elem())
}
stateValues := []core.State{}
for i := 0; i < values.Len(); i++ {
// sanity checks
if !values.Index(i).Elem().FieldByName("CommonState").IsValid() {
return nil, core.Errorf("The state structure %v is missing core.CommonState",
stateType)
}
//the following works as every core.State is expected to embed core.CommonState struct
values.Index(i).Elem().FieldByName("CommonState").FieldByName("StateDriver").Set(reflect.ValueOf(d))
stateValue := values.Index(i).Interface().(core.State)
stateValues = append(stateValues, stateValue)
}
return stateValues, nil
}
// ReadAllState reads all the state from baseKey and returns a list of core.State.
func (d *EtcdStateDriver) ReadAllState(baseKey string, sType core.State,
unmarshal func([]byte, interface{}) error) ([]core.State, error) {
return readAllStateCommon(d, baseKey, sType, unmarshal)
}
// channelStateEvents watches for updates (create, modify, delete) to a state of
// a specified type, unmarshals (given a function) all changes, and puts them on a
// channel of core.WatchState objects.
// XXX: move this to some common file
func channelStateEvents(d core.StateDriver | {
// WithPrevKV is needed so channelEtcdEvents can tell modify and delete apart
watcher := d.Client.Watch(context.Background(), baseKey, client.WithPrefix(), client.WithPrevKV())
go d.channelEtcdEvents(watcher, rsps)
return nil
} | identifier_body |
etcdstatedriver.go | ulates the etcd endpoints used to communicate
// with it.
type EtcdStateDriverConfig struct {
Etcd struct {
Machines []string
}
}
// EtcdStateDriver implements the StateDriver interface for an etcd based distributed
// key-value store used to store config and runtime state for the netplugin.
type EtcdStateDriver struct {
// Client client.Client
// KeysAPI client.KeysAPI
Client *client.Client
}
// Init the driver with a core.Config.
func (d *EtcdStateDriver) | (instInfo *core.InstanceInfo) error {
var err error
var endpoint *url.URL
if instInfo == nil || len(instInfo.DbURL) == 0 {
return errors.New("no etcd config found")
}
tlsInfo := transport.TLSInfo{
CertFile: instInfo.DbTLSCert,
KeyFile: instInfo.DbTLSKey,
TrustedCAFile: instInfo.DbTLSCa,
}
tlsConfig, err := tlsInfo.ClientConfig()
if err != nil {
log.Fatalf("error tlsInfo Format. Err: %v", err)
}
if len(instInfo.DbTLSCert) == 0 && len(instInfo.DbTLSKey) == 0 && len(instInfo.DbTLSCa) == 0 {
tlsConfig = nil
}
endpoints := make([]string, 0, len(instInfo.DbURL))
for _, dburl := range instInfo.DbURL {
endpoint, err = url.Parse(dburl)
if err != nil {
return err
}
if endpoint.Scheme == "etcd" {
if len(instInfo.DbTLSCert) == 0 && len(instInfo.DbTLSKey) == 0 && len(instInfo.DbTLSCa) == 0 {
endpoint.Scheme = "http"
} else {
endpoint.Scheme = "https"
}
} else if endpoint.Scheme != "http" && endpoint.Scheme != "https" {
return core.Errorf("invalid etcd URL scheme %q", endpoint.Scheme)
}
// keep the rewritten endpoint; previously the scheme rewrite was discarded
endpoints = append(endpoints, endpoint.String())
}
cfg := client.Config{
Endpoints: endpoints,
TLS:       tlsConfig,
}
d.Client, err = client.New(cfg)
if err != nil {
log.Fatalf("error creating etcd client. Err: %v", err)
}
return nil
}
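// [sketch] Minimal Init call; the URL is illustrative and the TLS fields are
// left empty, which makes the driver fall back to plain HTTP above.
func exampleInit() (*EtcdStateDriver, error) {
d := &EtcdStateDriver{}
err := d.Init(&core.InstanceInfo{
DbURL: []string{"etcd://127.0.0.1:2379"},
})
return d, err
}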
// Deinit closes the etcd client.
func (d *EtcdStateDriver) Deinit() {
d.Client.Close()
}
// Write state to key with value.
func (d *EtcdStateDriver) Write(key string, value []byte) error {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
var err error
for i := 0; i < maxEtcdRetries; i++ {
_, err = d.Client.KV.Put(ctx, key, string(value[:]))
if err != nil && err.Error() == client.ErrNoAvailableEndpoints.Error() {
// Retry after a delay
time.Sleep(time.Second)
continue
}
// when err == nil or anything other than connection refused
return err
}
return err
}
// Read state from key.
func (d *EtcdStateDriver) Read(key string) ([]byte, error) {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
var err error
var resp *client.GetResponse
for i := 0; i < maxEtcdRetries; i++ {
// etcd3 uses quorum for reads by default
resp, err = d.Client.KV.Get(ctx, key)
log.Infof("EtcdStateDriver_Read_Err:%+v", err)
if err != nil {
if err.Error() == client.ErrNoAvailableEndpoints.Error() {
// Retry after a delay
time.Sleep(time.Second)
continue
}
if resp != nil && len(resp.Kvs) != 0 {
return []byte(resp.Kvs[0].Value), nil
}
return []byte{}, fmt.Errorf("error reading from etcd")
}
if resp.Count == 0 {
return []byte{}, core.Errorf("key not found")
}
return resp.Kvs[0].Value, err
}
return []byte{}, err
}
// ReadAll state from baseKey.
func (d *EtcdStateDriver) ReadAll(baseKey string) ([][]byte, error) {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
var err error
var resp *client.GetResponse
for i := 0; i < maxEtcdRetries; i++ {
// etcd uses quorum for reads by default
resp, err = d.Client.KV.Get(ctx, baseKey, client.WithPrefix(), client.WithSort(client.SortByKey, client.SortAscend))
if err != nil {
if err.Error() == client.ErrNoAvailableEndpoints.Error() {
// Retry after a delay
time.Sleep(time.Second)
continue
}
// non-retryable error: return instead of dereferencing a nil resp below
return nil, err
}
if resp.Count == 0 {
return [][]byte{}, core.Errorf("key not found")
}
values := [][]byte{}
for _, node := range resp.Kvs {
values = append(values, []byte(node.Value))
}
return values, nil
}
return [][]byte{}, err
}
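// [sketch] Illustrative ReadAll usage; the base key is an assumption.
func exampleReadAll(d *EtcdStateDriver) {
vals, err := d.ReadAll("/contiv.io/state/nets/")
if err != nil {
log.Errorf("readAll failed: %v", err)
return
}
for i, v := range vals {
log.Infof("value %d: %s", i, v)
}
}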
func (d *EtcdStateDriver) channelEtcdEvents(watcher client.WatchChan, rsps chan [2][]byte) {
for resp := range watcher {
for _, ev := range resp.Events {
// fmt.Printf("%s %q : %q\n", ev.Type, ev.Kv.Key, ev.Kv.Value)
rsp := [2][]byte{nil, nil}
eventStr := "create"
if string(ev.Kv.Value) != "" {
rsp[0] = ev.Kv.Value
}
if ev.PrevKv != nil && string(ev.PrevKv.Value) != "" {
rsp[1] = ev.PrevKv.Value
if string(ev.Kv.Value) != "" {
eventStr = "modify"
} else {
eventStr = "delete"
}
}
log.Debugf("Received %q for key: %s", eventStr, ev.Kv.Key)
//channel the translated response
rsps <- rsp
}
}
}
// WatchAll watches for state transitions under baseKey.
func (d *EtcdStateDriver) WatchAll(baseKey string, rsps chan [2][]byte) error {
// WithPrevKV is needed so channelEtcdEvents can tell modify and delete apart
watcher := d.Client.Watch(context.Background(), baseKey, client.WithPrefix(), client.WithPrevKV())
go d.channelEtcdEvents(watcher, rsps)
return nil
}
// ClearState removes key from etcd
func (d *EtcdStateDriver) ClearState(key string) error {
ctx, cancel := context.WithTimeout(context.Background(), ctxTimeout)
defer cancel()
_, err := d.Client.KV.Delete(ctx, key)
return err
}
// ReadState reads key into a core.State with the unmarshaling function.
func (d *EtcdStateDriver) ReadState(key string, value core.State,
unmarshal func([]byte, interface{}) error) error {
encodedState, err := d.Read(key)
if err != nil {
return err
}
return unmarshal(encodedState, value)
}
// readAllStateCommon reads and unmarshals (given a function) all state into a
// list of core.State objects.
// XXX: move this to some common file
func readAllStateCommon(d core.StateDriver, baseKey string, sType core.State,
unmarshal func([]byte, interface{}) error) ([]core.State, error) {
stateType := reflect.TypeOf(sType)
sliceType := reflect.SliceOf(stateType)
values := reflect.MakeSlice(sliceType, 0, 1)
byteValues, err := d.ReadAll(baseKey)
if err != nil {
return nil, err
}
for _, byteValue := range byteValues {
value := reflect.New(stateType)
err = unmarshal(byteValue, value.Interface())
if err != nil {
return nil, err
}
values = reflect.Append(values, value.Elem())
}
stateValues := []core.State{}
for i := 0; i < values.Len(); i++ {
// sanity checks
if !values.Index(i).Elem().FieldByName("CommonState").IsValid() {
return nil, core.Errorf("The state structure %v is missing core.CommonState",
stateType)
}
//the following works as every core.State is expected to embed core.CommonState struct
values.Index(i).Elem().FieldByName("CommonState").FieldByName("StateDriver").Set(reflect.ValueOf(d))
stateValue := values.Index(i).Interface().(core.State)
stateValues = append(stateValues, stateValue)
}
return stateValues, nil
}
// ReadAllState reads all the state from baseKey and returns a list of core.State.
func (d *EtcdStateDriver) ReadAllState(baseKey string, sType core.State,
unmarshal func([]byte, interface{}) error) ([]core.State, error) {
return readAllStateCommon(d, baseKey, sType, unmarshal)
}
// channelStateEvents watches for updates (create, modify, delete) to a state of
// a specified type, unmarshals (given a function) all changes, and puts them on a
// channel of core.WatchState objects.
// XXX: move this to some common file
func channelStateEvents(d core.StateDriver, | Init | identifier_name |
deep_consensus.py | topology = input_args
n_nodes = int(n_nodes)
# Cluster specification; one port for the PS (usually 2222) and as many ports as needed
# for the workers
parameter_servers = ["localhost:2222"]
workers = ["localhost:{}".format(i) for i in range(2223, 2223 + n_nodes)]
cluster = tf.train.ClusterSpec({"ps": parameter_servers, "worker": workers})
# Network Topology
topology = get_graph(n_nodes, topology)
# Input flags; define if it is a PS or a worker and its corresponding task number
tf.app.flags.DEFINE_string("job_name", "worker", "either 'ps' or 'worker'")
tf.app.flags.DEFINE_integer("task_index", 0, "Index of task within the job")
FLAGS = tf.app.flags.FLAGS
# Start a server for a specific task
server = tf.train.Server(cluster,
job_name=FLAGS.job_name,
task_index=FLAGS.task_index)
# Config; this info can go in the input file;
# hardcoding was more convenient for testing
batch_size = 100
learning_rate = 0.005
training_epochs = 10
logs_path = "/tmp/mnist/1"
wdevs = [i for i in range(len(workers))] # number of worker devices
# Load mnist data set
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets('MNIST_data', one_hot=True, seed=SEED)
if FLAGS.job_name == "ps":
server.join()
elif FLAGS.job_name == "worker":
# Instantiate a worker object to hold its properties: ID, parameter
# vector and gradients
worker = worker.Worker(FLAGS.task_index)
# Initialize the queues on the chief worker
# The dimensions of the tensors that the queues will receive should be predefined
with tf.device("/job:worker/task:0/cpu:0"):
token_queues = []
dtypes = [tf.float32]*8
q_shapes = [[5, 5, 1, 32],
[5, 5, 32, 64],
[7 * 7 * 64, 1024],
[1024, 10],
[32],
[64],
[1024],
[10]]
# 1 outgoing queue for each worker's neighbour
# token_queues will be an n_workers-length list of n_workers-length lists,
# so essentially an n_workers x n_workers matrix, which we can index;
# the diagonal is redundant, obviously - no worker sends to itself
for wdev in wdevs:
this_wdev_queues = [tf.FIFOQueue(1,
dtypes=dtypes,
shapes=q_shapes,
name="from_{}_to_{}/q".format(wdev, item),
shared_name="from_{}_to_{}/q".format(wdev, item)) for item in wdevs]
token_queues.append(this_wdev_queues)
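# [sketch] Indexing convention for the matrix built above: token_queues[src][dst]
# is written by worker `src` and read by worker `dst`. Illustrative check only:
if len(wdevs) > 1:
_example_q = token_queues[0][1] # queue from worker 0 to worker 1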
# Between-graph replication; start assigning individual jobs to individual workers
full_device_name = "/job:worker/task:%d" % worker._id
with tf.device(tf.train.replica_device_setter(
worker_device=full_device_name,
cluster=cluster)):
# Placeholders for input tensors
with tf.name_scope('input'):
# None -> batch size can be any size, 784 -> flattened mnist image
x = tf.placeholder(tf.float32, shape=[None, 784], name="x-input")
# Target: 10 classes
y_ = tf.placeholder(tf.float32, shape=[None, 10], name="y-input")
# Initialize weights and biases (parameter vectors)
with tf.name_scope("weights"):
W_conv1 = tf.Variable(tf.truncated_normal([5, 5, 1, 32], stddev=0.1, seed=SEED))
W_conv2 = tf.Variable(tf.truncated_normal([5, 5, 32, 64], stddev=0.1, seed=SEED))
W_fc1 = tf.Variable(tf.truncated_normal([7 * 7 * 64, 1024], stddev=0.1, seed=SEED))
W_fc2 = tf.Variable(tf.truncated_normal([1024, 10], stddev=0.1, seed=SEED))
# Bias
with tf.name_scope("biases"):
b_conv1 = tf.Variable(tf.constant(0.1, shape=[32]))
b_conv2 = tf.Variable(tf.constant(0.1, shape=[64]))
b_fc1 = tf.Variable(tf.constant(0.1, shape=[1024]))
b_fc2 = tf.Variable(tf.constant(0.1, shape=[10]))
# Assign initialized variables to current worker
worker.get_vars([W_conv1, W_conv2, W_fc1, W_fc2, b_conv1, b_conv2, b_fc1, b_fc2])
# Initial enqueue list of operations which enqueue initial PVs to all queues
init_enq_op = []
for neighbour in topology[worker._id]:
op = token_queues[worker._id][neighbour].enqueue(worker.vars_)
init_enq_op.append(op)
# Function that scales the gradients by the learning_rate; the scaled
# gradients are subtracted from the averaged PV further below
def | (grads, vars_, learning_rate):
mul_grads = []
for grad, var in zip(grads, vars_):
mul_grad = tf.scalar_mul(learning_rate, grad)
mul_grads.append(mul_grad)
return mul_grads
with tf.name_scope("softmax"):
assign_ops = []
for tensor1, tensor2 in zip((W_conv1, W_conv2, W_fc1, W_fc2, b_conv1, b_conv2, b_fc1, b_fc2), worker.vars_):
assign_ops.append(tf.assign(tensor1, tensor2))
# This line must be equivalent to the above assign_ops
# but we need the assign_ops as a dependency below
# W1, W2, b1, b2 = worker.vars_
# Our deep CNN model, always subject to changes
with tf.control_dependencies(assign_ops):
x_image = tf.reshape(x, [-1, 28, 28, 1])
h_conv1 = tf.nn.relu(tf.nn.conv2d(x_image, W_conv1, strides=[1, 1, 1, 1], padding='SAME') + b_conv1)
h_pool1 = tf.nn.max_pool(h_conv1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
h_conv2 = tf.nn.relu(tf.nn.conv2d(h_pool1, W_conv2, strides=[1, 1, 1, 1], padding='SAME') + b_conv2)
h_pool2 = tf.nn.max_pool(h_conv2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
h_pool2_flat = tf.reshape(h_pool2, [-1, 7 * 7 * 64])
h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)
h_fc1_drop = tf.nn.dropout(h_fc1, 0.8)
# y is our prediction
y = tf.add(tf.matmul(h_fc1_drop, W_fc2), b_fc2, name="Prediction")
# Specify cost function
with tf.name_scope('cross_entropy'):
# both should be equivalent
#cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), axis=[1]))
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(labels=y_, logits=y))
with tf.name_scope('compute_grads'):
# compute and assign gradients to the current worker
worker.get_grads(cross_entropy, tf.trainable_variables())
# dequeue from neighbours
pvs_nbr = [token_queues[neighbour][worker._id].dequeue() for neighbour in topology[worker._id]]
# stack the current PV to the neighbour ones
pvs_nbr.append(worker.vars_)
pvs_zipped = zip(*pvs_nbr)
# average all the PVs
mean_pvs = [tf.reduce_mean(item, axis=0) for item in pvs_zipped]
# A print operation that outputs the maximum value of the W_conv1 vector across 2 axes
# in order to check its value among different workers;
with tf.name_scope("print_operations2"), tf.control_dependencies([W_conv1]):
max_var0 = tf.reduce_max(tf.reduce_max(W_conv1, reduction_indices=[1]), reduction_indices=[0])
print_ops2 = [tf.print("Worker {} reduce max variable".format(worker._id), max_var0, output_stream=sys.stdout)]
# better: could be replaced with the norm
#w1_norm = tf.norm(W_conv1)
#print_ops2 = [tf.print("Worker {} reduce max variable".format(worker._id), w1_norm, output_stream=sys.stdout)]
# Now we deduct the grad | grads_x_lr | identifier_name |
deep_consensus.py | np.random.seed(SEED)
rn.seed(SEED)
# Raise error if trying to seed after graph construction
if len(tf.get_default_graph()._nodes_by_id.keys()) > 0:
raise RuntimeError("Seeding is not supported after building part of the graph. "
"Please move set_seed to the beginning of your code.")
# Read arguments from input file
with open("input.txt", "r") as f:
input_args = [line.split("=")[-1].rstrip("\n") for line in f]
n_nodes, topology = input_args
n_nodes = int(n_nodes)
# Cluster specification; one port for the PS (usually 2222) and as many ports as needed
# for the workers
parameter_servers = ["localhost:2222"]
workers = ["localhost:{}".format(i) for i in range(2223, 2223 + n_nodes)]
cluster = tf.train.ClusterSpec({"ps": parameter_servers, "worker": workers})
# Network Topology
topology = get_graph(n_nodes, topology)
# Input flags; define if it is a PS or a worker and its corresponding task number
tf.app.flags.DEFINE_string("job_name", "worker", "either 'ps' or 'worker'")
tf.app.flags.DEFINE_integer("task_index", 0, "Index of task within the job")
FLAGS = tf.app.flags.FLAGS
# Start a server for a specific task
server = tf.train.Server(cluster,
job_name=FLAGS.job_name,
task_index=FLAGS.task_index)
# Config; this info can go in the input file;
# hardcoding was more convenient for testing
batch_size = 100
learning_rate = 0.005
training_epochs = 10
logs_path = "/tmp/mnist/1"
wdevs = [i for i in range(len(workers))] # number of worker devices
# Load mnist data set
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets('MNIST_data', one_hot=True, seed=SEED)
if FLAGS.job_name == "ps":
server.join()
elif FLAGS.job_name == "worker":
# Instantiate a worker object to hold its properties: ID, parameter
# vector and gradients
worker = worker.Worker(FLAGS.task_index)
# Initialize the queues on the chief worker
# The dimensions of the tensors that the queues will receive should be predefined
with tf.device("/job:worker/task:0/cpu:0"):
token_queues = []
dtypes = [tf.float32]*8
q_shapes = [[5, 5, 1, 32],
[5, 5, 32, 64],
[7 * 7 * 64, 1024],
[1024, 10],
[32],
[64],
[1024],
[10]]
# 1 outgoing queue for each worker's neighbour
# token_queues will be an n_workers-length list of n_workers-length lists,
# so essentially an n_workers x n_workers matrix, which we can index;
# the diagonal is redundant, obviously - no worker sends to itself
for wdev in wdevs:
this_wdev_queues = [tf.FIFOQueue(1,
dtypes=dtypes,
shapes=q_shapes,
name="from_{}_to_{}/q".format(wdev, item),
shared_name="from_{}_to_{}/q".format(wdev, item)) for item in wdevs]
token_queues.append(this_wdev_queues)
# Between-graph replication; start assigning individual jobs to individual workers
full_device_name = "/job:worker/task:%d" % worker._id
with tf.device(tf.train.replica_device_setter(
worker_device=full_device_name,
cluster=cluster)):
# Placeholders for input tensors
with tf.name_scope('input'):
# None -> batch size can be any size, 784 -> flattened mnist image
x = tf.placeholder(tf.float32, shape=[None, 784], name="x-input")
# Target: 10 classes
y_ = tf.placeholder(tf.float32, shape=[None, 10], name="y-input")
# Initialize weights and biases (parameter vectors)
with tf.name_scope("weights"):
W_conv1 = tf.Variable(tf.truncated_normal([5, 5, 1, 32], stddev=0.1, seed=SEED))
W_conv2 = tf.Variable(tf.truncated_normal([5, 5, 32, 64], stddev=0.1, seed=SEED))
W_fc1 = tf.Variable(tf.truncated_normal([7 * 7 * 64, 1024], stddev=0.1, seed=SEED))
W_fc2 = tf.Variable(tf.truncated_normal([1024, 10], stddev=0.1, seed=SEED))
# Bias
with tf.name_scope("biases"):
b_conv1 = tf.Variable(tf.constant(0.1, shape=[32]))
b_conv2 = tf.Variable(tf.constant(0.1, shape=[64]))
b_fc1 = tf.Variable(tf.constant(0.1, shape=[1024]))
b_fc2 = tf.Variable(tf.constant(0.1, shape=[10]))
# Assign initialized variables to current worker
worker.get_vars([W_conv1, W_conv2, W_fc1, W_fc2, b_conv1, b_conv2, b_fc1, b_fc2])
# Initial enqueue list of operations which enqueue initial PVs to all queues
init_enq_op = []
for neighbour in topology[worker._id]:
op = token_queues[worker._id][neighbour].enqueue(worker.vars_)
init_enq_op.append(op)
# Function that scales the gradients by the learning_rate; the scaled
# gradients are subtracted from the averaged PV further below
def grads_x_lr(grads, vars_, learning_rate):
mul_grads = []
for grad, var in zip(grads, vars_):
mul_grad = tf.scalar_mul(learning_rate, grad)
mul_grads.append(mul_grad)
return mul_grads
with tf.name_scope("softmax"):
assign_ops = []
for tensor1, tensor2 in zip((W_conv1, W_conv2, W_fc1, W_fc2, b_conv1, b_conv2, b_fc1, b_fc2), worker.vars_):
assign_ops.append(tf.assign(tensor1, tensor2))
# This line must be equivalent to the above assign_ops
# but we need the assign_ops as a dependency below
# W1, W2, b1, b2 = worker.vars_
# Our deep CNN model, always subject to changes
with tf.control_dependencies(assign_ops):
x_image = tf.reshape(x, [-1, 28, 28, 1])
h_conv1 = tf.nn.relu(tf.nn.conv2d(x_image, W_conv1, strides=[1, 1, 1, 1], padding='SAME') + b_conv1)
h_pool1 = tf.nn.max_pool(h_conv1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
h_conv2 = tf.nn.relu(tf.nn.conv2d(h_pool1, W_conv2, strides=[1, 1, 1, 1], padding='SAME') + b_conv2)
h_pool2 = tf.nn.max_pool(h_conv2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
h_pool2_flat = tf.reshape(h_pool2, [-1, 7 * 7 * 64])
h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)
h_fc1_drop = tf.nn.dropout(h_fc1, 0.8)
# y is our prediction
y = tf.add(tf.matmul(h_fc1_drop, W_fc2), b_fc2, name="Prediction")
# Specify cost function
with tf.name_scope('cross_entropy'):
# both should be equivalent
#cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), axis=[1]))
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(labels=y_, logits=y))
with tf.name_scope('compute_grads'):
# compute and assign gradients to the current worker
worker.get_grads(cross_entropy, tf.trainable_variables())
# dequeue from neighbours
pvs_nbr = [token_queues[neighbour][worker._id].dequeue() for neighbour in topology[worker._id]]
# stack the current PV to the neighbour ones
pvs_nbr.append(worker.vars_)
pvs_zipped = zip(*pvs_nbr)
# average all the PVs
mean_pvs = [tf.reduce_mean(item, axis=0) for item in pvs_zipped]
# A print operation that outputs the maximum value of the W_conv1 vector across 2 axes
# in order to check its value among different | os.environ['PYTHONHASHSEED'] = '0'
SEED = 5
tf.set_random_seed(SEED) | random_line_split |
|
deep_consensus.py | 10], name="y-input")
# Initialize weights and biases (parameter vectors)
with tf.name_scope("weights"):
W_conv1 = tf.Variable(tf.truncated_normal([5, 5, 1, 32], stddev=0.1, seed=SEED))
W_conv2 = tf.Variable(tf.truncated_normal([5, 5, 32, 64], stddev=0.1, seed=SEED))
W_fc1 = tf.Variable(tf.truncated_normal([7 * 7 * 64, 1024], stddev=0.1, seed=SEED))
W_fc2 = tf.Variable(tf.truncated_normal([1024, 10], stddev=0.1, seed=SEED))
# Bias
with tf.name_scope("biases"):
b_conv1 = tf.Variable(tf.constant(0.1, shape=[32]))
b_conv2 = tf.Variable(tf.constant(0.1, shape=[64]))
b_fc1 = tf.Variable(tf.constant(0.1, shape=[1024]))
b_fc2 = tf.Variable(tf.constant(0.1, shape=[10]))
# Assign initialized variables to current worker
worker.get_vars([W_conv1, W_conv2, W_fc1, W_fc2, b_conv1, b_conv2, b_fc1, b_fc2])
# Initial enqueue list of operations which enqueue initial PVs to all queues
init_enq_op = []
for neighbour in topology[worker._id]:
op = token_queues[worker._id][neighbour].enqueue(worker.vars_)
init_enq_op.append(op)
# Function for applying gradients to current PV
# The result is multiplied by the learning_rate
def grads_x_lr(grads, vars_, learning_rate):
mul_grads = []
for grad, var in zip(grads, vars_):
mul_grad = tf.scalar_mul(learning_rate, grad)
mul_grads.append(mul_grad)
return mul_grads
with tf.name_scope("softmax"):
assign_ops = []
for tensor1, tensor2 in zip((W_conv1, W_conv2, W_fc1, W_fc2, b_conv1, b_conv2, b_fc1, b_fc2), worker.vars_):
assign_ops.append(tf.assign(tensor1, tensor2))
# This line must be equivalent to the above assign_ops
# but we need the assign_ops as a dependency below
# W1, W2, b1, b2 = worker.vars_
# Our deep CNN model, always subject to changes
with tf.control_dependencies(assign_ops):
x_image = tf.reshape(x, [-1, 28, 28, 1])
h_conv1 = tf.nn.relu(tf.nn.conv2d(x_image, W_conv1, strides=[1, 1, 1, 1], padding='SAME') + b_conv1)
h_pool1 = tf.nn.max_pool(h_conv1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
h_conv2 = tf.nn.relu(tf.nn.conv2d(h_pool1, W_conv2, strides=[1, 1, 1, 1], padding='SAME') + b_conv2)
h_pool2 = tf.nn.max_pool(h_conv2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
h_pool2_flat = tf.reshape(h_pool2, [-1, 7 * 7 * 64])
h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)
h_fc1_drop = tf.nn.dropout(h_fc1, 0.8)
# y is our prediction
y = tf.add(tf.matmul(h_fc1_drop, W_fc2), b_fc2, name="Prediction")
# Specify cost function
with tf.name_scope('cross_entropy'):
# both should be equivalent
#cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), axis=[1]))
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(labels=y_, logits=y))
with tf.name_scope('compute_grads'):
# compute and assign gradients to the current worker
worker.get_grads(cross_entropy, tf.trainable_variables())
# dequeue from neighbours
pvs_nbr = [token_queues[neighbour][worker._id].dequeue() for neighbour in topology[worker._id]]
# stack the current PV to the neighbour ones
pvs_nbr.append(worker.vars_)
pvs_zipped = zip(*pvs_nbr)
# average all the PVs
mean_pvs = [tf.reduce_mean(item, axis=0) for item in pvs_zipped]
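# [sketch] The consensus-SGD step assembled above, restated in plain NumPy for
# clarity; this helper is not used by the graph (np is numpy, imported at the top)
def _consensus_step_np(own_pv, neighbour_pvs, grad, lr):
mean_pv = np.mean(np.stack([own_pv] + list(neighbour_pvs)), axis=0)
return mean_pv - lr * grad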
# A print operation that outputs the maximum value of the W_conv1 vector across 2 axes
# in order to check its value among different workers;
with tf.name_scope("print_operations2"), tf.control_dependencies([W_conv1]):
max_var0 = tf.reduce_max(tf.reduce_max(W_conv1, reduction_indices=[1]), reduction_indices=[0])
print_ops2 = [tf.print("Worker {} reduce max variable".format(worker._id), max_var0, output_stream=sys.stdout)]
# better: could be replaced with the norm
#w1_norm = tf.norm(W_conv1)
#print_ops2 = [tf.print("Worker {} reduce max variable".format(worker._id), w1_norm, output_stream=sys.stdout)]
# Now subtract the scaled gradient from the averaged PV
with tf.control_dependencies(print_ops2), tf.name_scope('apply_grads'):
#with tf.name_scope('apply_grads'):
mul_grads = grads_x_lr(worker.grads, worker.vars_, learning_rate)
new_vars = [tf.subtract(mean_var, mul_grad) for mean_var, mul_grad in zip(mean_pvs, mul_grads)]
# Update the worker's Parameter Vector
#worker.vars_ = new_vars
assign_ops2 = []
for tensor1, tensor2 in zip(worker.vars_, new_vars):
assign_ops2.append(tf.assign(tensor1, tensor2))
with tf.name_scope('Accuracy'):
correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(y_,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# Enqueue new PV in neighbours queues
with tf.control_dependencies(assign_ops2):
enq_ops = [token_queues[worker._id][neighbour].enqueue(worker.vars_) for neighbour in topology[worker._id]]
# In case we need to run another print operation separately
# this could be train_op = enq_ops + [print_op]
train_op = enq_ops
# Configure session; make sure that parallelism is set to 1
# for reproducible results
sv = tf.train.Supervisor(is_chief=(FLAGS.task_index == 0))
session_conf = tf.ConfigProto(intra_op_parallelism_threads=1,
inter_op_parallelism_threads=1)
begin_time = time.time()
frequency = 100
with sv.prepare_or_wait_for_session(server.target, config=session_conf) as sess:
np.random.seed(SEED)
tf.set_random_seed(SEED)
rn.seed(SEED)
# filenames for results
f0_name = "consensus_results_loss_iters{}.txt".format(FLAGS.task_index)
f1_name = "consensus_results_loss_time{}.txt".format(FLAGS.task_index)
f2_name = "consensus_results_time_iters{}.txt".format(FLAGS.task_index)
with open(f0_name,"w") as f0, open(f1_name,"w") as f1, open(f2_name,"w") as f2:
start_time = time.time()
# for the plots against iterations
overall_count = 0
# first run the initial enqueue operation
sess.run(init_enq_op)
for epoch in range(training_epochs):
# number of batches in one epoch
batch_count = int(mnist.train.num_examples/batch_size)
count = 0
print("Number of batches in dataset: ",batch_count)
for i in range(batch_count):
| print("Iteration NO: ", count)
batch_x, batch_y = mnist.train.next_batch(batch_size, shuffle=False)
# perform the operations we defined earlier on batch
_, cost = sess.run([train_op, cross_entropy],
feed_dict={x: batch_x, y_: batch_y})
count += 1
overall_count += 1
elapsed_time = time.time() - begin_time
# write on output files
f0.write("{0:d}\t{1:.4f}\n".format(overall_count, cost))
f1.write("{0:.2f}\t{1:.4f}\n".format(elapsed_time, cost))
f2.write("{0:d}\t{1:.2f}\n".format(overall_count, elapsed_time))
# Print every 100 iterations and in the end
if count % frequency == 0 or i+1 == batch_count:
elapsed_time = time.time() - start_time
start_time = time.time()
print(" Epoch: %2d," % (epoch+1), | conditional_block |
|
deep_consensus.py | , topology = input_args
n_nodes = int(n_nodes)
# Cluster specification; one port for the PS (usually 2222) and as many ports as needed
# for the workers
parameter_servers = ["localhost:2222"]
workers = ["localhost:{}".format(i) for i in range(2223, 2223 + n_nodes)]
cluster = tf.train.ClusterSpec({"ps": parameter_servers, "worker": workers})
# Network Topology
topology = get_graph(n_nodes, topology)
# Input flags; define if it is a PS or a worker and its corresponding task number
tf.app.flags.DEFINE_string("job_name", "worker", "either 'ps' or 'worker'")
tf.app.flags.DEFINE_integer("task_index", 0, "Index of task within the job")
FLAGS = tf.app.flags.FLAGS
# Start a server for a specific task
server = tf.train.Server(cluster,
job_name=FLAGS.job_name,
task_index=FLAGS.task_index)
# Config; this info can go in the input file;
# hardcoding was more convenient for testing
batch_size = 100
learning_rate = 0.005
training_epochs = 10
logs_path = "/tmp/mnist/1"
wdevs = [i for i in range(len(workers))] # number of worker devices
# Load mnist data set
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets('MNIST_data', one_hot=True, seed=SEED)
if FLAGS.job_name == "ps":
server.join()
elif FLAGS.job_name == "worker":
# Instantiate a worker object to hold its properties: ID, parameter
# vector and gradients
worker = worker.Worker(FLAGS.task_index)
# Initialize the queues on the chief worker
# The dimensions of the tensors that the queues will receive should be predefined
with tf.device("/job:worker/task:0/cpu:0"):
token_queues = []
dtypes = [tf.float32]*8
q_shapes = [[5, 5, 1, 32],
[5, 5, 32, 64],
[7 * 7 * 64, 1024],
[1024, 10],
[32],
[64],
[1024],
[10]]
# 1 outgoing queue for each worker's neighbour
# token_queues will be an n_workers-length list of n_workers-length lists,
# so essentially an n_workers x n_workers matrix, which we can index;
# the diagonal is redundant, obviously - no worker sends to itself
for wdev in wdevs:
this_wdev_queues = [tf.FIFOQueue(1,
dtypes=dtypes,
shapes=q_shapes,
name="from_{}_to_{}/q".format(wdev, item),
shared_name="from_{}_to_{}/q".format(wdev, item)) for item in wdevs]
token_queues.append(this_wdev_queues)
# Between-graph replication; start assigning individual jobs to individual workers
full_device_name = "/job:worker/task:%d" % worker._id
with tf.device(tf.train.replica_device_setter(
worker_device=full_device_name,
cluster=cluster)):
# Placeholders for input tensors
with tf.name_scope('input'):
# None -> batch size can be any size, 784 -> flattened mnist image
x = tf.placeholder(tf.float32, shape=[None, 784], name="x-input")
# Target: 10 classes
y_ = tf.placeholder(tf.float32, shape=[None, 10], name="y-input")
# Initialize weights and biases (parameter vectors)
with tf.name_scope("weights"):
W_conv1 = tf.Variable(tf.truncated_normal([5, 5, 1, 32], stddev=0.1, seed=SEED))
W_conv2 = tf.Variable(tf.truncated_normal([5, 5, 32, 64], stddev=0.1, seed=SEED))
W_fc1 = tf.Variable(tf.truncated_normal([7 * 7 * 64, 1024], stddev=0.1, seed=SEED))
W_fc2 = tf.Variable(tf.truncated_normal([1024, 10], stddev=0.1, seed=SEED))
# Bias
with tf.name_scope("biases"):
b_conv1 = tf.Variable(tf.constant(0.1, shape=[32]))
b_conv2 = tf.Variable(tf.constant(0.1, shape=[64]))
b_fc1 = tf.Variable(tf.constant(0.1, shape=[1024]))
b_fc2 = tf.Variable(tf.constant(0.1, shape=[10]))
# Assign initialized variables to current worker
worker.get_vars([W_conv1, W_conv2, W_fc1, W_fc2, b_conv1, b_conv2, b_fc1, b_fc2])
# Initial enqueue list of operations which enqueue initial PVs to all queues
init_enq_op = []
for neighbour in topology[worker._id]:
op = token_queues[worker._id][neighbour].enqueue(worker.vars_)
init_enq_op.append(op)
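# [sketch] Sanity check (added): without this initial enqueue every worker would
# block on its first dequeue below, so one op per neighbour is expected
assert len(init_enq_op) == len(topology[worker._id])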
# Function that scales the gradients by the learning_rate; the scaled
# gradients are subtracted from the averaged PV further below
def grads_x_lr(grads, vars_, learning_rate):
|
with tf.name_scope("softmax"):
assign_ops = []
for tensor1, tensor2 in zip((W_conv1, W_conv2, W_fc1, W_fc2, b_conv1, b_conv2, b_fc1, b_fc2), worker.vars_):
assign_ops.append(tf.assign(tensor1, tensor2))
# This line must be equivalent to the above assign_ops
# but we need the assign_ops as a dependency below
# W1, W2, b1, b2 = worker.vars_
# Our deep CNN model, always subject to changes
with tf.control_dependencies(assign_ops):
x_image = tf.reshape(x, [-1, 28, 28, 1])
h_conv1 = tf.nn.relu(tf.nn.conv2d(x_image, W_conv1, strides=[1, 1, 1, 1], padding='SAME') + b_conv1)
h_pool1 = tf.nn.max_pool(h_conv1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
h_conv2 = tf.nn.relu(tf.nn.conv2d(h_pool1, W_conv2, strides=[1, 1, 1, 1], padding='SAME') + b_conv2)
h_pool2 = tf.nn.max_pool(h_conv2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
h_pool2_flat = tf.reshape(h_pool2, [-1, 7 * 7 * 64])
h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)
h_fc1_drop = tf.nn.dropout(h_fc1, 0.8)
# y is our prediction
y = tf.add(tf.matmul(h_fc1_drop, W_fc2), b_fc2, name="Prediction")
# Specify cost function
with tf.name_scope('cross_entropy'):
# both should be equivalent
#cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), axis=[1]))
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(labels=y_, logits=y))
with tf.name_scope('compute_grads'):
# compute and assign gradients to the current worker
worker.get_grads(cross_entropy, tf.trainable_variables())
# dequeue from neighbours
pvs_nbr = [token_queues[neighbour][worker._id].dequeue() for neighbour in topology[worker._id]]
# stack the current PV to the neighbour ones
pvs_nbr.append(worker.vars_)
pvs_zipped = zip(*pvs_nbr)
# average all the PVs
mean_pvs = [tf.reduce_mean(item, axis=0) for item in pvs_zipped]
# A print operation that outputs the maximum value of the W_conv1 vector across 2 axes
# in order to check its value among different workers;
with tf.name_scope("print_operations2"), tf.control_dependencies([W_conv1]):
max_var0 = tf.reduce_max(tf.reduce_max(W_conv1, reduction_indices=[1]), reduction_indices=[0])
print_ops2 = [tf.print("Worker {} reduce max variable".format(worker._id), max_var0, output_stream=sys.stdout)]
# better: could be replaced with the norm
#w1_norm = tf.norm(W_conv1)
#print_ops2 = [tf.print("Worker {} reduce max variable".format(worker._id), w1_norm, output_stream=sys.stdout)]
# Now subtract the scaled gradient from the averaged PV
| mul_grads = []
for grad, var in zip(grads, vars_):
mul_grad = tf.scalar_mul(learning_rate, grad)
mul_grads.append(mul_grad)
return mul_grads | identifier_body |
rally.rs | VehToVehSystem, CollisionWeaponFireHitboxSystem,
MoveParticlesSystem, MoveWeaponFireSystem, PathingLinesSystem, VehicleMoveSystem,
VehicleShieldArmorHealthSystem, VehicleStatusSystem, VehicleTrackingSystem,
VehicleWeaponsSystem,
};
pub const PLAYER_CAMERA: bool = false;
pub const DEBUG_LINES: bool = false;
// cargo run --features sdl_controller
// Damage at a speed of 100
pub const BASE_COLLISION_DAMAGE: f32 = 20.0;
pub const COLLISION_PIERCING_DAMAGE_PCT: f32 = 0.0;
pub const COLLISION_SHIELD_DAMAGE_PCT: f32 = 25.0;
pub const COLLISION_ARMOR_DAMAGE_PCT: f32 = 80.0;
pub const COLLISION_HEALTH_DAMAGE_PCT: f32 = 100.0;
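// [sketch] How the damage constants above presumably combine: damage scales
// linearly with speed relative to the reference speed of 100. This helper is
// an illustration added here, not the game's actual routine.
fn _collision_damage_at(speed: f32) -> f32 {
BASE_COLLISION_DAMAGE * (speed / 100.0)
}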
#[derive(Default)]
pub struct GameplayState<'a, 'b> {
player_ui_initialized: bool,
// Whether the game is paused or not
pub paused: bool,
// The UI root entity. Deleting this should remove the complete UI
ui_root: Option<Entity>,
// A reference to the FPS display, which we want to interact with
fps_display: Option<Entity>,
/// The `State` specific `Dispatcher`, containing `System`s only relevant for this `State`.
dispatcher: Option<Dispatcher<'a, 'b>>,
sprite_sheet_handle: Option<Handle<SpriteSheet>>, // Load the spritesheet necessary to render the graphics.
texture_sheet_handle: Option<Handle<SpriteSheet>>,
}
impl<'a, 'b> SimpleState for GameplayState<'a, 'b> {
fn on_start(&mut self, mut data: StateData<'_, GameData<'_, '_>>) {
self.player_ui_initialized = false;
let world = &mut data.world;
self.ui_root =
Some(world.exec(|mut creator: UiCreator<'_>| creator.create("ui/gameplay.ron", ())));
world.register::<UiText>();
world.register::<UiTransform>();
world.register::<Armor>();
world.register::<Health>();
world.register::<ArenaElement>();
world.register::<Hitbox>();
world.register::<Player>();
world.register::<Repair>();
world.register::<Shield>();
world.register::<Vehicle>();
world.register::<WeaponArray>();
world.register::<WeaponFire>();
world.register::<Particles>();
world.register::<PlayerWeaponIcon>();
// Setup debug lines as a resource
world.insert(DebugLines::new());
// Configure width of lines. Optional step
world.insert(DebugLinesParams { line_width: 2.0 });
// Setup debug lines as a component and add lines to render axis&grid
let debug_lines_component = DebugLinesComponent::new();
world.create_entity().with(debug_lines_component).build();
world.register::<Removal<u32>>();
self.sprite_sheet_handle.replace(load_sprite_sheet(
world,
"texture/rally_spritesheet.png".to_string(),
"texture/rally_spritesheet.ron".to_string(),
));
self.texture_sheet_handle.replace(load_sprite_sheet(
world,
"texture/rally_texture_sheet.png".to_string(),
"texture/rally_texture_sheet.ron".to_string(),
));
let weapon_fire_resource: WeaponFireResource =
initialize_weapon_fire_resource(world, self.sprite_sheet_handle.clone().unwrap());
initialize_timer_ui(world);
world.insert(ArenaNavMesh {
vertices: Vec::new(),
triangles: Vec::new(),
});
world.insert(ArenaNavMeshFinal { mesh: None });
intialize_arena(
world,
self.sprite_sheet_handle.clone().unwrap(),
self.texture_sheet_handle.clone().unwrap(),
);
let max_players;
let bot_players;
let arena_name;
{
let fetched_game_mode_setup = world.try_fetch::<GameModeSetup>();
if let Some(game_mode_setup) = fetched_game_mode_setup {
max_players = game_mode_setup.max_players;
bot_players = game_mode_setup.bot_players;
arena_name = game_mode_setup.arena_name.clone();
} else {
max_players = 4;
bot_players = 3;
arena_name = ArenaNames::OpenEmptyMap;
}
}
let arena_properties;
{
let fetched_arena_store = world.try_fetch::<ArenaStoreResource>();
if let Some(arena_store) = fetched_arena_store {
arena_properties = match arena_store.properties.get(&arena_name) {
Some(arena_props_get) => (*arena_props_get).clone(),
_ => ArenaProperties::default(),
};
} else {
arena_properties = ArenaProperties::default();
}
}
let player_to_team;
{
let fetched_game_team_setup = world.try_fetch::<GameTeamSetup>();
if let Some(game_team_setup) = fetched_game_team_setup {
player_to_team = game_team_setup.teams.clone();
} else {
player_to_team = [0, 1, 2, 3];
}
}
let player_status_text = PlayerStatusText {
shield: None,
armor: None,
health: None,
points: None,
lives: None,
};
for player_index in 0..max_players {
let vehicle_stats;
{
let fetched_game_vehicle_setup = world.try_fetch::<GameVehicleSetup>();
if let Some(game_vehicle_setup) = fetched_game_vehicle_setup {
vehicle_stats = game_vehicle_setup.stats[player_index].clone();
} else {
vehicle_stats = get_none_vehicle();
}
}
let is_bot = player_index >= max_players - bot_players;
let player = intialize_player(
world,
self.sprite_sheet_handle.clone().unwrap(),
player_index,
weapon_fire_resource.clone(),
player_to_team[player_index],
is_bot,
player_status_text.clone(),
vehicle_stats,
);
if PLAYER_CAMERA && !is_bot {
initialize_camera_to_player(world, &arena_properties, player);
}
}
if !PLAYER_CAMERA {
initialize_camera(world, &arena_properties);
}
// Create the `DispatcherBuilder` and register some `System`s that should only run for this `State`.
let mut dispatcher_builder = DispatcherBuilder::new();
dispatcher_builder.add(VehicleTrackingSystem, "vehicle_tracking_system", &[]);
dispatcher_builder.add(VehicleMoveSystem::default(), "vehicle_move_system", &[]);
dispatcher_builder.add(VehicleWeaponsSystem, "vehicle_weapons_system", &[]);
dispatcher_builder.add(
CollisionWeaponFireHitboxSystem::default(),
"collision_weapon_fire_hitbox_system",
&[],
);
dispatcher_builder.add(
MoveWeaponFireSystem::default(),
"move_weapon_fire_system",
&[],
);
dispatcher_builder.add(
CollisionVehToVehSystem,
"collision_vehicle_vehicle_system",
&[],
);
dispatcher_builder.add(
VehicleShieldArmorHealthSystem,
"vehicle_shield_armor_health_system",
&[],
);
dispatcher_builder.add(VehicleStatusSystem::default(), "vehicle_status_system", &[]);
dispatcher_builder.add(MoveParticlesSystem, "move_particles_system", &[]);
dispatcher_builder.add(PathingLinesSystem::default(), "pathing_lines_system", &[]);
dispatcher_builder.add(
CameraTrackingSystem::default(),
"camera_tracking_system",
&[],
);
// Build and setup the `Dispatcher`.
let mut dispatcher = dispatcher_builder.build();
dispatcher.setup(world);
self.dispatcher = Some(dispatcher);
}
fn on_pause(&mut self, _data: StateData<'_, GameData<'_, '_>>) {
self.paused = true;
}
fn on_resume(&mut self, _data: StateData<'_, GameData<'_, '_>>) {
self.paused = false;
}
fn on_stop(&mut self, data: StateData<'_, GameData<'_, '_>>) {
if let Some(root_entity) = self.ui_root {
data.world
.delete_entity(root_entity)
.expect("Failed to remove Game Screen");
}
let fetched_game_score = data.world.try_fetch::<GameScore>();
if let Some(game_score) = fetched_game_score {
if !game_score.game_ended {
exec_removal(&data.world.entities(), &data.world.read_storage(), 0 as u32);
}
} else {
exec_removal(&data.world.entities(), &data.world.read_storage(), 0 as u32);
}
self.player_ui_initialized = false;
self.ui_root = None;
self.fps_display = None;
}
fn handle_event(
&mut self,
_: StateData<'_, GameData<'_, '_>>,
event: StateEvent,
) -> SimpleTrans {
match &event {
StateEvent::Window(event) => {
if is_close_requested(&event) {
log::info!("[Trans::Quit] Quitting Application!");
Trans::Quit
} else if is_key_down(&event, VirtualKeyCode::Escape) {
log::info!("[Trans::Push] Pausing Game!");
Trans::Push(Box::new(PauseMenuState::default()))
} else {
Trans::None
}
}
StateEvent::Ui(_ui_event) => |
StateEvent::Input(_input) => {
//log::info!("Input Event detected: {:?}.", input);
Trans::None
}
}
| {
// log::info!(
// "[HANDLE_EVENT] You just interacted with a ui element: {:?}",
// ui_event
// );
Trans::None
} | conditional_block |
rally.rs | VehToVehSystem, CollisionWeaponFireHitboxSystem,
MoveParticlesSystem, MoveWeaponFireSystem, PathingLinesSystem, VehicleMoveSystem,
VehicleShieldArmorHealthSystem, VehicleStatusSystem, VehicleTrackingSystem,
VehicleWeaponsSystem,
};
pub const PLAYER_CAMERA: bool = false;
pub const DEBUG_LINES: bool = false;
// cargo run --features sdl_controller
// Damage at a speed of 100
pub const BASE_COLLISION_DAMAGE: f32 = 20.0;
pub const COLLISION_PIERCING_DAMAGE_PCT: f32 = 0.0;
pub const COLLISION_SHIELD_DAMAGE_PCT: f32 = 25.0;
pub const COLLISION_ARMOR_DAMAGE_PCT: f32 = 80.0;
pub const COLLISION_HEALTH_DAMAGE_PCT: f32 = 100.0;
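// [sketch] Likely split of a collision damage amount across shield, armor and
// health using the PCT constants above; an illustrative assumption, not the
// game's actual routine.
fn _split_collision_damage(damage: f32) -> (f32, f32, f32) {
(
damage * COLLISION_SHIELD_DAMAGE_PCT / 100.0,
damage * COLLISION_ARMOR_DAMAGE_PCT / 100.0,
damage * COLLISION_HEALTH_DAMAGE_PCT / 100.0,
)
}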
#[derive(Default)]
pub struct GameplayState<'a, 'b> {
player_ui_initialized: bool,
// Whether the game is paused or not
pub paused: bool,
// The UI root entity. Deleting this should remove the complete UI
ui_root: Option<Entity>,
// A reference to the FPS display, which we want to interact with
fps_display: Option<Entity>,
/// The `State` specific `Dispatcher`, containing `System`s only relevant for this `State`.
dispatcher: Option<Dispatcher<'a, 'b>>,
sprite_sheet_handle: Option<Handle<SpriteSheet>>, // Load the spritesheet necessary to render the graphics.
texture_sheet_handle: Option<Handle<SpriteSheet>>,
}
impl<'a, 'b> SimpleState for GameplayState<'a, 'b> {
fn on_start(&mut self, mut data: StateData<'_, GameData<'_, '_>>) {
self.player_ui_initialized = false;
let world = &mut data.world;
self.ui_root =
Some(world.exec(|mut creator: UiCreator<'_>| creator.create("ui/gameplay.ron", ())));
world.register::<UiText>();
world.register::<UiTransform>();
world.register::<Armor>();
world.register::<Health>();
world.register::<ArenaElement>();
world.register::<Hitbox>();
world.register::<Player>();
world.register::<Repair>();
world.register::<Shield>();
world.register::<Vehicle>();
world.register::<WeaponArray>();
world.register::<WeaponFire>();
world.register::<Particles>();
world.register::<PlayerWeaponIcon>();
// Setup debug lines as a resource
world.insert(DebugLines::new());
// Configure width of lines. Optional step
world.insert(DebugLinesParams { line_width: 2.0 });
// Setup debug lines as a component and add lines to render axis&grid
let debug_lines_component = DebugLinesComponent::new();
world.create_entity().with(debug_lines_component).build();
world.register::<Removal<u32>>();
self.sprite_sheet_handle.replace(load_sprite_sheet(
world,
"texture/rally_spritesheet.png".to_string(),
"texture/rally_spritesheet.ron".to_string(),
));
self.texture_sheet_handle.replace(load_sprite_sheet(
world,
"texture/rally_texture_sheet.png".to_string(),
"texture/rally_texture_sheet.ron".to_string(),
));
let weapon_fire_resource: WeaponFireResource =
initialize_weapon_fire_resource(world, self.sprite_sheet_handle.clone().unwrap());
initialize_timer_ui(world);
world.insert(ArenaNavMesh {
vertices: Vec::new(),
triangles: Vec::new(),
});
world.insert(ArenaNavMeshFinal { mesh: None });
intialize_arena(
world,
self.sprite_sheet_handle.clone().unwrap(),
self.texture_sheet_handle.clone().unwrap(),
);
let max_players;
let bot_players;
let arena_name;
{
let fetched_game_mode_setup = world.try_fetch::<GameModeSetup>();
if let Some(game_mode_setup) = fetched_game_mode_setup {
max_players = game_mode_setup.max_players;
bot_players = game_mode_setup.bot_players;
arena_name = game_mode_setup.arena_name.clone();
} else {
max_players = 4;
bot_players = 3;
arena_name = ArenaNames::OpenEmptyMap;
}
}
let arena_properties;
{
let fetched_arena_store = world.try_fetch::<ArenaStoreResource>();
if let Some(arena_store) = fetched_arena_store {
arena_properties = match arena_store.properties.get(&arena_name) {
Some(arena_props_get) => (*arena_props_get).clone(),
_ => ArenaProperties::default(),
};
} else {
arena_properties = ArenaProperties::default();
}
}
let player_to_team;
{
let fetched_game_team_setup = world.try_fetch::<GameTeamSetup>();
if let Some(game_team_setup) = fetched_game_team_setup {
player_to_team = game_team_setup.teams.clone();
} else {
player_to_team = [0, 1, 2, 3];
}
}
let player_status_text = PlayerStatusText {
shield: None,
armor: None,
health: None,
points: None,
lives: None,
};
for player_index in 0..max_players {
let vehicle_stats;
{
let fetched_game_vehicle_setup = world.try_fetch::<GameVehicleSetup>();
if let Some(game_vehicle_setup) = fetched_game_vehicle_setup {
vehicle_stats = game_vehicle_setup.stats[player_index].clone();
} else {
vehicle_stats = get_none_vehicle();
}
}
let is_bot = player_index >= max_players - bot_players;
let player = intialize_player(
world,
self.sprite_sheet_handle.clone().unwrap(),
player_index,
weapon_fire_resource.clone(),
player_to_team[player_index],
is_bot,
player_status_text.clone(),
vehicle_stats,
);
if PLAYER_CAMERA && !is_bot {
initialize_camera_to_player(world, &arena_properties, player);
}
}
if !PLAYER_CAMERA {
initialize_camera(world, &arena_properties);
}
// Create the `DispatcherBuilder` and register some `System`s that should only run for this `State`.
let mut dispatcher_builder = DispatcherBuilder::new();
dispatcher_builder.add(VehicleTrackingSystem, "vehicle_tracking_system", &[]);
dispatcher_builder.add(VehicleMoveSystem::default(), "vehicle_move_system", &[]);
dispatcher_builder.add(VehicleWeaponsSystem, "vehicle_weapons_system", &[]);
dispatcher_builder.add(
CollisionWeaponFireHitboxSystem::default(),
"collision_weapon_fire_hitbox_system",
&[],
);
dispatcher_builder.add(
MoveWeaponFireSystem::default(),
"move_weapon_fire_system",
&[],
);
dispatcher_builder.add(
CollisionVehToVehSystem,
"collision_vehicle_vehicle_system",
&[],
);
dispatcher_builder.add(
VehicleShieldArmorHealthSystem,
"vehicle_shield_armor_health_system",
&[],
);
dispatcher_builder.add(VehicleStatusSystem::default(), "vehicle_status_system", &[]);
dispatcher_builder.add(MoveParticlesSystem, "move_particles_system", &[]);
dispatcher_builder.add(PathingLinesSystem::default(), "pathing_lines_system", &[]);
dispatcher_builder.add(
CameraTrackingSystem::default(),
"camera_tracking_system",
&[],
);
// Build and setup the `Dispatcher`.
let mut dispatcher = dispatcher_builder.build();
dispatcher.setup(world);
self.dispatcher = Some(dispatcher);
} | }
fn on_resume(&mut self, _data: StateData<'_, GameData<'_, '_>>) {
self.paused = false;
}
fn on_stop(&mut self, data: StateData<'_, GameData<'_, '_>>) {
if let Some(root_entity) = self.ui_root {
data.world
.delete_entity(root_entity)
.expect("Failed to remove Game Screen");
}
let fetched_game_score = data.world.try_fetch::<GameScore>();
if let Some(game_score) = fetched_game_score {
if !game_score.game_ended {
exec_removal(&data.world.entities(), &data.world.read_storage(), 0 as u32);
}
} else {
exec_removal(&data.world.entities(), &data.world.read_storage(), 0 as u32);
}
self.player_ui_initialized = false;
self.ui_root = None;
self.fps_display = None;
}
fn handle_event(
&mut self,
_: StateData<'_, GameData<'_, '_>>,
event: StateEvent,
) -> SimpleTrans {
match &event {
StateEvent::Window(event) => {
if is_close_requested(&event) {
log::info!("[Trans::Quit] Quitting Application!");
Trans::Quit
} else if is_key_down(&event, VirtualKeyCode::Escape) {
log::info!("[Trans::Push] Pausing Game!");
Trans::Push(Box::new(PauseMenuState::default()))
} else {
Trans::None
}
}
StateEvent::Ui(_ui_event) => {
// log::info!(
// "[HANDLE_EVENT] You just interacted with a ui element: {:?}",
// ui_event
// );
Trans::None
}
StateEvent::Input(_input) => {
//log::info!("Input Event detected: {:?}.", input);
Trans::None
}
}
|
fn on_pause(&mut self, _data: StateData<'_, GameData<'_, '_>>) {
self.paused = true; | random_line_split |
rally.rs | ));
let weapon_fire_resource: WeaponFireResource =
initialize_weapon_fire_resource(world, self.sprite_sheet_handle.clone().unwrap());
initialize_timer_ui(world);
world.insert(ArenaNavMesh {
vertices: Vec::new(),
triangles: Vec::new(),
});
world.insert(ArenaNavMeshFinal { mesh: None });
intialize_arena(
world,
self.sprite_sheet_handle.clone().unwrap(),
self.texture_sheet_handle.clone().unwrap(),
);
let max_players;
let bot_players;
let arena_name;
{
let fetched_game_mode_setup = world.try_fetch::<GameModeSetup>();
if let Some(game_mode_setup) = fetched_game_mode_setup {
max_players = game_mode_setup.max_players;
bot_players = game_mode_setup.bot_players;
arena_name = game_mode_setup.arena_name.clone();
} else {
max_players = 4;
bot_players = 3;
arena_name = ArenaNames::OpenEmptyMap;
}
}
let arena_properties;
{
let fetched_arena_store = world.try_fetch::<ArenaStoreResource>();
if let Some(arena_store) = fetched_arena_store {
arena_properties = match arena_store.properties.get(&arena_name) {
Some(arena_props_get) => (*arena_props_get).clone(),
_ => ArenaProperties::default(),
};
} else {
arena_properties = ArenaProperties::default();
}
}
let player_to_team;
{
let fetched_game_team_setup = world.try_fetch::<GameTeamSetup>();
if let Some(game_team_setup) = fetched_game_team_setup {
player_to_team = game_team_setup.teams.clone();
} else {
player_to_team = [0, 1, 2, 3];
}
}
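// [sketch] Added sanity check: every configured player needs a team entry;
// assumes `player_to_team` always holds at least `max_players` entries.
debug_assert!(player_to_team.len() >= max_players);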
let player_status_text = PlayerStatusText {
shield: None,
armor: None,
health: None,
points: None,
lives: None,
};
for player_index in 0..max_players {
let vehicle_stats;
{
let fetched_game_vehicle_setup = world.try_fetch::<GameVehicleSetup>();
if let Some(game_vehicle_setup) = fetched_game_vehicle_setup {
vehicle_stats = game_vehicle_setup.stats[player_index].clone();
} else {
vehicle_stats = get_none_vehicle();
}
}
let is_bot = player_index >= max_players - bot_players;
let player = intialize_player(
world,
self.sprite_sheet_handle.clone().unwrap(),
player_index,
weapon_fire_resource.clone(),
player_to_team[player_index],
is_bot,
player_status_text.clone(),
vehicle_stats,
);
if PLAYER_CAMERA && !is_bot {
initialize_camera_to_player(world, &arena_properties, player);
}
}
if !PLAYER_CAMERA {
initialize_camera(world, &arena_properties);
}
// Create the `DispatcherBuilder` and register some `System`s that should only run for this `State`.
let mut dispatcher_builder = DispatcherBuilder::new();
dispatcher_builder.add(VehicleTrackingSystem, "vehicle_tracking_system", &[]);
dispatcher_builder.add(VehicleMoveSystem::default(), "vehicle_move_system", &[]);
dispatcher_builder.add(VehicleWeaponsSystem, "vehicle_weapons_system", &[]);
dispatcher_builder.add(
CollisionWeaponFireHitboxSystem::default(),
"collision_weapon_fire_hitbox_system",
&[],
);
dispatcher_builder.add(
MoveWeaponFireSystem::default(),
"move_weapon_fire_system",
&[],
);
dispatcher_builder.add(
CollisionVehToVehSystem,
"collision_vehicle_vehicle_system",
&[],
);
dispatcher_builder.add(
VehicleShieldArmorHealthSystem,
"vehicle_shield_armor_health_system",
&[],
);
dispatcher_builder.add(VehicleStatusSystem::default(), "vehicle_status_system", &[]);
dispatcher_builder.add(MoveParticlesSystem, "move_particles_system", &[]);
dispatcher_builder.add(PathingLinesSystem::default(), "pathing_lines_system", &[]);
dispatcher_builder.add(
CameraTrackingSystem::default(),
"camera_tracking_system",
&[],
);
// Build and setup the `Dispatcher`.
let mut dispatcher = dispatcher_builder.build();
dispatcher.setup(world);
self.dispatcher = Some(dispatcher);
}
fn on_pause(&mut self, _data: StateData<'_, GameData<'_, '_>>) {
self.paused = true;
}
fn on_resume(&mut self, _data: StateData<'_, GameData<'_, '_>>) {
self.paused = false;
}
fn on_stop(&mut self, data: StateData<'_, GameData<'_, '_>>) {
if let Some(root_entity) = self.ui_root {
data.world
.delete_entity(root_entity)
.expect("Failed to remove Game Screen");
}
let fetched_game_score = data.world.try_fetch::<GameScore>();
if let Some(game_score) = fetched_game_score {
if !game_score.game_ended {
exec_removal(&data.world.entities(), &data.world.read_storage(), 0 as u32);
}
} else {
exec_removal(&data.world.entities(), &data.world.read_storage(), 0 as u32);
}
self.player_ui_initialized = false;
self.ui_root = None;
self.fps_display = None;
}
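// `exec_removal` deletes every entity tagged with a matching `Removal<u32>`
// id. A minimal sketch of the tagging side (assuming the
// amethyst::utils::removal API; the `0u32` id must match the calls above):
//
// use amethyst::utils::removal::Removal;
//
// world
//     .create_entity()
//     .with(Removal::new(0u32))
//     .build();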
fn handle_event(
&mut self,
_: StateData<'_, GameData<'_, '_>>,
event: StateEvent,
) -> SimpleTrans {
match &event {
StateEvent::Window(event) => {
if is_close_requested(&event) {
log::info!("[Trans::Quit] Quitting Application!");
Trans::Quit
} else if is_key_down(&event, VirtualKeyCode::Escape) {
log::info!("[Trans::Push] Pausing Game!");
Trans::Push(Box::new(PauseMenuState::default()))
} else {
Trans::None
}
}
StateEvent::Ui(_ui_event) => {
// log::info!(
// "[HANDLE_EVENT] You just interacted with a ui element: {:?}",
// ui_event
// );
Trans::None
}
StateEvent::Input(_input) => {
//log::info!("Input Event detected: {:?}.", input);
Trans::None
}
}
}
fn update(&mut self, data: &mut StateData<'_, GameData<'_, '_>>) -> SimpleTrans {
if let Some(dispatcher) = self.dispatcher.as_mut() {
dispatcher.dispatch(&data.world);
}
let world = &mut data.world;
// this cannot happen in 'on_start', as the entity might not be fully
// initialized/registered/created yet.
if self.fps_display.is_none() {
world.exec(|finder: UiFinder<'_>| {
if let Some(entity) = finder.find("fps") {
self.fps_display = Some(entity);
}
});
}
// it is important that the 'paused' field is actually pausing your game.
// Make sure to also pause your running systems.
if !self.paused {
let mut ui_text = world.write_storage::<UiText>();
if let Some(fps_display) = self.fps_display.and_then(|entity| ui_text.get_mut(entity)) {
if world.read_resource::<Time>().frame_number() % 20 == 0 {
let fps = world.read_resource::<FpsCounter>().sampled_fps();
fps_display.text = format!("FPS: {:.*}", 2, fps);
}
}
}
if !self.player_ui_initialized {
let connected_success = connect_players_to_ui(world);
if connected_success {
self.player_ui_initialized = true;
}
}
let fetched_game_score = world.try_fetch::<GameScore>();
if let Some(game_score) = fetched_game_score {
if game_score.game_ended {
return Trans::Switch(Box::new(ScoreScreen::default()));
}
}
Trans::None
}
}
pub fn load_sprite_sheet(world: &mut World, storage: String, store: String) -> Handle<SpriteSheet> {
// Load the sprite sheet necessary to render the graphics.
// The texture is the pixel data
// `texture_handle` is a cloneable reference to the texture
let texture_handle = {
let loader = world.read_resource::<Loader>();
let texture_storage = world.read_resource::<AssetStorage<Texture>>();
loader.load(storage, ImageFormat::default(), (), &texture_storage)
};
let loader = world.read_resource::<Loader>();
let sprite_sheet_store = world.read_resource::<AssetStorage<SpriteSheet>>();
loader.load(
store, // Here we load the associated ron file
SpriteSheetFormat(texture_handle),
(),
&sprite_sheet_store,
)
}
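// Example usage, mirroring the calls in `on_start` above (same asset paths):
// load the sheet once, then clone the returned `Handle<SpriteSheet>` wherever
// it is needed -- cloning a handle is cheap and does not reload the asset.
//
// let sheet = load_sprite_sheet(
//     world,
//     "texture/rally_spritesheet.png".to_string(),
//     "texture/rally_spritesheet.ron".to_string(),
// );
// let for_player = sheet.clone();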
#[derive(Clone, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub enum AxisBinding {
VehicleAccel(usize),
VehicleTurn(usize),
VehicleStrafe(usize),
}
#[derive(Clone, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub enum ActionBinding {
VehiclePriFire(usize),
VehicleAltFire(usize),
VehicleRepair(usize),
}
impl Display for AxisBinding {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl Display for ActionBinding {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result | {
write!(f, "{:?}", self)
} | identifier_body |
|
rally.rs | VehToVehSystem, CollisionWeaponFireHitboxSystem,
MoveParticlesSystem, MoveWeaponFireSystem, PathingLinesSystem, VehicleMoveSystem,
VehicleShieldArmorHealthSystem, VehicleStatusSystem, VehicleTrackingSystem,
VehicleWeaponsSystem,
};
pub const PLAYER_CAMERA: bool = false;
pub const DEBUG_LINES: bool = false;
//cargo run --features sdl_controller
//Damage at speed of 100
pub const BASE_COLLISION_DAMAGE: f32 = 20.0;
pub const COLLISION_PIERCING_DAMAGE_PCT: f32 = 0.0;
pub const COLLISION_SHIELD_DAMAGE_PCT: f32 = 25.0;
pub const COLLISION_ARMOR_DAMAGE_PCT: f32 = 80.0;
pub const COLLISION_HEALTH_DAMAGE_PCT: f32 = 100.0;
#[derive(Default)]
pub struct GameplayState<'a, 'b> {
player_ui_initialized: bool,
// If the Game is paused or not
pub paused: bool,
// The UI root entity. Deleting this should remove the complete UI
ui_root: Option<Entity>,
// A reference to the FPS display, which we want to interact with
fps_display: Option<Entity>,
/// The `State` specific `Dispatcher`, containing `System`s only relevant for this `State`.
dispatcher: Option<Dispatcher<'a, 'b>>,
sprite_sheet_handle: Option<Handle<SpriteSheet>>, // Load the spritesheet necessary to render the graphics.
texture_sheet_handle: Option<Handle<SpriteSheet>>,
}
impl<'a, 'b> SimpleState for GameplayState<'a, 'b> {
fn on_start(&mut self, mut data: StateData<'_, GameData<'_, '_>>) {
self.player_ui_initialized = false;
let world = &mut data.world;
self.ui_root =
Some(world.exec(|mut creator: UiCreator<'_>| creator.create("ui/gameplay.ron", ())));
world.register::<UiText>();
world.register::<UiTransform>();
world.register::<Armor>();
world.register::<Health>();
world.register::<ArenaElement>();
world.register::<Hitbox>();
world.register::<Player>();
world.register::<Repair>();
world.register::<Shield>();
world.register::<Vehicle>();
world.register::<WeaponArray>();
world.register::<WeaponFire>();
world.register::<Particles>();
world.register::<PlayerWeaponIcon>();
// Setup debug lines as a resource
world.insert(DebugLines::new());
// Configure width of lines. Optional step
world.insert(DebugLinesParams { line_width: 2.0 });
// Setup debug lines as a component and add lines to render axis&grid
let debug_lines_component = DebugLinesComponent::new();
world.create_entity().with(debug_lines_component).build();
world.register::<Removal<u32>>();
self.sprite_sheet_handle.replace(load_sprite_sheet(
world,
"texture/rally_spritesheet.png".to_string(),
"texture/rally_spritesheet.ron".to_string(),
));
self.texture_sheet_handle.replace(load_sprite_sheet(
world,
"texture/rally_texture_sheet.png".to_string(),
"texture/rally_texture_sheet.ron".to_string(),
));
let weapon_fire_resource: WeaponFireResource =
initialize_weapon_fire_resource(world, self.sprite_sheet_handle.clone().unwrap());
initialize_timer_ui(world);
world.insert(ArenaNavMesh {
vertices: Vec::new(),
triangles: Vec::new(),
});
world.insert(ArenaNavMeshFinal { mesh: None });
intialize_arena(
world,
self.sprite_sheet_handle.clone().unwrap(),
self.texture_sheet_handle.clone().unwrap(),
);
let max_players;
let bot_players;
let arena_name;
{
let fetched_game_mode_setup = world.try_fetch::<GameModeSetup>();
if let Some(game_mode_setup) = fetched_game_mode_setup {
max_players = game_mode_setup.max_players;
bot_players = game_mode_setup.bot_players;
arena_name = game_mode_setup.arena_name.clone();
} else {
max_players = 4;
bot_players = 3;
arena_name = ArenaNames::OpenEmptyMap;
}
}
let arena_properties;
{
let fetched_arena_store = world.try_fetch::<ArenaStoreResource>();
if let Some(arena_store) = fetched_arena_store {
arena_properties = match arena_store.properties.get(&arena_name) {
Some(arena_props_get) => (*arena_props_get).clone(),
_ => ArenaProperties::default(),
};
} else {
arena_properties = ArenaProperties::default();
}
}
let player_to_team;
{
let fetched_game_team_setup = world.try_fetch::<GameTeamSetup>();
if let Some(game_team_setup) = fetched_game_team_setup {
player_to_team = game_team_setup.teams.clone();
} else {
player_to_team = [0, 1, 2, 3];
}
}
let player_status_text = PlayerStatusText {
shield: None,
armor: None,
health: None,
points: None,
lives: None,
};
for player_index in 0..max_players {
let vehicle_stats;
{
let fetched_game_vehicle_setup = world.try_fetch::<GameVehicleSetup>();
if let Some(game_vehicle_setup) = fetched_game_vehicle_setup {
vehicle_stats = game_vehicle_setup.stats[player_index].clone();
} else {
vehicle_stats = get_none_vehicle();
}
}
let is_bot = player_index >= max_players - bot_players;
let player = intialize_player(
world,
self.sprite_sheet_handle.clone().unwrap(),
player_index,
weapon_fire_resource.clone(),
player_to_team[player_index],
is_bot,
player_status_text.clone(),
vehicle_stats,
);
if PLAYER_CAMERA && !is_bot {
initialize_camera_to_player(world, &arena_properties, player);
}
}
if !PLAYER_CAMERA {
initialize_camera(world, &arena_properties);
}
// Create the `DispatcherBuilder` and register some `System`s that should only run for this `State`.
let mut dispatcher_builder = DispatcherBuilder::new();
dispatcher_builder.add(VehicleTrackingSystem, "vehicle_tracking_system", &[]);
dispatcher_builder.add(VehicleMoveSystem::default(), "vehicle_move_system", &[]);
dispatcher_builder.add(VehicleWeaponsSystem, "vehicle_weapons_system", &[]);
dispatcher_builder.add(
CollisionWeaponFireHitboxSystem::default(),
"collision_weapon_fire_hitbox_system",
&[],
);
dispatcher_builder.add(
MoveWeaponFireSystem::default(),
"move_weapon_fire_system",
&[],
);
dispatcher_builder.add(
CollisionVehToVehSystem,
"collision_vehicle_vehicle_system",
&[],
);
dispatcher_builder.add(
VehicleShieldArmorHealthSystem,
"vehicle_shield_armor_health_system",
&[],
);
dispatcher_builder.add(VehicleStatusSystem::default(), "vehicle_status_system", &[]);
dispatcher_builder.add(MoveParticlesSystem, "move_particles_system", &[]);
dispatcher_builder.add(PathingLinesSystem::default(), "pathing_lines_system", &[]);
dispatcher_builder.add(
CameraTrackingSystem::default(),
"camera_tracking_system",
&[],
);
// Build and setup the `Dispatcher`.
let mut dispatcher = dispatcher_builder.build();
dispatcher.setup(world);
self.dispatcher = Some(dispatcher);
}
fn on_pause(&mut self, _data: StateData<'_, GameData<'_, '_>>) {
self.paused = true;
}
fn on_resume(&mut self, _data: StateData<'_, GameData<'_, '_>>) {
self.paused = false;
}
fn | (&mut self, data: StateData<'_, GameData<'_, '_>>) {
if let Some(root_entity) = self.ui_root {
data.world
.delete_entity(root_entity)
.expect("Failed to remove Game Screen");
}
let fetched_game_score = data.world.try_fetch::<GameScore>();
if let Some(game_score) = fetched_game_score {
if !game_score.game_ended {
exec_removal(&data.world.entities(), &data.world.read_storage(), 0 as u32);
}
} else {
exec_removal(&data.world.entities(), &data.world.read_storage(), 0 as u32);
}
self.player_ui_initialized = false;
self.ui_root = None;
self.fps_display = None;
}
fn handle_event(
&mut self,
_: StateData<'_, GameData<'_, '_>>,
event: StateEvent,
) -> SimpleTrans {
match &event {
StateEvent::Window(event) => {
if is_close_requested(&event) {
log::info!("[Trans::Quit] Quitting Application!");
Trans::Quit
} else if is_key_down(&event, VirtualKeyCode::Escape) {
log::info!("[Trans::Push] Pausing Game!");
Trans::Push(Box::new(PauseMenuState::default()))
} else {
Trans::None
}
}
StateEvent::Ui(_ui_event) => {
// log::info!(
// "[HANDLE_EVENT] You just interacted with a ui element: {:?}",
// ui_event
// );
Trans::None
}
StateEvent::Input(_input) => {
//log::info!("Input Event detected: {:?}.", input);
Trans::None
}
}
| on_stop | identifier_name |
file_header.go | if err == io.EOF {
break
}
return nil, "", err
}
}
return nil, "", nil
}
func (p *parser) ParseGitFileHeader() (*File, error) {
const prefix = "diff --git "
if !strings.HasPrefix(p.Line(0), prefix) {
return nil, nil
}
header := p.Line(0)[len(prefix):]
defaultName, err := parseGitHeaderName(header)
if err != nil {
return nil, p.Errorf(0, "git file header: %v", err)
}
f := &File{}
for {
end, err := parseGitHeaderData(f, p.Line(1), defaultName)
if err != nil {
return nil, p.Errorf(1, "git file header: %v", err)
}
if err := p.Next(); err != nil {
if err == io.EOF {
break
}
return nil, err
}
if end {
break
}
}
if f.OldName == "" && f.NewName == "" {
if defaultName == "" {
return nil, p.Errorf(0, "git file header: missing filename information")
}
f.OldName = defaultName
f.NewName = defaultName
}
if (f.NewName == "" && !f.IsDelete) || (f.OldName == "" && !f.IsNew) {
return nil, p.Errorf(0, "git file header: missing filename information")
}
return f, nil
}
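// A sketch of input ParseGitFileHeader accepts (illustrative, not taken from
// a real repository); extended header lines are consumed until the first
// non-header line:
//
//	diff --git a/dir/file.txt b/dir/file.txt
//	index 1234567..89abcde 100644
//	--- a/dir/file.txt
//	+++ b/dir/file.txt
//
// For this input the returned File has OldName == NewName == "dir/file.txt".
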
func (p *parser) ParseTraditionalFileHeader() (*File, error) {
const shortestValidFragHeader = "@@ -1 +1 @@\n"
const (
oldPrefix = "--- "
newPrefix = "+++ "
)
oldLine, newLine := p.Line(0), p.Line(1)
if !strings.HasPrefix(oldLine, oldPrefix) || !strings.HasPrefix(newLine, newPrefix) {
return nil, nil
}
// heuristic: only a file header if followed by a (probable) fragment header
if len(p.Line(2)) < len(shortestValidFragHeader) || !strings.HasPrefix(p.Line(2), "@@ -") {
return nil, nil
}
// advance past the first two lines so parser is after the header
// no EOF check needed because we know there are >=3 valid lines
if err := p.Next(); err != nil {
return nil, err
}
if err := p.Next(); err != nil {
return nil, err
}
oldName, _, err := parseName(oldLine[len(oldPrefix):], '\t', 0)
if err != nil {
return nil, p.Errorf(0, "file header: %v", err)
}
newName, _, err := parseName(newLine[len(newPrefix):], '\t', 0)
if err != nil {
return nil, p.Errorf(1, "file header: %v", err)
}
f := &File{}
switch {
case oldName == devNull || hasEpochTimestamp(oldLine):
f.IsNew = true
f.NewName = newName
case newName == devNull || hasEpochTimestamp(newLine):
f.IsDelete = true
f.OldName = oldName
default:
// if old name is a prefix of new name, use that instead
// this avoids picking variants like "file.bak" or "file~"
if strings.HasPrefix(newName, oldName) {
f.OldName = oldName
f.NewName = oldName
} else {
f.OldName = newName
f.NewName = newName
}
}
return f, nil
}
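// A sketch of a traditional (non-git) header this parser accepts; timestamps
// are illustrative, and a /dev/null name or an epoch timestamp marks creation
// or deletion:
//
//	--- a.txt	2021-01-01 00:00:00.000000000 +0000
//	+++ a.txt.orig	2021-01-02 00:00:00.000000000 +0000
//	@@ -1 +1 @@
//
// Since "a.txt" is a prefix of "a.txt.orig", both OldName and NewName become
// "a.txt" under the prefix rule above.
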
// parseGitHeaderName extracts a default file name from the Git file header
// line. This is required for mode-only changes and creation/deletion of empty
// files. Other types of patch include the file name(s) in the header data.
// If the names in the header do not match because the patch is a rename,
// return an empty default name.
func parseGitHeaderName(header string) (string, error) {
header = strings.TrimSuffix(header, "\n")
if len(header) == 0 {
return "", nil
}
var err error
var first, second string
// there are 4 cases to account for:
//
// 1) unquoted unquoted
// 2) unquoted "quoted"
// 3) "quoted" unquoted
// 4) "quoted" "quoted"
//
quote := strings.IndexByte(header, '"')
switch {
case quote < 0:
// case 1
first = header
case quote > 0:
// case 2
first = header[:quote-1]
if !isSpace(header[quote-1]) {
return "", fmt.Errorf("missing separator")
}
second, _, err = parseQuotedName(header[quote:])
if err != nil {
return "", err
}
case quote == 0:
// case 3 or case 4
var n int
first, n, err = parseQuotedName(header)
if err != nil {
return "", err
}
// git accepts multiple spaces after a quoted name, but not after an
// unquoted name, since the name might end with one or more spaces
for n < len(header) && isSpace(header[n]) {
n++
}
if n == len(header) {
return "", nil
}
if header[n] == '"' {
second, _, err = parseQuotedName(header[n:])
if err != nil {
return "", err
}
} else {
second = header[n:]
}
}
first = trimTreePrefix(first, 1)
if second != "" {
if first == trimTreePrefix(second, 1) {
return first, nil
}
return "", nil
}
// at this point, both names are unquoted (case 1)
// since names may contain spaces, we can't use a known separator
// instead, look for a split that produces two equal names
for i := 0; i < len(first)-1; i++ {
if !isSpace(first[i]) {
continue
}
second = trimTreePrefix(first[i+1:], 1)
if name := first[:i]; name == second {
return name, nil
}
}
return "", nil
}
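// Illustrative header values and the default name this produces (derived from
// the cases above, not from the upstream tests):
//
//	a/file.txt b/file.txt     -> "file.txt" (tree prefixes trimmed)
//	"a/sp ace.c" "b/sp ace.c" -> "sp ace.c" (quoted names)
//	a/old.txt b/new.txt       -> ""         (names differ: a rename)
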
// parseGitHeaderData parses a single line of metadata from a Git file header.
// It returns true when header parsing is complete; in that case, line was the
// first line of non-header content.
func parseGitHeaderData(f *File, line, defaultName string) (end bool, err error) {
if len(line) > 0 && line[len(line)-1] == '\n' {
line = line[:len(line)-1]
}
for _, hdr := range []struct {
prefix string
end bool
parse func(*File, string, string) error
}{
{"@@ -", true, nil},
{"--- ", false, parseGitHeaderOldName},
{"+++ ", false, parseGitHeaderNewName},
{"old mode ", false, parseGitHeaderOldMode},
{"new mode ", false, parseGitHeaderNewMode},
{"deleted file mode ", false, parseGitHeaderDeletedMode},
{"new file mode ", false, parseGitHeaderCreatedMode},
{"copy from ", false, parseGitHeaderCopyFrom},
{"copy to ", false, parseGitHeaderCopyTo},
{"rename old ", false, parseGitHeaderRenameFrom},
{"rename new ", false, parseGitHeaderRenameTo},
{"rename from ", false, parseGitHeaderRenameFrom},
{"rename to ", false, parseGitHeaderRenameTo},
{"similarity index ", false, parseGitHeaderScore},
{"dissimilarity index ", false, parseGitHeaderScore},
{"index ", false, parseGitHeaderIndex},
} {
if strings.HasPrefix(line, hdr.prefix) {
if hdr.parse != nil {
err = hdr.parse(f, line[len(hdr.prefix):], defaultName)
}
return hdr.end, err
}
}
// unknown line indicates the end of the header
// this usually happens if the diff is empty
return true, nil
}
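// For example, feeding lines to parseGitHeaderData one at a time (a sketch;
// defaultName comes from parseGitHeaderName):
//
//	"old mode 100644"  -> parsed into f.OldMode, end == false
//	"new mode 100755"  -> parsed into f.NewMode, end == false
//	"@@ -1,3 +1,4 @@"  -> not consumed, end == true
//
// Any unrecognized line also returns end == true, which is how empty diffs
// terminate their header.
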
func parseGitHeaderOldName(f *File, line, defaultName string) error {
name, _, err := parseName(line, '\t', 1)
if err != nil {
return err
}
if f.OldName == "" && !f.IsNew {
f.OldName = name
return nil
}
return verifyGitHeaderName(name, f.OldName, f.IsNew, "old")
}
func parseGitHeaderNewName(f *File, line, defaultName string) error {
name, _, err := parseName(line, '\t', 1)
if err != nil {
return err
}
if f.NewName == "" && !f.IsDelete {
f.NewName = name
return nil
}
return verifyGitHeaderName(name, f.NewName, f | if err := p.Next(); err != nil { | random_line_split |
|
file_header.go |
if err := p.Next(); err != nil {
if err == io.EOF {
break
}
return nil, err
}
if end {
break
}
}
if f.OldName == "" && f.NewName == "" {
if defaultName == "" {
return nil, p.Errorf(0, "git file header: missing filename information")
}
f.OldName = defaultName
f.NewName = defaultName
}
if (f.NewName == "" && !f.IsDelete) || (f.OldName == "" && !f.IsNew) {
return nil, p.Errorf(0, "git file header: missing filename information")
}
return f, nil
}
func (p *parser) ParseTraditionalFileHeader() (*File, error) {
const shortestValidFragHeader = "@@ -1 +1 @@\n"
const (
oldPrefix = "--- "
newPrefix = "+++ "
)
oldLine, newLine := p.Line(0), p.Line(1)
if !strings.HasPrefix(oldLine, oldPrefix) || !strings.HasPrefix(newLine, newPrefix) {
return nil, nil
}
// heuristic: only a file header if followed by a (probable) fragment header
if len(p.Line(2)) < len(shortestValidFragHeader) || !strings.HasPrefix(p.Line(2), "@@ -") {
return nil, nil
}
// advance past the first two lines so parser is after the header
// no EOF check needed because we know there are >=3 valid lines
if err := p.Next(); err != nil {
return nil, err
}
if err := p.Next(); err != nil {
return nil, err
}
oldName, _, err := parseName(oldLine[len(oldPrefix):], '\t', 0)
if err != nil {
return nil, p.Errorf(0, "file header: %v", err)
}
newName, _, err := parseName(newLine[len(newPrefix):], '\t', 0)
if err != nil {
return nil, p.Errorf(1, "file header: %v", err)
}
f := &File{}
switch {
case oldName == devNull || hasEpochTimestamp(oldLine):
f.IsNew = true
f.NewName = newName
case newName == devNull || hasEpochTimestamp(newLine):
f.IsDelete = true
f.OldName = oldName
default:
// if old name is a prefix of new name, use that instead
// this avoids picking variants like "file.bak" or "file~"
if strings.HasPrefix(newName, oldName) {
f.OldName = oldName
f.NewName = oldName
} else {
f.OldName = newName
f.NewName = newName
}
}
return f, nil
}
// parseGitHeaderName extracts a default file name from the Git file header
// line. This is required for mode-only changes and creation/deletion of empty
// files. Other types of patch include the file name(s) in the header data.
// If the names in the header do not match because the patch is a rename,
// return an empty default name.
func parseGitHeaderName(header string) (string, error) {
header = strings.TrimSuffix(header, "\n")
if len(header) == 0 {
return "", nil
}
var err error
var first, second string
// there are 4 cases to account for:
//
// 1) unquoted unquoted
// 2) unquoted "quoted"
// 3) "quoted" unquoted
// 4) "quoted" "quoted"
//
quote := strings.IndexByte(header, '"')
switch {
case quote < 0:
// case 1
first = header
case quote > 0:
// case 2
first = header[:quote-1]
if !isSpace(header[quote-1]) {
return "", fmt.Errorf("missing separator")
}
second, _, err = parseQuotedName(header[quote:])
if err != nil {
return "", err
}
case quote == 0:
// case 3 or case 4
var n int
first, n, err = parseQuotedName(header)
if err != nil {
return "", err
}
// git accepts multiple spaces after a quoted name, but not after an
// unquoted name, since the name might end with one or more spaces
for n < len(header) && isSpace(header[n]) {
n++
}
if n == len(header) {
return "", nil
}
if header[n] == '"' {
second, _, err = parseQuotedName(header[n:])
if err != nil {
return "", err
}
} else {
second = header[n:]
}
}
first = trimTreePrefix(first, 1)
if second != "" {
if first == trimTreePrefix(second, 1) {
return first, nil
}
return "", nil
}
// at this point, both names are unquoted (case 1)
// since names may contain spaces, we can't use a known separator
// instead, look for a split that produces two equal names
for i := 0; i < len(first)-1; i++ {
if !isSpace(first[i]) {
continue
}
second = trimTreePrefix(first[i+1:], 1)
if name := first[:i]; name == second {
return name, nil
}
}
return "", nil
}
// parseGitHeaderData parses a single line of metadata from a Git file header.
// It returns true when header parsing is complete; in that case, line was the
// first line of non-header content.
func parseGitHeaderData(f *File, line, defaultName string) (end bool, err error) {
if len(line) > 0 && line[len(line)-1] == '\n' {
line = line[:len(line)-1]
}
for _, hdr := range []struct {
prefix string
end bool
parse func(*File, string, string) error
}{
{"@@ -", true, nil},
{"--- ", false, parseGitHeaderOldName},
{"+++ ", false, parseGitHeaderNewName},
{"old mode ", false, parseGitHeaderOldMode},
{"new mode ", false, parseGitHeaderNewMode},
{"deleted file mode ", false, parseGitHeaderDeletedMode},
{"new file mode ", false, parseGitHeaderCreatedMode},
{"copy from ", false, parseGitHeaderCopyFrom},
{"copy to ", false, parseGitHeaderCopyTo},
{"rename old ", false, parseGitHeaderRenameFrom},
{"rename new ", false, parseGitHeaderRenameTo},
{"rename from ", false, parseGitHeaderRenameFrom},
{"rename to ", false, parseGitHeaderRenameTo},
{"similarity index ", false, parseGitHeaderScore},
{"dissimilarity index ", false, parseGitHeaderScore},
{"index ", false, parseGitHeaderIndex},
} {
if strings.HasPrefix(line, hdr.prefix) {
if hdr.parse != nil {
err = hdr.parse(f, line[len(hdr.prefix):], defaultName)
}
return hdr.end, err
}
}
// unknown line indicates the end of the header
// this usually happens if the diff is empty
return true, nil
}
func parseGitHeaderOldName(f *File, line, defaultName string) error {
name, _, err := parseName(line, '\t', 1)
if err != nil {
return err
}
if f.OldName == "" && !f.IsNew {
f.OldName = name
return nil
}
return verifyGitHeaderName(name, f.OldName, f.IsNew, "old")
}
func parseGitHeaderNewName(f *File, line, defaultName string) error {
name, _, err := parseName(line, '\t', 1)
if err != nil {
return err
}
if f.NewName == "" && !f.IsDelete {
f.NewName = name
return nil
}
return verifyGitHeaderName(name, f.NewName, f.IsDelete, "new")
}
func parseGitHeaderOldMode(f *File, line, defaultName string) (err error) {
f.OldMode, err = parseMode(strings.TrimSpace(line))
return
}
func parseGitHeaderNewMode(f *File, line, defaultName string) (err error) {
f.NewMode, err = parseMode(strings.TrimSpace(line))
return
}
func parseGitHeaderDeletedMode(f *File, line, defaultName string) error {
f.IsDelete = true
f.OldName = defaultName
return parseGitHeaderOldMode(f, line, defaultName)
}
func parseGitHeaderCreatedMode(f *File, line, defaultName string) error {
f.IsNew = true
f.NewName = defaultName
return parseGitHeaderNewMode(f, line | {
return nil, p.Errorf(1, "git file header: %v", err)
} | conditional_block |
|
file_header.go | able) fragment header
if len(p.Line(2)) < len(shortestValidFragHeader) || !strings.HasPrefix(p.Line(2), "@@ -") {
return nil, nil
}
// advance past the first two lines so parser is after the header
// no EOF check needed because we know there are >=3 valid lines
if err := p.Next(); err != nil {
return nil, err
}
if err := p.Next(); err != nil {
return nil, err
}
oldName, _, err := parseName(oldLine[len(oldPrefix):], '\t', 0)
if err != nil {
return nil, p.Errorf(0, "file header: %v", err)
}
newName, _, err := parseName(newLine[len(newPrefix):], '\t', 0)
if err != nil {
return nil, p.Errorf(1, "file header: %v", err)
}
f := &File{}
switch {
case oldName == devNull || hasEpochTimestamp(oldLine):
f.IsNew = true
f.NewName = newName
case newName == devNull || hasEpochTimestamp(newLine):
f.IsDelete = true
f.OldName = oldName
default:
// if old name is a prefix of new name, use that instead
// this avoids picking variants like "file.bak" or "file~"
if strings.HasPrefix(newName, oldName) {
f.OldName = oldName
f.NewName = oldName
} else {
f.OldName = newName
f.NewName = newName
}
}
return f, nil
}
// parseGitHeaderName extracts a default file name from the Git file header
// line. This is required for mode-only changes and creation/deletion of empty
// files. Other types of patch include the file name(s) in the header data.
// If the names in the header do not match because the patch is a rename,
// return an empty default name.
func parseGitHeaderName(header string) (string, error) {
header = strings.TrimSuffix(header, "\n")
if len(header) == 0 {
return "", nil
}
var err error
var first, second string
// there are 4 cases to account for:
//
// 1) unquoted unquoted
// 2) unquoted "quoted"
// 3) "quoted" unquoted
// 4) "quoted" "quoted"
//
quote := strings.IndexByte(header, '"')
switch {
case quote < 0:
// case 1
first = header
case quote > 0:
// case 2
first = header[:quote-1]
if !isSpace(header[quote-1]) {
return "", fmt.Errorf("missing separator")
}
second, _, err = parseQuotedName(header[quote:])
if err != nil {
return "", err
}
case quote == 0:
// case 3 or case 4
var n int
first, n, err = parseQuotedName(header)
if err != nil {
return "", err
}
// git accepts multiple spaces after a quoted name, but not after an
// unquoted name, since the name might end with one or more spaces
for n < len(header) && isSpace(header[n]) {
n++
}
if n == len(header) {
return "", nil
}
if header[n] == '"' {
second, _, err = parseQuotedName(header[n:])
if err != nil {
return "", err
}
} else {
second = header[n:]
}
}
first = trimTreePrefix(first, 1)
if second != "" {
if first == trimTreePrefix(second, 1) {
return first, nil
}
return "", nil
}
// at this point, both names are unquoted (case 1)
// since names may contain spaces, we can't use a known separator
// instead, look for a split that produces two equal names
for i := 0; i < len(first)-1; i++ {
if !isSpace(first[i]) {
continue
}
second = trimTreePrefix(first[i+1:], 1)
if name := first[:i]; name == second {
return name, nil
}
}
return "", nil
}
// parseGitHeaderData parses a single line of metadata from a Git file header.
// It returns true when header parsing is complete; in that case, line was the
// first line of non-header content.
func parseGitHeaderData(f *File, line, defaultName string) (end bool, err error) {
if len(line) > 0 && line[len(line)-1] == '\n' {
line = line[:len(line)-1]
}
for _, hdr := range []struct {
prefix string
end bool
parse func(*File, string, string) error
}{
{"@@ -", true, nil},
{"--- ", false, parseGitHeaderOldName},
{"+++ ", false, parseGitHeaderNewName},
{"old mode ", false, parseGitHeaderOldMode},
{"new mode ", false, parseGitHeaderNewMode},
{"deleted file mode ", false, parseGitHeaderDeletedMode},
{"new file mode ", false, parseGitHeaderCreatedMode},
{"copy from ", false, parseGitHeaderCopyFrom},
{"copy to ", false, parseGitHeaderCopyTo},
{"rename old ", false, parseGitHeaderRenameFrom},
{"rename new ", false, parseGitHeaderRenameTo},
{"rename from ", false, parseGitHeaderRenameFrom},
{"rename to ", false, parseGitHeaderRenameTo},
{"similarity index ", false, parseGitHeaderScore},
{"dissimilarity index ", false, parseGitHeaderScore},
{"index ", false, parseGitHeaderIndex},
} {
if strings.HasPrefix(line, hdr.prefix) {
if hdr.parse != nil {
err = hdr.parse(f, line[len(hdr.prefix):], defaultName)
}
return hdr.end, err
}
}
// unknown line indicates the end of the header
// this usually happens if the diff is empty
return true, nil
}
func parseGitHeaderOldName(f *File, line, defaultName string) error {
name, _, err := parseName(line, '\t', 1)
if err != nil {
return err
}
if f.OldName == "" && !f.IsNew {
f.OldName = name
return nil
}
return verifyGitHeaderName(name, f.OldName, f.IsNew, "old")
}
func parseGitHeaderNewName(f *File, line, defaultName string) error {
name, _, err := parseName(line, '\t', 1)
if err != nil {
return err
}
if f.NewName == "" && !f.IsDelete {
f.NewName = name
return nil
}
return verifyGitHeaderName(name, f.NewName, f.IsDelete, "new")
}
func parseGitHeaderOldMode(f *File, line, defaultName string) (err error) {
f.OldMode, err = parseMode(strings.TrimSpace(line))
return
}
func parseGitHeaderNewMode(f *File, line, defaultName string) (err error) {
f.NewMode, err = parseMode(strings.TrimSpace(line))
return
}
func parseGitHeaderDeletedMode(f *File, line, defaultName string) error {
f.IsDelete = true
f.OldName = defaultName
return parseGitHeaderOldMode(f, line, defaultName)
}
func parseGitHeaderCreatedMode(f *File, line, defaultName string) error {
f.IsNew = true
f.NewName = defaultName
return parseGitHeaderNewMode(f, line, defaultName)
}
func parseGitHeaderCopyFrom(f *File, line, defaultName string) (err error) {
f.IsCopy = true
f.OldName, _, err = parseName(line, 0, 0)
return
}
func parseGitHeaderCopyTo(f *File, line, defaultName string) (err error) {
f.IsCopy = true
f.NewName, _, err = parseName(line, 0, 0)
return
}
func parseGitHeaderRenameFrom(f *File, line, defaultName string) (err error) {
f.IsRename = true
f.OldName, _, err = parseName(line, 0, 0)
return
}
func parseGitHeaderRenameTo(f *File, line, defaultName string) (err error) {
f.IsRename = true
f.NewName, _, err = parseName(line, 0, 0)
return
}
func parseGitHeaderScore(f *File, line, defaultName string) error {
score, err := strconv.ParseInt(strings.TrimSuffix(line, "%"), 10, 32)
if err != nil {
nerr := err.(*strconv.NumError)
return fmt.Errorf("invalid score line: %v", nerr.Err)
}
if score <= 100 {
f.Score = int(score)
}
return nil
}
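// A sketch of the resulting behavior:
//
//	parseGitHeaderScore(f, "90%", "")  // f.Score == 90
//	parseGitHeaderScore(f, "150%", "") // > 100: f.Score left unchanged
//	parseGitHeaderScore(f, "x%", "")   // error: invalid score line: invalid syntax
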
func | parseGitHeaderIndex | identifier_name |
|
file_header.go | f.NewName = defaultName
}
if (f.NewName == "" && !f.IsDelete) || (f.OldName == "" && !f.IsNew) {
return nil, p.Errorf(0, "git file header: missing filename information")
}
return f, nil
}
func (p *parser) ParseTraditionalFileHeader() (*File, error) {
const shortestValidFragHeader = "@@ -1 +1 @@\n"
const (
oldPrefix = "--- "
newPrefix = "+++ "
)
oldLine, newLine := p.Line(0), p.Line(1)
if !strings.HasPrefix(oldLine, oldPrefix) || !strings.HasPrefix(newLine, newPrefix) {
return nil, nil
}
// heuristic: only a file header if followed by a (probable) fragment header
if len(p.Line(2)) < len(shortestValidFragHeader) || !strings.HasPrefix(p.Line(2), "@@ -") {
return nil, nil
}
// advance past the first two lines so parser is after the header
// no EOF check needed because we know there are >=3 valid lines
if err := p.Next(); err != nil {
return nil, err
}
if err := p.Next(); err != nil {
return nil, err
}
oldName, _, err := parseName(oldLine[len(oldPrefix):], '\t', 0)
if err != nil {
return nil, p.Errorf(0, "file header: %v", err)
}
newName, _, err := parseName(newLine[len(newPrefix):], '\t', 0)
if err != nil {
return nil, p.Errorf(1, "file header: %v", err)
}
f := &File{}
switch {
case oldName == devNull || hasEpochTimestamp(oldLine):
f.IsNew = true
f.NewName = newName
case newName == devNull || hasEpochTimestamp(newLine):
f.IsDelete = true
f.OldName = oldName
default:
// if old name is a prefix of new name, use that instead
// this avoids picking variants like "file.bak" or "file~"
if strings.HasPrefix(newName, oldName) {
f.OldName = oldName
f.NewName = oldName
} else {
f.OldName = newName
f.NewName = newName
}
}
return f, nil
}
// parseGitHeaderName extracts a default file name from the Git file header
// line. This is required for mode-only changes and creation/deletion of empty
// files. Other types of patch include the file name(s) in the header data.
// If the names in the header do not match because the patch is a rename,
// return an empty default name.
func parseGitHeaderName(header string) (string, error) {
header = strings.TrimSuffix(header, "\n")
if len(header) == 0 {
return "", nil
}
var err error
var first, second string
// there are 4 cases to account for:
//
// 1) unquoted unquoted
// 2) unquoted "quoted"
// 3) "quoted" unquoted
// 4) "quoted" "quoted"
//
quote := strings.IndexByte(header, '"')
switch {
case quote < 0:
// case 1
first = header
case quote > 0:
// case 2
first = header[:quote-1]
if !isSpace(header[quote-1]) {
return "", fmt.Errorf("missing separator")
}
second, _, err = parseQuotedName(header[quote:])
if err != nil {
return "", err
}
case quote == 0:
// case 3 or case 4
var n int
first, n, err = parseQuotedName(header)
if err != nil {
return "", err
}
// git accepts multiple spaces after a quoted name, but not after an
// unquoted name, since the name might end with one or more spaces
for n < len(header) && isSpace(header[n]) {
n++
}
if n == len(header) {
return "", nil
}
if header[n] == '"' {
second, _, err = parseQuotedName(header[n:])
if err != nil {
return "", err
}
} else {
second = header[n:]
}
}
first = trimTreePrefix(first, 1)
if second != "" {
if first == trimTreePrefix(second, 1) {
return first, nil
}
return "", nil
}
// at this point, both names are unquoted (case 1)
// since names may contain spaces, we can't use a known separator
// instead, look for a split that produces two equal names
for i := 0; i < len(first)-1; i++ {
if !isSpace(first[i]) {
continue
}
second = trimTreePrefix(first[i+1:], 1)
if name := first[:i]; name == second {
return name, nil
}
}
return "", nil
}
// parseGitHeaderData parses a single line of metadata from a Git file header.
// It returns true when header parsing is complete; in that case, line was the
// first line of non-header content.
func parseGitHeaderData(f *File, line, defaultName string) (end bool, err error) {
if len(line) > 0 && line[len(line)-1] == '\n' {
line = line[:len(line)-1]
}
for _, hdr := range []struct {
prefix string
end bool
parse func(*File, string, string) error
}{
{"@@ -", true, nil},
{"--- ", false, parseGitHeaderOldName},
{"+++ ", false, parseGitHeaderNewName},
{"old mode ", false, parseGitHeaderOldMode},
{"new mode ", false, parseGitHeaderNewMode},
{"deleted file mode ", false, parseGitHeaderDeletedMode},
{"new file mode ", false, parseGitHeaderCreatedMode},
{"copy from ", false, parseGitHeaderCopyFrom},
{"copy to ", false, parseGitHeaderCopyTo},
{"rename old ", false, parseGitHeaderRenameFrom},
{"rename new ", false, parseGitHeaderRenameTo},
{"rename from ", false, parseGitHeaderRenameFrom},
{"rename to ", false, parseGitHeaderRenameTo},
{"similarity index ", false, parseGitHeaderScore},
{"dissimilarity index ", false, parseGitHeaderScore},
{"index ", false, parseGitHeaderIndex},
} {
if strings.HasPrefix(line, hdr.prefix) {
if hdr.parse != nil {
err = hdr.parse(f, line[len(hdr.prefix):], defaultName)
}
return hdr.end, err
}
}
// unknown line indicates the end of the header
// this usually happens if the diff is empty
return true, nil
}
func parseGitHeaderOldName(f *File, line, defaultName string) error |
func parseGitHeaderNewName(f *File, line, defaultName string) error {
name, _, err := parseName(line, '\t', 1)
if err != nil {
return err
}
if f.NewName == "" && !f.IsDelete {
f.NewName = name
return nil
}
return verifyGitHeaderName(name, f.NewName, f.IsDelete, "new")
}
func parseGitHeaderOldMode(f *File, line, defaultName string) (err error) {
f.OldMode, err = parseMode(strings.TrimSpace(line))
return
}
func parseGitHeaderNewMode(f *File, line, defaultName string) (err error) {
f.NewMode, err = parseMode(strings.TrimSpace(line))
return
}
func parseGitHeaderDeletedMode(f *File, line, defaultName string) error {
f.IsDelete = true
f.OldName = defaultName
return parseGitHeaderOldMode(f, line, defaultName)
}
func parseGitHeaderCreatedMode(f *File, line, defaultName string) error {
f.IsNew = true
f.NewName = defaultName
return parseGitHeaderNewMode(f, line, defaultName)
}
func parseGitHeaderCopyFrom(f *File, line, defaultName string) (err error) {
f.IsCopy = true
f.OldName, _, err = parseName(line, 0, 0)
return
}
func parseGitHeaderCopyTo(f *File, line, defaultName string) (err error) {
f.IsCopy = true
f.NewName, _, err = parseName(line, 0, 0)
return
}
func parseGitHeaderRenameFrom(f *File, line, defaultName string) (err | {
name, _, err := parseName(line, '\t', 1)
if err != nil {
return err
}
if f.OldName == "" && !f.IsNew {
f.OldName = name
return nil
}
return verifyGitHeaderName(name, f.OldName, f.IsNew, "old")
} | identifier_body |
data_helper.py | , 'DATA SOURCES')['filename_prev']
file_pop_pred = ConfigSectionMap(config, 'DATA SOURCES')['filename_pop_pred']
file_best_weights = ConfigSectionMap(config, 'DATA SOURCES')['filename_best_weights']
file_music_dataset = ConfigSectionMap(config, 'DATA SOURCES')['filename_music_dataset']
file_final_res = ConfigSectionMap(config, 'DATA SOURCES')['filename_final_results']
sources = {'Filename_audio_HL': file_audio_HL,
'Filename_audio_LL': file_audio_LL,
'Filename_lyrics':file_lyrics,
'Filename_semantic':file_semantic,
'Filename_prew_audio': file_prev_audio,
'Filename_pop_pred': file_pop_pred,
'Filename_best_weights':file_best_weights,
'Filename_music_dataset':file_music_dataset,
'Filename_final_results':file_final_res}
return sources
def read_data_directories(filename='config.ini'):
config = cp.ConfigParser()
config.read(filename)
directories = {}
saved_dir = ConfigSectionMap(config, 'DATA DIRECTORIES')['saved_dir']
data_dir = ConfigSectionMap(config, 'DATA DIRECTORIES')['data_dir']
saved_models = ConfigSectionMap(config, 'DATA DIRECTORIES')['saved_models']
saved_audio_dir = ConfigSectionMap(config, 'DATA DIRECTORIES')['saved_audio_dir']
cv_dir = ConfigSectionMap(config, 'DATA DIRECTORIES')['cv_dir']
ml_dataset_dir = ConfigSectionMap(config, 'DATA DIRECTORIES')['datasets_dir']
final_res_dir = ConfigSectionMap(config, 'DATA DIRECTORIES')['evaluation_dir']
directories = {'saved_directory': saved_dir,
'data_directory': data_dir,
'saved_audio_directory':saved_audio_dir,
'saved_models':saved_models,
'cv_dir':cv_dir,
'ml_dataset_dir':ml_dataset_dir,
'validation_res_dir': final_res_dir}
return directories
def get_date():
current_date = datetime.datetime.now()
final_date = current_date.strftime("%Y%m%d")
return final_date
def save_dataframe(filename, df, root_dir='data', enc='utf8'):
data_dir = os.path.join(root_dir, filename)
if not os.path.exists(root_dir):
os.makedirs(root_dir)
df.to_csv(data_dir,encoding=enc)
print('Data stored correctly!\n')
return True
def read_dataframe(filename, root_dir = 'data', enc='utf8'):
data_dir = os.path.join(root_dir, filename)
tp = pd.read_csv(data_dir, encoding=enc, index_col=0,
iterator=True, chunksize=100)
df = pd.concat(tp, ignore_index=True)
df = df.infer_objects() # infer better dtypes for object columns
return df
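# A minimal round-trip sketch (assumes a writable ./data directory):
# df = pd.DataFrame({'a': [1, 2]})
# save_dataframe('demo.csv', df) # writes data/demo.csv
# df2 = read_dataframe('demo.csv') # reads it back in 100-row chunks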
def save_dataframe_as_hdfs(filename, df, root_dir='data',enc='utf8'):
data_dir = os.path.join(root_dir, filename)
if not os.path.exists(root_dir):
os.makedirs(root_dir)
client_hdfs = InsecureClient('http://' + os.environ['IP_HDFS'] + ':50070')
with client_hdfs.write(data_dir, encoding = enc) as writer:
df.to_csv(writer)
return True
def read_hdfs(filename, root_dir='data'):
data_dir = os.path.join(root_dir, filename)
client_hdfs = InsecureClient('http://' + os.environ['IP_HDFS'] + ':50070')
with client_hdfs.read(data_dir, encoding = 'latin-1') as reader:
df = pd.read_csv(reader,index_col=0)
return df
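# Both HDFS helpers assume the namenode host is exported as IP_HDFS and that
# WebHDFS listens on port 50070, e.g. (hypothetical address):
# os.environ['IP_HDFS'] = '10.0.0.5'
# df = read_hdfs('songs.csv') # reads /data/songs.csv from HDFS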
def get_text_language(language):
valid_language = ''
if language =='en':
valid_language = 'english'
elif language == 'ar':
valid_language = 'arabic'
elif language == 'az':
|
elif language == 'da':
valid_language = 'danish'
elif language == 'nl':
valid_language = 'dutch'
elif language == 'fr':
valid_language = 'french'
elif language == 'de':
valid_language = 'german'
elif language == 'el':
valid_language = 'greek'
elif language == 'hu':
valid_language = 'hungarian'
elif language == 'id':
valid_language = 'indonesian'
elif language == 'it':
valid_language = 'italian'
elif language == 'kk':
valid_language = 'kazakh'
#
elif language == 'ne':
valid_language = 'nepali'
elif language == 'no':
valid_language = 'norwegian'
elif language == 'pt':
valid_language = 'portuguese'
elif language == 'ro':
valid_language = 'romanian'
elif language == 'ru':
valid_language = 'russian'
#
elif language == 'es':
valid_language = 'spanish'
elif language == 'sv':
valid_language = 'swedish'
elif language == 'tr':
valid_language = 'turkish'
else:
valid_language = 'NOT VALID' # By default
return valid_language
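# Examples:
# get_text_language('en') # -> 'english'
# get_text_language('xx') # -> 'NOT VALID' (unsupported code)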
def do_analysis(row):
error_message = 'we are not licensed to display the full lyrics for this song at the moment'
if (row is not np.nan and row != '' and
row is not None and error_message not in row):
return True
else:
return False
def clean_song_name(song_name):
# Remove parenthesis
done = 0
while done !=1:
start = song_name.find( '(' )
end = song_name.find( ')' )
if start != -1 and end != -1:
result = song_name[start+1:end]
removed_str = '(' + result + ')'
song_name = song_name.replace(removed_str, '')
else:
done = 1
# Remove brackets
done = 0
while done !=1:
start = song_name.find( '[' )
end = song_name.find( ']' )
if start != -1 and end != -1:
result = song_name[start+1:end]
removed_str = '[' + result + ']'
song_name = song_name.replace(removed_str, '')
else:
done = 1
# Remove remix
song_name = song_name.replace('remix', '').replace('Remix', '')
# Remove dash
start = song_name.find(' - ')
if start != -1:
removed_str = song_name[start:]
song_name = song_name.replace(removed_str, '')
return song_name
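# Example (bracketed parts, 'remix' and any ' - ' suffix are dropped; the
# leftover whitespace is kept):
# clean_song_name('Song (Remastered) [Live]') # -> 'Song  ' (trailing spaces)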
def check_infinity_data(history):
history_new = history
# Check if there is an infinity data point
if False in np.isfinite(history):
# Replace non-finite values (NaN or inf) with 99
valid_idx = list(np.where(np.isfinite(history))[0])
for idx, val in enumerate(history):
# Not finite value
if idx not in valid_idx:
history_new[idx] = 99
return history_new
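# Example (note: history_new aliases the input, so the array is also modified
# in place):
# check_infinity_data(np.array([1.0, np.inf, np.nan])) # -> array([ 1., 99., 99.])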
def get_model_subdir(semantic_an=True, metadata=True, audio_LL=True):
model_subDir = ''
if not semantic_an and not metadata and not audio_LL:
print('Not a valid analysis')
return None
elif not semantic_an and not metadata and audio_LL:
model_subDir = 'audio_models'
elif not semantic_an and metadata and not audio_LL:
model_subDir = 'metadata_models'
elif not semantic_an and metadata and audio_LL:
model_subDir = 'metadata_audio_models'
elif semantic_an and not metadata and not audio_LL:
model_subDir = 'semantic_models'
elif semantic_an and not metadata and audio_LL:
model_subDir = 'semantic_audio_models'
elif semantic_an and metadata and not audio_LL:
model_subDir = 'semantic_metadata_models'
elif semantic_an and metadata and audio_LL:
model_subDir = 'semantic_metadata_audio_models'
return model_subDir
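# Example:
# get_model_subdir(semantic_an=True, metadata=False, audio_LL=True)
# # -> 'semantic_audio_models'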
def get_popular_genres():
WIKI_URL = "https://en.wikipedia.org/wiki/List_of_popular_music_genres"
req = requests.get(WIKI_URL)
b = BeautifulSoup(req.content, 'html.parser')
links = []
# in this case, all of the links were inside '<li>' tags.
for i in b.find_all(name = 'li'):
links.append(i.text)
general_genres = {'African':links[81:127], 'Asian':links[128:132],
'East Asian':links[133:145],
'South & southeast Asian':links[146:164],
'Avant-garde':links[165:169],'Blues':links[170:196],
'Caribbean':links[197:233], 'Comedy':links[234:237],
'Country':links[238:273], 'Easy listening':links[274:280],
'Electronic':links[280:504], 'Folk':links[505:524],
'Hip hop & Rap':links[525 | valid_language = 'azerbaijani' | conditional_block |
data_helper.py | latin-1') as reader:
df = pd.read_csv(reader,index_col=0)
return df
def get_text_language(language):
valid_language = ''
if language =='en':
valid_language = 'english'
elif language == 'ar':
valid_language = 'arabic'
elif language == 'az':
valid_language = 'azerbaijani'
elif language == 'da':
valid_language = 'danish'
elif language == 'nl':
valid_language = 'dutch'
elif language == 'fr':
valid_language = 'french'
elif language == 'de':
valid_language = 'german'
elif language == 'el':
valid_language = 'greek'
elif language == 'hu':
valid_language = 'hungarian'
elif language == 'id':
valid_language = 'indonesian'
elif language == 'it':
valid_language = 'italian'
elif language == 'kk':
valid_language = 'kazakh'
#
elif language == 'ne':
valid_language = 'nepali'
elif language == 'no':
valid_language = 'norwegian'
elif language == 'pt':
valid_language = 'portuguese'
elif language == 'ro':
valid_language = 'romanian'
elif language == 'ru':
valid_language = 'russian'
#
elif language == 'es':
valid_language = 'spanish'
elif language == 'sv':
valid_language = 'swedish'
elif language == 'tr':
valid_language = 'turkish'
else:
valid_language = 'NOT VALID' # By default
return valid_language
def do_analysis(row):
error_message = 'we are not licensed to display the full lyrics for this song at the moment'
if (row is not np.nan and row != '' and
row is not None and error_message not in row):
return True
else:
return False
def clean_song_name(song_name):
# Remove parenthesis
done = 0
while done !=1:
start = song_name.find( '(' )
end = song_name.find( ')' )
if start != -1 and end != -1:
result = song_name[start+1:end]
removed_str = '(' + result + ')'
song_name = song_name.replace(removed_str, '')
else:
done = 1
# Remove brackets
done = 0
while done !=1:
start = song_name.find( '[' )
end = song_name.find( ']' )
if start != -1 and end != -1:
result = song_name[start+1:end]
removed_str = '[' + result + ']'
song_name = song_name.replace(removed_str, '')
else:
done = 1
# Remove remix
song_name = song_name.replace('remix', '').replace('Remix', '')
# Remove dash
start = song_name.find(' - ')
if start != -1:
removed_str = song_name[start:]
song_name = song_name.replace(removed_str, '')
return song_name
def check_infinity_data(history):
history_new = history
# Check if there is an infinity data point
if False in np.isfinite(history):
# Replace non-finite values (NaN or inf) with 99
valid_idx = list(np.where(np.isfinite(history))[0])
for idx, val in enumerate(history):
# Not finite value
if idx not in valid_idx:
history_new[idx] = 99
return history_new
def get_model_subdir(semantic_an=True, metadata=True, audio_LL=True):
model_subDir = ''
if not semantic_an and not metadata and not audio_LL:
print('Not a valid analysis')
return None
elif not semantic_an and not metadata and audio_LL:
model_subDir = 'audio_models'
elif not semantic_an and metadata and not audio_LL:
model_subDir = 'metadata_models'
elif not semantic_an and metadata and audio_LL:
model_subDir = 'metadata_audio_models'
elif semantic_an and not metadata and not audio_LL:
model_subDir = 'semantic_models'
elif semantic_an and not metadata and audio_LL:
model_subDir = 'semantic_audio_models'
elif semantic_an and metadata and not audio_LL:
model_subDir = 'semantic_metadata_models'
elif semantic_an and metadata and audio_LL:
model_subDir = 'semantic_metadata_audio_models'
return model_subDir
def get_popular_genres():
WIKI_URL = "https://en.wikipedia.org/wiki/List_of_popular_music_genres"
req = requests.get(WIKI_URL)
b = BeautifulSoup(req.content, 'html.parser')
links = []
# in this case, all of the links were inside '<li>' tags.
for i in b.find_all(name = 'li'):
links.append(i.text)
general_genres = {'African':links[81:127], 'Asian':links[128:132],
'East Asian':links[133:145],
'South & southeast Asian':links[146:164],
'Avant-garde':links[165:169],'Blues':links[170:196],
'Caribbean':links[197:233], 'Comedy':links[234:237],
'Country':links[238:273], 'Easy listening':links[274:280],
'Electronic':links[280:504], 'Folk':links[505:524],
'Hip hop & Rap':links[525:571], 'Jazz':links[572:623],
'Latin':links[624:687], 'Pop':links[688:755],
'R&B & Soul':links[756:774], 'Rock':links[775:919]}
for key, list_genre in general_genres.items():
clean_genres = []
clean_genres = [g.split('\n') for g in list_genre]
all_clean_genres = list(itertools.chain.from_iterable(clean_genres))
# Remove duplicate values
set_genres = list(set(all_clean_genres))
general_genres[key] = [g.lower().replace('-', ' ') for g in set_genres]
# Add edm to Electronic
general_genres['Electronic'].append('edm')
general_genres['Electronic'].append('house')
general_genres['African'].append('afrobeats')
general_genres['African'].append('afropop')
general_genres['Latin'].remove('folk')
return general_genres
def available_data_spotify_features():
spotify_features = {'acousticness':True,'danceability':True,'energy':True,'duration_ms':True,
'instrumentalness':True,'liveness':True,'loudness':True, 'mode':True, 'key':True,
'speechiness':True,'tempo':True, 'valence':True, 'Name':True, 'Artist':True,
'Artist_Popularity':True, 'Artist_followers':True,
'Genre':False, 'Release Date':False,
'Playlist':True, 'id':True}
return spotify_features
def available_data_popularity():
popularity_data = {'Popularity':True, 'Popularity_Class':True,'General_genre':True}
return popularity_data
def available_data_audio_features(mfcc=(40,True),chromagram=(12,True),melSpectrogram=(128,True), spect_contr=(7,True),
tonnetz=(6,True)):
| cols_mfccs = ['MFCSS_' + str(i + 1) for i in range(mfcc[0])]
k = [mfcc[1] for i in range(mfcc[0])]
audio_features = dict(zip(cols_mfccs, k))
cols_chroma = ['Chroma_' + str(i + 1) for i in range(chromagram[0])]
k = [chromagram[1] for i in range(chromagram[0])]
audio_features.update(zip(cols_chroma, k))
cols_mel = ['Mel_' + str(i + 1) for i in range(melSpectrogram[0])]
k = [melSpectrogram[1] for i in range(melSpectrogram[0])]
audio_features.update(zip(cols_mel, k))
cols_contrast = ['Spectral_contrast_' + str(i + 1) for i in range(spect_contr[0])]
k = [spect_contr[1] for i in range(spect_contr[0])]
audio_features.update(zip(cols_contrast, k))
cols_tonnetz = ['Tonnetz_' + str(i + 1) for i in range(tonnetz[0])]
k = [tonnetz[1] for i in range(tonnetz[0])]
audio_features.update(zip(cols_tonnetz, k))
| identifier_body |
|
data_helper.py | ':
valid_language = 'kazakh'
#
elif language == 'ne':
valid_language = 'nepali'
elif language == 'no':
valid_language = 'norwegian'
elif language == 'pt':
valid_language = 'portuguese'
elif language == 'ro':
valid_language = 'romanian'
elif language == 'ru':
valid_language = 'russian'
#
elif language == 'es':
valid_language = 'spanish'
elif language == 'sv':
valid_language = 'swedish'
elif language == 'tr':
valid_language = 'turkish'
else:
valid_language = 'NOT VALID' # By default
return valid_language
def do_analysis(row):
error_message = 'we are not licensed to display the full lyrics for this song at the moment'
if (row is not np.nan and row != '' and
row is not None and error_message not in row):
return True
else:
return False
def clean_song_name(song_name):
# Remove parenthesis
done = 0
while done !=1:
start = song_name.find( '(' )
end = song_name.find( ')' )
if start != -1 and end != -1:
result = song_name[start+1:end]
removed_str = '(' + result + ')'
song_name = song_name.replace(removed_str, '')
else:
done = 1
# Remove brackets
done = 0
while done !=1:
start = song_name.find( '[' )
end = song_name.find( ']' )
if start != -1 and end != -1:
result = song_name[start+1:end]
removed_str = '[' + result + ']'
song_name = song_name.replace(removed_str, '')
else:
done = 1
# Remove remix
song_name = song_name.replace('remix', '').replace('Remix', '')
# Remove dash
start = song_name.find(' - ')
if start != -1:
removed_str = song_name[start:]
song_name = song_name.replace(removed_str, '')
return song_name
def check_infinity_data(history):
history_new = history
# Check if there is an infinity data point
if False in np.isfinite(history):
# Replace non-finite values (NaN or inf) with 99
valid_idx = list(np.where(np.isfinite(history))[0])
for idx, val in enumerate(history):
# Not finite value
if idx not in valid_idx:
history_new[idx] = 99
return history_new
def get_model_subdir(semantic_an=True, metadata=True, audio_LL=True):
model_subDir = ''
if not semantic_an and not metadata and not audio_LL:
print('Not a valid analysis')
return None
elif not semantic_an and not metadata and audio_LL:
model_subDir = 'audio_models'
elif not semantic_an and metadata and not audio_LL:
model_subDir = 'metadata_models'
elif not semantic_an and metadata and audio_LL:
model_subDir = 'metadata_audio_models'
elif semantic_an and not metadata and not audio_LL:
model_subDir = 'semantic_models'
elif semantic_an and not metadata and audio_LL:
model_subDir = 'semantic_audio_models'
elif semantic_an and metadata and not audio_LL:
model_subDir = 'semantic_metadata_models'
elif semantic_an and metadata and audio_LL:
model_subDir = 'semantic_metadata_audio_models'
return model_subDir
def get_popular_genres():
WIKI_URL = "https://en.wikipedia.org/wiki/List_of_popular_music_genres"
req = requests.get(WIKI_URL)
b = BeautifulSoup(req.content, 'html.parser')
links = []
# in this case, all of the links were inside '<li>' tags.
for i in b.find_all(name = 'li'):
links.append(i.text)
general_genres = {'African':links[81:127], 'Asian':links[128:132],
'East Asian':links[133:145],
'South & southeast Asian':links[146:164],
'Avant-garde':links[165:169],'Blues':links[170:196],
'Caribbean':links[197:233], 'Comedy':links[234:237],
'Country':links[238:273], 'Easy listening':links[274:280],
'Electronic':links[280:504], 'Folk':links[505:524],
'Hip hop & Rap':links[525:571], 'Jazz':links[572:623],
'Latin':links[624:687], 'Pop':links[688:755],
'R&B & Soul':links[756:774], 'Rock':links[775:919]}
for key, list_genre in general_genres.items():
clean_genres = []
clean_genres = [g.split('\n') for g in list_genre]
all_clean_genres = list(itertools.chain.from_iterable(clean_genres))
# Remove duplicate values
set_genres = list(set(all_clean_genres))
general_genres[key] = [g.lower().replace('-', ' ') for g in set_genres]
# Add edm to Electronic
general_genres['Electronic'].append('edm')
general_genres['Electronic'].append('house')
general_genres['African'].append('afrobeats')
general_genres['African'].append('afropop')
general_genres['Latin'].remove('folk')
return general_genres
def available_data_spotify_features():
spotify_features = {'acousticness':True,'danceability':True,'energy':True,'duration_ms':True,
'instrumentalness':True,'liveness':True,'loudness':True, 'mode':True, 'key':True,
'speechiness':True,'tempo':True, 'valence':True, 'Name':True, 'Artist':True,
'Artist_Popularity':True, 'Artist_followers':True,
'Genre':False, 'Release Date':False,
'Playlist':True, 'id':True}
return spotify_features
def available_data_popularity():
popularity_data = {'Popularity':True, 'Popularity_Class':True,'General_genre':True}
return popularity_data
def available_data_audio_features(mfcc=(40,True),chromagram=(12,True),melSpectrogram=(128,True), spect_contr=(7,True),
tonnetz=(6,True)):
cols_mfccs = ['MFCSS_' + str(i + 1) for i in range(mfcc[0])]
k = [mfcc[1] for i in range(mfcc[0])]
audio_features = dict(zip(cols_mfccs, k))
cols_chroma = ['Chroma_' + str(i + 1) for i in range(chromagram[0])]
k = [chromagram[1] for i in range(chromagram[0])]
    audio_features.update(zip(cols_chroma, k))
cols_mel = ['Mel_' + str(i + 1) for i in range(melSpectrogram[0])]
k = [melSpectrogram[1] for i in range(melSpectrogram[0])]
audio_features.update(zip(cols_mel, k))
cols_contrast = ['Spectral_contrast_' + str(i + 1) for i in range(spect_contr[0])]
k = [spect_contr[1] for i in range(spect_contr[0])]
audio_features.update(zip(cols_contrast, k))
cols_tonnetz = ['Tonnetz_' + str(i + 1) for i in range(tonnetz[0])]
k = [tonnetz[1] for i in range(tonnetz[0])]
audio_features.update(zip(cols_tonnetz, k))
return audio_features
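# The flag dictionaries above act as column filters; a usage sketch that drops
# the chromagram block while keeping everything else:
def demo_audio_feature_selection():
    features = available_data_audio_features(mfcc=(40, True), chromagram=(12, False),
                                             melSpectrogram=(128, True),
                                             spect_contr=(7, True), tonnetz=(6, True))
    enabled_cols = [name for name, keep in features.items() if keep]
    # e.g. df_subset = df[enabled_cols] on a dataframe with these columns
    print(len(enabled_cols))  # 40 + 128 + 7 + 6 = 181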
def available_data_semantic_features():
semantic_features = {'Flesch_reading_ease':True, 'Sentence_similarity':True, 'Freq_distribution_coeff':True,
'lexical_coefficient':True, 'Average_syllables_length':True, 'Average_sentences_length':True,
'Number_sentebnes':True}
return semantic_features
def load_X_y_data(data_dir=None, filenames=None):
# Filenames is a list where the first position belongs to X_train file and the second to y_train
X = read_dataframe(root_dir=data_dir, filename=filenames[0])
y = read_dataframe(root_dir=data_dir, filename=filenames[1])
return X, y
def get_open_port():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(("",0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return str(port)
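# get_open_port works by binding to port 0 and letting the OS pick a free
# ephemeral port; note there is a small race window before the caller rebinds
# it. A usage sketch:
def demo_open_port():
    port = int(get_open_port())
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    probe.bind(("", port))  # should still be free immediately after lookup
    probe.close()
    return port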
def str2bool(v):
    # Assumed implementation (conventional truthy-string coercion)
    return str(v).lower() in ('yes', 'true', 't', '1')
|
data_helper.py | file_prev_audio = ConfigSectionMap(config, 'DATA SOURCES')['filename_prev']
file_pop_pred = ConfigSectionMap(config, 'DATA SOURCES')['filename_pop_pred']
file_best_weights = ConfigSectionMap(config, 'DATA SOURCES')['filename_best_weights']
file_music_dataset = ConfigSectionMap(config, 'DATA SOURCES')['filename_music_dataset']
file_final_res = ConfigSectionMap(config, 'DATA SOURCES')['filename_final_results']
sources = {'Filename_audio_HL': file_audio_HL,
'Filename_audio_LL': file_audio_LL,
'Filename_lyrics':file_lyrics,
'Filename_semantic':file_semantic,
'Filename_prew_audio': file_prev_audio,
'Filename_pop_pred': file_pop_pred,
'Filename_best_weights':file_best_weights,
'Filename_music_dataset':file_music_dataset,
'Filename_final_results':file_final_res}
return sources
def read_data_directories(filename='config.ini'):
config = cp.ConfigParser()
config.read(filename)
directories = {}
saved_dir = ConfigSectionMap(config, 'DATA DIRECTORIES')['saved_dir']
data_dir = ConfigSectionMap(config, 'DATA DIRECTORIES')['data_dir']
saved_models = ConfigSectionMap(config, 'DATA DIRECTORIES')['saved_models']
saved_audio_dir = ConfigSectionMap(config, 'DATA DIRECTORIES')['saved_audio_dir']
cv_dir = ConfigSectionMap(config, 'DATA DIRECTORIES')['cv_dir']
ml_dataset_dir = ConfigSectionMap(config, 'DATA DIRECTORIES')['datasets_dir']
final_res_dir = ConfigSectionMap(config, 'DATA DIRECTORIES')['evaluation_dir']
directories = {'saved_directory': saved_dir,
'data_directory': data_dir,
'saved_audio_directory':saved_audio_dir,
'saved_models':saved_models,
'cv_dir':cv_dir,
'ml_dataset_dir':ml_dataset_dir,
'validation_res_dir': final_res_dir}
return directories
def get_date():
current_date = datetime.datetime.now()
final_date = current_date.strftime("%Y%m%d")
return final_date
def save_dataframe(filename, df, root_dir='data', enc='utf8'):
data_dir = os.path.join(root_dir, filename)
if not os.path.exists(root_dir):
os.makedirs(root_dir)
df.to_csv(data_dir,encoding=enc)
print('Data Stored correctly!\n')
return True
def read_dataframe(filename, root_dir = 'data', enc='utf8'):
data_dir = os.path.join(root_dir, filename)
tp = pd.read_csv(data_dir, encoding=enc, index_col=0,
iterator=True, chunksize=100)
df = pd.concat(tp, ignore_index=True)
    df = df.infer_objects()  # re-infer column dtypes after the chunked read
return df
def save_dataframe_as_hdfs(filename, df, root_dir='data',enc='utf8'):
data_dir = os.path.join(root_dir, filename)
if not os.path.exists(root_dir):
os.makedirs(root_dir)
client_hdfs = InsecureClient('http://' + os.environ['IP_HDFS'] + ':50070')
with client_hdfs.write(data_dir, encoding = enc) as writer:
df.to_csv(writer)
return True
def read_hdfs(filename, root_dir='data'):
data_dir = os.path.join(root_dir, filename)
client_hdfs = InsecureClient('http://' + os.environ['IP_HDFS'] + ':50070')
with client_hdfs.read(data_dir, encoding = 'latin-1') as reader:
df = pd.read_csv(reader,index_col=0)
return df
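# Both HDFS helpers assume an IP_HDFS environment variable pointing at the
# WebHDFS endpoint of the namenode. A round-trip sketch (hypothetical address):
def demo_hdfs_roundtrip():
    os.environ.setdefault('IP_HDFS', '10.0.0.5')
    df = pd.DataFrame({'Popularity': [42]})
    save_dataframe_as_hdfs('demo.csv', df, root_dir='data')
    return read_hdfs('demo.csv', root_dir='data')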
def get_text_language(language):
valid_language = ''
if language =='en':
valid_language = 'english'
elif language == 'ar':
valid_language = 'arabic'
elif language == 'az':
valid_language = 'azerbaijani'
elif language == 'da':
valid_language = 'danish'
elif language == 'nl':
valid_language = 'dutch'
elif language == 'fr':
valid_language = 'french'
elif language == 'de':
valid_language = 'german'
elif language == 'el':
valid_language = 'greek'
elif language == 'hu':
valid_language = 'hungarian'
elif language == 'id':
valid_language = 'indonesian'
elif language == 'it':
valid_language = 'italian'
elif language == 'kk':
valid_language = 'kazakh'
#
elif language == 'ne':
valid_language = 'nepali'
elif language == 'no':
valid_language = 'norwegian'
elif language == 'pt':
valid_language = 'portuguese'
elif language == 'ro':
valid_language = 'romanian'
elif language == 'ru':
valid_language = 'russian'
#
elif language == 'es':
valid_language = 'spanish'
elif language == 'sv':
valid_language = 'swedish'
elif language == 'tr':
valid_language = 'turkish'
else:
valid_language = 'NOT VALID' # By default
return valid_language
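# An equivalent lookup-table form of get_text_language (same codes as above):
LANG_BY_CODE = {
    'en': 'english', 'ar': 'arabic', 'az': 'azerbaijani', 'da': 'danish',
    'nl': 'dutch', 'fr': 'french', 'de': 'german', 'el': 'greek',
    'hu': 'hungarian', 'id': 'indonesian', 'it': 'italian', 'kk': 'kazakh',
    'ne': 'nepali', 'no': 'norwegian', 'pt': 'portuguese', 'ro': 'romanian',
    'ru': 'russian', 'es': 'spanish', 'sv': 'swedish', 'tr': 'turkish',
}

def get_text_language_compact(language):
    return LANG_BY_CODE.get(language, 'NOT VALID')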
def do_analysis(row):
    error_message = 'we are not licensed to display the full lyrics for this song at the moment'
    # Identity checks such as `row is not np.nan` are unreliable; require an
    # actual non-empty string without the license notice instead.
    if isinstance(row, str) and row != '' and error_message not in row:
        return True
    return False
def clean_song_name(song_name):
# Remove parenthesis
done = 0
while done !=1:
start = song_name.find( '(' )
end = song_name.find( ')' )
if start != -1 and end != -1:
result = song_name[start+1:end]
removed_str = '(' + result + ')'
song_name = song_name.replace(removed_str, '')
else:
done = 1
# Remove brakets
done = 0
while done !=1:
start = song_name.find( '[' )
end = song_name.find( ']' )
if start != -1 and end != -1:
result = song_name[start+1:end]
removed_str = '[' + result + ']'
song_name = song_name.replace(removed_str, '')
else:
done = 1
# Remove remix
song_name = song_name.replace('remix', '').replace('Remix', '')
# Remove dash
start = song_name.find(' - ')
if start != -1:
removed_str = song_name[start:]
song_name = song_name.replace(removed_str, '')
return song_name
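# Illustrative inputs for the stripping above (note the function does not
# strip() leftover whitespace):
#   clean_song_name('Song (feat. X)')              -> 'Song '
#   clean_song_name('Track [Live] - 2011 Version') -> 'Track '
#   clean_song_name('Tune Remix')                  -> 'Tune '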
Minebot.py | 4', 'Diamond'], ['265', 'Iron Ingot'],
['266', 'Gold Ingot'], ['267', 'Iron Sword'], ['268', 'Wooden Sword'], ['269', 'Wooden Shovel'], ['270', 'Wooden Pickaxe'], ['271', 'Wooden Axe'], ['272', 'Stone Sword'],
['273', 'Stone Shovel'], ['274', 'Stone Pickaxe'], ['275', 'Stone Axe'], ['276', 'Diamond Sword'], ['277', 'Diamond Shovel'], ['278', 'Diamond Pickaxe'], ['279', 'Diamond Axe'],
['280', 'Stick'], ['281', 'Bowl'], ['282', 'Mushroom Soup'], ['283', 'Gold Sword'], ['284', 'Gold Shovel'], ['285', 'Gold Pickaxe'], ['286', 'Gold Axe'], ['287', 'String'],
['288', 'Feather'], ['289', 'Sulphur'], ['290', 'Wooden Hoe'], ['291', 'Stone Hoe'], ['292', 'Iron Hoe'], ['293', 'Diamond Hoe'], ['294', 'Gold Hoe'], ['295', 'Seeds'], ['296', 'Wheat'],
['297', 'Bread'], ['298', 'Leather Helmet'], ['299', 'Leather Chestplate'], ['300', 'Leather Leggings'], ['301', 'Leather Boots'], ['302', 'Chainmail Helmet'],
['303', 'Chainmail Chestplate'], ['304', 'Chainmail Leggings'], ['305', 'Chainmail Boots'], ['306', 'Iron Helmet'], ['307', 'Iron Chestplate'], ['308', 'Iron Leggings'],
['309', 'Iron Boots'], ['310', 'Diamond Helmet'], ['311', 'Diamond Chestplate'], ['312', 'Diamond Leggings'], ['313', 'Diamond Boots'], ['314', 'Gold Helmet'], ['315', 'Gold Chestplate'],
['316', 'Gold Leggings'], ['317', 'Gold Boots'], ['318', 'Flint'], ['319', 'Raw Porkchop'], ['320', 'Cooked Porkchop'], ['321', 'Painting'], ['322', 'Golden Apple'], ['323', 'Sign'],
['324', 'Wooden Door'], ['325', 'Bucket'], ['326', 'Water Bucket'], ['327', 'Lava Bucket'], ['328', 'Minecart'], ['329', 'Saddle'], ['330', 'Iron Door'], ['331', 'Redstone'],
['332', 'Snowball'], ['333', 'Boat'], ['334', 'Leather'], ['335', 'Milk Bucket'], ['336', 'Clay Brick'], ['337', 'Clay Balls'], ['338', 'Sugarcane'], ['339', 'Paper'], ['340', 'Book'],
['341', 'Slimeball'], ['342', 'Storage Minecart'], ['343', 'Powered Minecart'], ['344', 'Egg'], ['345', 'Compass'], ['346', 'Fishing Rod'], ['347', 'Clock'], ['348', 'Glowstone Dust'],
['349', 'Raw Fish'], ['350', 'Cooked Fish'], ['351', 'Ink Sack'], ['351:1', 'Rose Red'], ['351:2', 'Cactus Green'], ['351:3', 'Coco Beans'], ['351:4', 'Lapis Lazuli'],
['351:5', 'Purple Dye'], ['351:6', 'Cyan Dye'], ['351:7', 'Light Gray Dye'], ['351:8', 'Gray Dye'], ['351:9', 'Pink Dye'], ['351:10', 'Lime Dye'], ['351:11', 'Dandelion Yellow'],
['351:12', 'Light Blue Dye'], ['351:13', 'Magenta Dye'], ['351:14', 'Orange Dye'], ['351:15', 'Bone Meal'], ['352', 'Bone'], ['353', 'Sugar'], ['354', 'Cake'], ['355', 'Bed'],
['356', 'Redstone Repeater'], ['357', 'Cookie'], ['358', 'Map'], ['359', 'Shears'], ['2256', 'Gold Music Disc'], ['2257', 'Green Music Disc']]
items = map(lambda x:[x[0],x[1].lower()],items)
def search(name):
name = name.lower()
match = lambda i:name in str(" ".join(i[1:]))
exact = lambda i:name == str(" ".join(i[1:]))
res = filter(match,items)
if not res:
toMinecraft("No items found")
return
    sres = filter(exact,items)
    if sres:
        toMinecraft("ID: "+str(sres[0]))
        return sres[0][0]
    if len(res)==1:
        toMinecraft("ID: "+str(res[0]))
        return res[0][0]
if len(res) > 1:
lst = ", ".join(map(lambda x:" ".join(x[1:]),res))
toMinecraft("Multiple matches: "+lst)
return None
def give(player,name,quant=1):
try:
print "give",player,name,quant
num = None
try:
num = int(name)
        except ValueError:
num = search(name)
print num
if num == None:
raise Exception()
qnt = int(quant)
print qnt
global mc
        while qnt > 0:
            # the server's give command hands out at most one stack (64) at a time
            mc.send("give %s %s %s\n" % (player, num, min(qnt, 64)))
            qnt -= 64
except:
toMinecraft("Give failed.")
bot = None
mc = None
running = False
def on_load(b):
b.join("#mc")
global bot,mc,running
bot = b
mc = socket.socket()
# bot.say("#adullam","Connecting to minecraft")
try:
mc.connect(("gene.indstate.edu",10240))
running = True
# bot.say("#adullam","Connected!")
toMinecraft("Plugin activated")
thread.start_new_thread(mcThread,(None,))
    except Exception as e:
        bot.say("#adullam","Failed to connect")
        raise e
def Lookup(shorthand):
print "Lookup:",shorthand
mc.send("list\n")
resp = mc.recv(1024)
if "Connected players:" not in resp:
toMinecraft("Something strange happend...")
toMinecraft(resp)
return shorthand
players = resp.split("players:")[1].replace(",","").split()
matches = filter(lambda x:shorthand in x, players)
if len(matches)==1:
return matches[0]
if len(matches)>1:
toMinecraft("Matches: %r"%matches)
if len(matches)==0:
toMinecraft("No matches.")
return shorthand
def TP(src,dst):
mc.send("tp {} {}\n".format(src,dst))
def mcThread(args):
global running,mc,bot
buffer = ""
while running:
        tmp = mc.recv(100)
        if not tmp:
            break  # connection closed by the server
        buffer += tmp
        if buffer.count("\n") == 0:
            continue
line,buffer=buffer.split("\n",1)
msg = re.match("^.*INFO] <(.*)> (.*)$",line)
if msg:
g = msg.groups()
toAdullam = g[1].startswith("t ")
msg = g[1]
if toAdullam:
msg=g[1][2:]
tp = re.match("!tp (.*) (.*)$",msg)
if tp:
TP(Lookup(tp.groups()[0]),Lookup(tp.groups()[1]))
continue
tp = re.match("!tp (.*)$",msg)
if tp:
TP(g[0],Lookup(tp.groups()[0]))
continue
cheats = """
id = re.match("!id (.*)",msg)
if id:
search(id.groups()[0])
continue
gi = re.match("!give (.*) (.*)",msg)
if gi:
give(g[0],*gi.groups())
continue
ar = re.match("!armor", msg)
if ar:
give(g[0],"diamond helmet")
give(g[0],"diamond chestplate")
give(g[0],"diamond legging")
give(g[0],"diamond boots")
"""#"""
judger.rs | use heng_protocol::internal::{ConnectionSettings, ErrorInfo, PartialConnectionSettings};
use heng_protocol::internal::ws_json::{
CreateJudgeArgs, FinishJudgeArgs, Message as RpcMessage, ReportStatusArgs,
Request as RpcRequest, Response as RpcResponse, UpdateJudgeArgs,
};
use std::sync::atomic::{AtomicU32, AtomicU64, Ordering::Relaxed};
use std::sync::Arc;
use std::time::Duration;
use anyhow::Result;
use chrono::Utc;
use dashmap::DashMap;
use futures::stream::SplitStream;
use futures::StreamExt;
use futures::TryFutureExt;
use serde::Serialize;
use serde_json::value::RawValue;
use tokio::sync::{mpsc, oneshot, Mutex};
use tokio::{task, time};
use tokio_stream::wrappers::ReceiverStream;
use tokio_tungstenite::tungstenite;
use tracing::{debug, error, info, warn};
use tungstenite::protocol::frame::coding::CloseCode;
use tungstenite::protocol::CloseFrame;
pub struct Judger {
settings: Settings,
counter: Mutex<Counter>,
session: WsSession,
rpc_timeout: u64,
}
struct WsSession {
sender: mpsc::Sender<WsMessage>,
seq: AtomicU32,
callbacks: DashMap<u32, oneshot::Sender<RpcResponse>>,
}
struct Settings {
status_report_interval: AtomicU64,
}
#[derive(Debug, Clone)]
struct Counter {
pending: u64,
judging: u64,
finished: u64,
}
impl Judger {
pub async fn run(ws_stream: WsStream) -> Result<()> {
let config = inject::<Config>();
let (ws_sink, ws_stream) = ws_stream.split();
let (tx, rx) = mpsc::channel::<WsMessage>(4096);
task::spawn(
ReceiverStream::new(rx)
.map(Ok)
.forward(ws_sink)
.inspect_err(|err| error!(%err, "ws forward error")),
);
let judger = Arc::new(Self {
settings: Settings {
status_report_interval: AtomicU64::new(1000),
},
session: WsSession {
sender: tx,
seq: AtomicU32::new(0),
callbacks: DashMap::new(),
},
counter: Mutex::new(Counter {
pending: 0,
judging: 0,
finished: 0,
}),
rpc_timeout: config.judger.rpc_timeout,
});
task::spawn(judger.clone().report_status_loop());
judger.main_loop(ws_stream).await
}
    async fn main_loop(self: Arc<Self>, mut ws_stream: SplitStream<WsStream>) -> Result<()> {
        info!("starting main loop");
        while let Some(frame) = ws_stream.next().await {
            use tungstenite::Message::*;
            let frame = frame?;
            match frame {
                Close(reason) => {
                    warn!(?reason, "ws session closed");
                    return Ok(());
                }
                Text(text) => {
                    let rpc_msg: RpcMessage = match serde_json::from_str(&text) {
                        Ok(m) => m,
                        Err(err) => {
                            error!(%err, "internal protocol: message format error:\n{:?}\n", text);
                            let close_frame = CloseFrame {
                                code: CloseCode::Invalid,
                                reason: "internal protocol message format error".into(),
                            };
let _ = self.session.sender.send(Close(Some(close_frame))).await;
return Err(err.into());
}
};
match rpc_msg {
RpcMessage::Request { seq, body, .. } => {
let this = self.clone();
task::spawn(async move {
let response = this.clone().handle_rpc_request(body).await;
let rpc_msg = RpcMessage::Response {
seq,
time: Utc::now(),
body: response,
};
let ws_msg =
WsMessage::text(serde_json::to_string(&rpc_msg).unwrap());
let _ = this.session.sender.send(ws_msg).await;
});
}
RpcMessage::Response { seq, body, .. } => {
match self.session.callbacks.remove(&seq) {
None => warn!(?seq, "no such callback"),
Some((_, cb)) => match cb.send(body) {
Ok(()) => {}
Err(_) => warn!(?seq, "the callback is timeouted"),
},
}
}
}
}
_ => {
warn!("drop ws message");
drop(frame);
}
}
}
Ok(())
}
async fn report_status_loop(self: Arc<Self>) -> Result<()> {
loop {
let delay = self.settings.status_report_interval.load(Relaxed);
time::sleep(Duration::from_millis(delay)).await;
let result = self
.wsrpc(RpcRequest::ReportStatus(ReportStatusArgs {
collect_time: Utc::now(),
next_report_time: Utc::now() + chrono::Duration::milliseconds(delay as i64),
report: None, // FIXME
}))
.await;
let cnt = self.count(|cnt| cnt.clone()).await;
match result {
Ok(RpcResponse::Output(None)) => {
debug!(interval=?delay, count=?cnt, "report status")
}
Ok(RpcResponse::Output(Some(value))) => warn!(?value, "unexpected response"),
Ok(RpcResponse::Error(err)) => warn!(%err, "report status"),
Err(_) => warn!("the request failed"),
}
}
}
async fn handle_rpc_request(self: Arc<Self>, req: RpcRequest) -> RpcResponse {
match req {
RpcRequest::CreateJudge(args) => to_null_response(self.create_judge(args).await),
RpcRequest::Control(args) => to_response(self.control(args).await),
_ => RpcResponse::Error(ErrorInfo {
code: ErrorCode::NotSupported,
message: None,
}),
}
}
async fn wsrpc(&self, req: RpcRequest) -> Result<RpcResponse> {
let session = &self.session;
let seq = session.seq.fetch_add(1, Relaxed).wrapping_add(1);
let (tx, rx) = oneshot::channel();
let rpc_msg = RpcMessage::Request {
seq,
time: Utc::now(),
body: req,
};
let ws_msg = WsMessage::text(serde_json::to_string(&rpc_msg).unwrap());
{
session.callbacks.insert(seq, tx);
session.sender.send(ws_msg).await.unwrap();
}
match time::timeout(Duration::from_millis(self.rpc_timeout), rx).await {
Ok(res) => Ok(res.unwrap()),
Err(err) => {
let _ = session.callbacks.remove(&seq);
return Err(anyhow::Error::new(err));
}
}
}
async fn count<T>(&self, f: impl FnOnce(&mut Counter) -> T) -> T {
let mut counter = self.counter.lock().await;
f(&mut counter)
}
async fn control(
&self,
settings: Option<PartialConnectionSettings>,
) -> Result<ConnectionSettings> {
if let Some(settings) = settings {
if let Some(interval) = settings.status_report_interval {
self.settings
.status_report_interval
.store(interval, Relaxed);
}
}
let current_settings = ConnectionSettings {
status_report_interval: self.settings.status_report_interval.load(Relaxed),
};
Ok(current_settings)
}
async fn create_judge(self: Arc<Self>, judge: CreateJudgeArgs) -> Result<()> {
task::spawn(async move {
self.count(|cnt| cnt.pending += 1).await;
self.count(|cnt| {
cnt.pending -= 1;
cnt.judging += 1;
})
.await;
let finish = FinishJudgeArgs {
id: judge.id.clone(),
result: JudgeResult {
cases: Vec::new(),
extra: None,
},
};
self.count(|cnt| {
cnt.judging -= 1;
cnt.finished += 1;
})
.await;
self.finish_judge(finish).await
});
Ok(())
}
async fn update_judge(&self, update: UpdateJudgeArgs) -> Result<()> {
let res = self.wsrpc(RpcRequest::UpdateJudge(update)).await?;
let output = to_anyhow(res)?;
if output.is_some() {
warn!(?output, "unexpected output")
}
Ok(())
}
async fn finish_judge(&self, finish: FinishJudgeArgs) -> Result<()> {
let res = self.wsrpc(RpcRequest::FinishJudge(finish)).await?;
let output = to_anyhow(res)?;
if output.is_some() {
warn!(?output, "unexpected output")
}
Ok(())
}
}
fn to_response<T: Serialize>(result: Result<T>) -> RpcResponse {
match result {
Ok(value) => {
let raw_value = RawValue::from_string(serde_json::to_string(&value).unwrap()).unwrap();
RpcResponse::Output(Some(raw_value))
}
Err(err) => RpcResponse::Error(ErrorInfo {
code: ErrorCode::UnknownError,
message: Some(err.to_string()),
}),
}
}
fn to_null_response(result: Result<()>) -> RpcResponse {
    // Assumed completion: mirrors to_response, but success carries no payload
    match result {
        Ok(()) => RpcResponse::Output(None),
        Err(err) => RpcResponse::Error(ErrorInfo {
            code: ErrorCode::UnknownError,
            message: Some(err.to_string()),
        }),
    }
}
augment.py | 1, 0)
"""
def __init__(self, swaps):
self.swaps = swaps
def __call__(self, image):
"""
Args:
image (Tensor): image tensor to be transformed
Return:
a tensor with channels swapped according to swap
"""
# if torch.is_tensor(image):
# image = image.data.cpu().numpy()
# else:
# image = np.array(image)
image = image[:, :, self.swaps]
return image
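# SwapChannels just reindexes the channel axis; e.g. BGR -> RGB is (2, 1, 0).
def demo_swap_channels():
    img = np.zeros((2, 2, 3), dtype=np.float32)
    img[..., 0] = 1.0                      # blue channel in BGR order
    rgb = SwapChannels((2, 1, 0))(img)
    return rgb[0, 0]                       # array([0., 0., 1.])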
class RandomLightingNoise(object):
def __init__(self):
self.perms = ((0, 1, 2), (0, 2, 1),
(1, 0, 2), (1, 2, 0),
(2, 0, 1), (2, 1, 0))
    def __call__(self, image, boxes=None, labels=None):
        # `random` must be numpy.random here: stdlib randint requires two arguments
        if random.randint(2):
swap = self.perms[random.randint(len(self.perms))]
shuffle = SwapChannels(swap) # shuffle channels
image = shuffle(image)
return image, boxes, labels
class ConvertColor(object):
def __init__(self, current='BGR', transform='HSV'):
self.transform = transform
self.current = current
def __call__(self, image, boxes=None, labels=None):
if self.current == 'BGR' and self.transform == 'HSV':
image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
elif self.current == 'HSV' and self.transform == 'BGR':
image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR)
else:
raise NotImplementedError
return image, boxes, labels
class RandomContrast(object):
def __init__(self, lower=0.5, upper=1.5):
self.lower = lower
self.upper = upper
assert self.upper >= self.lower, "contrast upper must be >= lower."
assert self.lower >= 0, "contrast lower must be non-negative."
# expects float image
def __call__(self, image, boxes=None, labels=None):
if random.randint(2):
alpha = random.uniform(self.lower, self.upper)
image *= alpha
return image, boxes, labels
class RandomBrightness(object):
def __init__(self, delta=32):
assert delta >= 0.0
assert delta <= 255.0
self.delta = delta
def __call__(self, image, boxes=None, labels=None):
if random.randint(2):
delta = random.uniform(-self.delta, self.delta)
image += delta
return image, boxes, labels
class PhotometricDistort(object):
def __init__(self):
self.pd = [
RandomContrast(),
ConvertColor(transform='HSV'),
RandomSaturation(),
RandomHue(),
ConvertColor(current='HSV', transform='BGR'),
RandomContrast()
]
self.rand_brightness = RandomBrightness()
self.rand_light_noise = RandomLightingNoise()
def __call__(self, image, boxes, labels):
im = image.copy()
im, boxes, labels = self.rand_brightness(im, boxes, labels)
if random.randint(2):
distort = Compose(self.pd[:-1])
else:
distort = Compose(self.pd[1:])
im, boxes, labels = distort(im, boxes, labels)
return self.rand_light_noise(im, boxes, labels)
def intersect(box_a, box_b):
max_xy = np.minimum(box_a[:, 2:], box_b[2:])
min_xy = np.maximum(box_a[:, :2], box_b[:2])
inter = np.clip((max_xy - min_xy), a_min=0, a_max=np.inf)
return inter[:, 0] * inter[:, 1]
def jaccard_numpy(box_a, box_b):
"""Compute the jaccard overlap of two sets of boxes. The jaccard overlap
is simply the intersection over union of two boxes.
Args:
box_a: Multiple bounding boxes, Shape: [num_boxes,4]
box_b: Single bounding box, Shape: [4]
    Return:
        jaccard overlap: Shape: [box_a.shape[0]]
    """
inter = intersect(box_a, box_b)
area_a = ((box_a[:, 2] - box_a[:, 0]) *
(box_a[:, 3] - box_a[:, 1])) # [A,B]
area_b = ((box_b[2] - box_b[0]) *
(box_b[3] - box_b[1])) # [A,B]
union = area_a + area_b - inter
return inter / union # [A,B]
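# A quick worked example of the overlap math, with boxes in (x1, y1, x2, y2):
def demo_jaccard():
    boxes = np.array([[0., 0., 10., 10.], [5., 5., 15., 15.]])
    crop = np.array([0., 0., 10., 10.])
    # First box matches the crop exactly (IoU 1.0); the second overlaps in a
    # 5x5 patch: 25 / (100 + 100 - 25) = 1/7.
    return jaccard_numpy(boxes, crop)      # -> [1.0, 0.142857...]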
class Compose(object):
def __init__(self, transforms):
self.transforms = transforms
def __call__(self, img, boxes=None, labels=None):
for t in self.transforms:
img, boxes, labels = t(img, boxes, labels)
return img, boxes, labels
class Expand(object):
"""
expand是指对图像进行缩小,图像的其余区域补mean
"""
def __init__(self, mean):
self.mean = mean
    def __call__(self, image, boxes, labels):
if random.randint(5):
return image, boxes, labels
height, width, depth = image.shape
ratio = random.uniform(1, 4)
left = random.uniform(0, width * ratio - width)
top = random.uniform(0, height * ratio - height)
expand_image = np.zeros(
(int(height * ratio), int(width * ratio), depth),
dtype=image.dtype)
expand_image[:, :, :] = self.mean
expand_image[int(top):int(top + height),
int(left):int(left + width)] = image
image = expand_image
boxes = boxes.copy()
boxes[:, :2] += (int(left), int(top))
boxes[:, 2:] += (int(left), int(top))
return image, boxes, labels
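# Geometry of Expand: with probability 4/5 the input is returned unchanged
# (randint(5) is truthy for 1..4); otherwise the image lands on a canvas of
# size (ratio*h, ratio*w) and every box corner shifts by (+left, +top).
def demo_expand():
    img = np.full((4, 4, 3), 255, dtype=np.float32)
    boxes = np.array([[1.0, 1.0, 3.0, 3.0]])
    out_img, out_boxes, _ = Expand(mean=(104, 117, 123))(img, boxes, np.array([1]))
    return out_img.shape, out_boxes        # box size is preserved, only shifted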
class RandomSSDCrop(object):
"""Crop
Arguments:
img (Image): the image being input during training
boxes (Tensor): the original bounding boxes in pt form
labels (Tensor): the class labels for each bbox
mode (float tuple): the min and max jaccard overlaps
Return:
(img, boxes, classes)
img (Image): the cropped image
boxes (Tensor): the adjusted bounding boxes in pt form
labels (Tensor): the class labels for each bbox
"""
def __init__(self):
self.sample_options = (
# using entire original input image
None,
            # sample a patch s.t. MIN jaccard w/ obj in .1, .3, .7, .9
(0.1, None),
(0.3, None),
(0.7, None),
(0.9, None),
# randomly sample a patch
(None, None),
)
def __call__(self, image, boxes=None, labels=None):
"""
:param image: 3-d array,channel last
:param boxes: 2-d array,(num_gt,(x1,y1,x2,y2)
:param labels: 1-d array(num_gt)
:return:
"""
height, width, _ = image.shape
while True:
# randomly choose a mode
mode = random.choice(self.sample_options)
if mode is None:
return image, boxes, labels
min_iou, max_iou = mode
if min_iou is None:
min_iou = float('-inf')
if max_iou is None:
max_iou = float('inf')
# max trails (50)
for _ in range(50):
current_image = image
w = random.uniform(0.3 * width, width)
h = random.uniform(0.3 * height, height)
# aspect ratio constraint b/t .5 & 2
if h / w < 0.5 or h / w > 2:
continue
                # sample the top-left corner so the crop stays inside the image
                left = random.uniform(0, width - w)
                top = random.uniform(0, height - h)
# convert to integer rect x1,y1,x2,y2
rect = np.array([int(left), int(top), int(left + w), int(top + h)])
# calculate IoU (jaccard overlap) b/t the cropped and gt boxes
# print(boxes, rect)
overlap = jaccard_numpy(boxes, rect)
# is min and max overlap constraint satisfied? if not try again
if overlap.min() < min_iou and max_iou < overlap.max():
continue
# cut the crop from the image
current_image = current_image[rect[1]:rect[3], rect[0]:rect[2], :]
# keep overlap with gt box IF center in sampled patch
centers = (boxes[:, :2] + boxes[:, 2:]) / 2.0
# mask in all gt boxes that above and to the left of centers
m1 = (rect[0] < centers[:, 0]) * (rect[1] < centers[:, 1])
# mask in all gt boxes that under and to the right of centers
m2 = | elf, image, boxe | identifier_body |
augment.py | (box_a[:, 2:], box_b[2:])
min_xy = np.maximum(box_a[:, :2], box_b[:2])
inter = np.clip((max_xy - min_xy), a_min=0, a_max=np.inf)
return inter[:, 0] * inter[:, 1]
def jaccard_numpy(box_a, box_b):
"""Compute the jaccard overlap of two sets of boxes. The jaccard overlap
is simply the intersection over union of two boxes.
Args:
box_a: Multiple bounding boxes, Shape: [num_boxes,4]
box_b: Single bounding box, Shape: [4]
Return:
jaccard overlap: Shape: [box_a.shape[0], box_a.shape[1]]
"""
inter = intersect(box_a, box_b)
area_a = ((box_a[:, 2] - box_a[:, 0]) *
(box_a[:, 3] - box_a[:, 1])) # [A,B]
area_b = ((box_b[2] - box_b[0]) *
(box_b[3] - box_b[1])) # [A,B]
union = area_a + area_b - inter
return inter / union # [A,B]
class Compose(object):
def __init__(self, transforms):
self.transforms = transforms
def __call__(self, img, boxes=None, labels=None):
for t in self.transforms:
img, boxes, labels = t(img, boxes, labels)
return img, boxes, labels
class Expand(object):
"""
expand是指对图像进行缩小,图像的其余区域补mean
"""
def __init__(self, mean):
self.mean = mean
def __call__(self, image, boxes, labels):
if random.randint(5):
return image, boxes, labels
height, width, depth = image.shape
ratio = random.uniform(1, 4)
left = random.uniform(0, width * ratio - width)
top = random.uniform(0, height * ratio - height)
expand_image = np.zeros(
(int(height * ratio), int(width * ratio), depth),
dtype=image.dtype)
expand_image[:, :, :] = self.mean
expand_image[int(top):int(top + height),
int(left):int(left + width)] = image
image = expand_image
boxes = boxes.copy()
boxes[:, :2] += (int(left), int(top))
boxes[:, 2:] += (int(left), int(top))
return image, boxes, labels
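def _expand_demo():
    # Editor's sketch (hypothetical): Expand pastes the image at a random offset
    # on a mean-filled canvas up to 4x larger and shifts the boxes by the same
    # offset, so box geometry relative to the image content is preserved.
    image = np.zeros((4, 4, 3), dtype=np.float32)
    boxes = np.array([[1., 1., 3., 3.]])
    labels = np.array([1])
    return Expand(mean=(104, 117, 123))(image, boxes, labels)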
class RandomSSDCrop(object):
"""Crop
Arguments:
img (Image): the image being input during training
boxes (Tensor): the original bounding boxes in pt form
labels (Tensor): the class labels for each bbox
mode (float tuple): the min and max jaccard overlaps
Return:
(img, boxes, classes)
img (Image): the cropped image
boxes (Tensor): the adjusted bounding boxes in pt form
labels (Tensor): the class labels for each bbox
"""
def __init__(self):
self.sample_options = (
# using entire original input image
None,
            # sample a patch s.t. MIN jaccard w/ obj in .1, .3, .7, .9
(0.1, None),
(0.3, None),
(0.7, None),
(0.9, None),
# randomly sample a patch
(None, None),
)
def __call__(self, image, boxes=None, labels=None):
"""
:param image: 3-d array,channel last
        :param boxes: 2-d array, (num_gt, (x1, y1, x2, y2))
:param labels: 1-d array(num_gt)
:return:
"""
height, width, _ = image.shape
while True:
# randomly choose a mode
mode = random.choice(self.sample_options)
if mode is None:
return image, boxes, labels
min_iou, max_iou = mode
if min_iou is None:
min_iou = float('-inf')
if max_iou is None:
max_iou = float('inf')
            # max trials (50)
for _ in range(50):
current_image = image
w = random.uniform(0.3 * width, width)
h = random.uniform(0.3 * height, height)
# aspect ratio constraint b/t .5 & 2
if h / w < 0.5 or h / w > 2:
continue
                left = random.uniform(0, width - w)  # sample the patch offset in [0, width - w)
                top = random.uniform(0, height - h)
# convert to integer rect x1,y1,x2,y2
rect = np.array([int(left), int(top), int(left + w), int(top + h)])
# calculate IoU (jaccard overlap) b/t the cropped and gt boxes
# print(boxes, rect)
overlap = jaccard_numpy(boxes, rect)
# is min and max overlap constraint satisfied? if not try again
if overlap.min() < min_iou and max_iou < overlap.max():
continue
# cut the crop from the image
current_image = current_image[rect[1]:rect[3], rect[0]:rect[2], :]
# keep overlap with gt box IF center in sampled patch
centers = (boxes[:, :2] + boxes[:, 2:]) / 2.0
                # keep boxes whose centers lie below and to the right of the patch's top-left corner
                m1 = (rect[0] < centers[:, 0]) * (rect[1] < centers[:, 1])
                # ... and above and to the left of the patch's bottom-right corner
m2 = (rect[2] > centers[:, 0]) * (rect[3] > centers[:, 1])
# mask in that both m1 and m2 are true
mask = m1 * m2
# have any valid boxes? try again if not
if not mask.any():
continue
# take only matching gt boxes
current_boxes = boxes[mask, :].copy()
# take only matching gt labels
current_labels = labels[mask]
# should we use the box left and top corner or the crop's
current_boxes[:, :2] = np.maximum(current_boxes[:, :2], rect[:2])
                # adjust to crop (by subtracting crop's left,top)
current_boxes[:, :2] -= rect[:2]
current_boxes[:, 2:] = np.minimum(current_boxes[:, 2:], rect[2:])
current_boxes[:, 2:] -= rect[:2]
return current_image, current_boxes, current_labels
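def _random_ssd_crop_demo():
    # Editor's sketch (hypothetical): RandomSSDCrop keeps a ground-truth box only
    # when its center falls inside the sampled patch, then clips it to the patch.
    # Note the call can loop for a while until a patch satisfies the sampled mode.
    crop = RandomSSDCrop()
    image = np.random.rand(120, 160, 3).astype(np.float32)
    boxes = np.array([[10., 10., 60., 60.], [100., 80., 150., 110.]])
    labels = np.array([1, 2])
    return crop(image, boxes, labels)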
class RandomRotation(object):
"""
    Rotation augmentation supporting arbitrary angles. The rotated image is
    expanded automatically (it grows and keeps black corners), and the boxes'
    sizes and positions are adjusted to match.
"""
def __init__(self, angle=None, center=None, scale=1.0):
if angle is None:
self.angle = np.random.randint(-3, 3)
else:
self.angle = angle
self.center = center
self.scale = scale
def __call__(self, image, boxes, labels=None):
angle = self.angle % 360.0
img = Image.fromarray((image*255).astype(np.uint8))
h = img.size[1]
w = img.size[0]
center_x = int(np.floor(w / 2))
center_y = int(np.floor(h / 2))
img_rotate = img.rotate(angle, expand=True)
angle_pi = math.radians(angle)
        def transform(x, y):  # angle must be given in radians
return (x - center_x) * round(math.cos(angle_pi), 15) + \
(y - center_y) * round(math.sin(angle_pi), 15) + center_x, \
-(x - center_x) * round(math.sin(angle_pi), 15) + \
(y - center_y) * round(math.cos(angle_pi), 15) + center_y
xx = []
yy = []
for x, y in ((0, 0), (w, 0), (w, h), (0, h)):
x_, y_ = transform(x, y)
xx.append(x_)
yy.append(y_)
dx = abs(int(math.floor(min(xx))))
dy = abs(int(math.floor(min(yy))))
# print('dx,dy', dx, dy)
boxes_rot = []
for box in boxes:
xx = []
yy = []
for x, y in ((box[0], box[1]), (box[0], box[3]), (box[2], box[1]), (box[2], box[3])):
x_, y_ = tra | nsform(x, y)
xx.append(x_)
yy.append(y_)
box_rot = [min(xx) + dx, min(yy) + dy, max(xx) + dx, max(yy) + dy]
boxes_rot.append(box_rot)
        img_out, box_out = np.array(img_rotate) / 255.0, np.array(boxes_rot)
        return img_out, box_out, labels
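def _random_rotation_demo():
    # Editor's sketch (hypothetical): rotating expands the canvas (expand=True),
    # so the output is larger than the input and the returned boxes are the
    # axis-aligned hulls of the rotated corners, shifted into the new canvas.
    rot = RandomRotation(angle=5)
    image = np.ones((100, 200, 3), dtype=np.float32) * 0.5
    boxes = np.array([[20., 30., 80., 90.]])
    new_image, new_boxes, _ = rot(image, boxes)
    return new_image.shape, new_boxes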
# class Augmentation(object):
# def | conditional_block |
|
augment.py | (self, image, boxes=None, labels=None):
return image.astype(np.float32), boxes, labels
class RandomSaturation(object):
def __init__(self, lower=0.5, upper=1.5):
self.lower = lower
self.upper = upper
assert self.upper >= self.lower, "contrast upper must be >= lower."
assert self.lower >= 0, "contrast lower must be non-negative."
def __call__(self, image, boxes=None, labels=None):
if random.randint(2):
image[:, :, 1] *= random.uniform(self.lower, self.upper)
return image, boxes, labels
class RandomHue(object):
def __init__(self, delta=18.0):
assert delta >= 0.0 and delta <= 360.0
self.delta = delta
def __call__(self, image, boxes=None, labels=None):
if random.randint(2):
image[:, :, 0] += random.uniform(-self.delta, self.delta)
image[:, :, 0][image[:, :, 0] > 360.0] -= 360.0
image[:, :, 0][image[:, :, 0] < 0.0] += 360.0
return image, boxes, labels
class SwapChannels(object):
"""Transforms a tensorized image by swapping the channels in the order
specified in the swap tuple.
Args:
swaps (int triple): final order of channels
eg: (2, 1, 0)
"""
def __init__(self, swaps):
self.swaps = swaps
def __call__(self, image):
"""
Args:
image (Tensor): image tensor to be transformed
Return:
a tensor with channels swapped according to swap
"""
# if torch.is_tensor(image):
# image = image.data.cpu().numpy()
# else:
# image = np.array(image)
image = image[:, :, self.swaps]
return image
class RandomLightingNoise(object):
def __init__(self):
self.perms = ((0, 1, 2), (0, 2, 1),
(1, 0, 2), (1, 2, 0),
(2, 0, 1), (2, 1, 0))
def __call__(self, image, boxes=None, labels=None):
if random.randint(2):
swap = self.perms[random.randint(len(self.perms))]
shuffle = SwapChannels(swap) # shuffle channels
image = shuffle(image)
return image, boxes, labels
class ConvertColor(object):
def __init__(self, current='BGR', transform='HSV'):
self.transform = transform
self.current = current
def __call__(self, image, boxes=None, labels=None):
if self.current == 'BGR' and self.transform == 'HSV':
image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
elif self.current == 'HSV' and self.transform == 'BGR':
image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR)
else:
raise NotImplementedError
return image, boxes, labels
class RandomContrast(object):
def __init__(self, lower=0.5, upper=1.5):
self.lower = lower
self.upper = upper
assert self.upper >= self.lower, "contrast upper must be >= lower."
assert self.lower >= 0, "contrast lower must be non-negative."
# expects float image
def __call__(self, image, boxes=None, labels=None):
if random.randint(2):
alpha = random.uniform(self.lower, self.upper)
image *= alpha
return image, boxes, labels
class RandomBrightness(object):
def __init__(self, delta=32):
assert delta >= 0.0
assert delta <= 255.0
self.delta = delta
def __call__(self, image, boxes=None, labels=None):
if random.randint(2):
delta = random.uniform(-self.delta, self.delta)
image += delta
return image, boxes, labels
class PhotometricDistort(object):
def __init__(self):
self.pd = [
RandomContrast(),
ConvertColor(transform='HSV'),
RandomSaturation(),
RandomHue(),
ConvertColor(current='HSV', transform='BGR'),
RandomContrast()
]
self.rand_brightness = RandomBrightness()
self.rand_light_noise = RandomLightingNoise()
def __call__(self, image, boxes, labels):
im = image.copy()
im, boxes, labels = self.rand_brightness(im, boxes, labels)
if random.randint(2):
distort = Compose(self.pd[:-1])
else:
distort = Compose(self.pd[1:])
im, boxes, labels = distort(im, boxes, labels)
return self.rand_light_noise(im, boxes, labels)
def intersect(box_a, box_b):
max_xy = np.minimum(box_a[:, 2:], box_b[2:])
min_xy = np.maximum(box_a[:, :2], box_b[:2])
inter = np.clip((max_xy - min_xy), a_min=0, a_max=np.inf)
return inter[:, 0] * inter[:, 1]
def jaccard_numpy(box_a, box_b):
"""Compute the jaccard overlap of two sets of boxes. The jaccard overlap
is simply the intersection over union of two boxes.
Args:
box_a: Multiple bounding boxes, Shape: [num_boxes,4]
box_b: Single bounding box, Shape: [4]
Return:
        jaccard overlap: Shape: [box_a.shape[0]]
"""
inter = intersect(box_a, box_b)
area_a = ((box_a[:, 2] - box_a[:, 0]) *
(box_a[:, 3] - box_a[:, 1])) # [A,B]
area_b = ((box_b[2] - box_b[0]) *
(box_b[3] - box_b[1])) # [A,B]
union = area_a + area_b - inter
return inter / union # [A,B]
class Compose(object):
def __init__(self, transforms):
self.transforms = transforms
def __call__(self, img, boxes=None, labels=None):
for t in self.transforms:
img, boxes, labels = t(img, boxes, labels)
return img, boxes, labels
class Expand(object):
"""
    Expand zooms the image out: the original is pasted at a random offset on a
    larger canvas and the remaining area is filled with the mean value.
"""
def __init__(self, mean):
self.mean = mean
def __call__(self, image, boxes, labels):
if random.randint(5):
return image, boxes, labels
height, width, depth = image.shape
ratio = random.uniform(1, 4)
left = random.uniform(0, width * ratio - width)
top = random.uniform(0, height * ratio - height)
expand_image = np.zeros(
(int(height * ratio), int(width * ratio), depth),
dtype=image.dtype)
expand_image[:, :, :] = self.mean
expand_image[int(top):int(top + height),
int(left):int(left + width)] = image
image = expand_image
boxes = boxes.copy()
boxes[:, :2] += (int(left), int(top))
boxes[:, 2:] += (int(left), int(top))
return image, boxes, labels
class RandomSSDCrop(object):
"""Crop
Arguments:
img (Image): the image being input during training
boxes (Tensor): the original bounding boxes in pt form
labels (Tensor): the class labels for each bbox
mode (float tuple): the min and max jaccard overlaps
Return:
(img, boxes, classes)
img (Image): the cropped image
boxes (Tensor): the adjusted bounding boxes in pt form
labels (Tensor): the class labels for each bbox
"""
def __init__(self):
self.sample_options = (
# using entire original input image
None,
            # sample a patch s.t. MIN jaccard w/ obj in .1, .3, .7, .9
(0.1, None),
(0.3, None),
(0.7, None),
(0.9, None),
# randomly sample a patch
(None, None),
)
def __call__(self, image, boxes=None, labels=None):
"""
:param image: 3-d array,channel last
        :param boxes: 2-d array, (num_gt, (x1, y1, x2, y2))
:param labels: 1-d array(num_gt)
:return:
"""
height, width, _ = image.shape
while True:
# randomly choose a mode
mode = random.choice(self.sample_options)
if mode is None:
return image, boxes, labels
min_iou, max_iou = mode
if min_iou is None:
min_iou = float('-inf')
if max_iou is None:
max_iou = float('inf')
            # max trials (50)
for _ in | __call__ | identifier_name |
|
augment.py | = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
elif self.current == 'HSV' and self.transform == 'BGR':
image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR)
else:
raise NotImplementedError
return image, boxes, labels
class RandomContrast(object):
def __init__(self, lower=0.5, upper=1.5):
self.lower = lower
self.upper = upper
assert self.upper >= self.lower, "contrast upper must be >= lower."
assert self.lower >= 0, "contrast lower must be non-negative."
# expects float image
def __call__(self, image, boxes=None, labels=None):
if random.randint(2):
alpha = random.uniform(self.lower, self.upper)
image *= alpha
return image, boxes, labels
class RandomBrightness(object):
def __init__(self, delta=32):
assert delta >= 0.0
assert delta <= 255.0
self.delta = delta
def __call__(self, image, boxes=None, labels=None):
if random.randint(2):
delta = random.uniform(-self.delta, self.delta)
image += delta
return image, boxes, labels
class PhotometricDistort(object):
def __init__(self):
self.pd = [
RandomContrast(),
ConvertColor(transform='HSV'),
RandomSaturation(),
RandomHue(),
ConvertColor(current='HSV', transform='BGR'),
RandomContrast()
]
self.rand_brightness = RandomBrightness()
self.rand_light_noise = RandomLightingNoise()
def __call__(self, image, boxes, labels):
im = image.copy()
im, boxes, labels = self.rand_brightness(im, boxes, labels)
if random.randint(2):
distort = Compose(self.pd[:-1])
else:
distort = Compose(self.pd[1:])
im, boxes, labels = distort(im, boxes, labels)
return self.rand_light_noise(im, boxes, labels)
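def _photometric_distort_demo():
    # Editor's illustration (hypothetical): PhotometricDistort expects a float
    # image in BGR channel order (OpenCV convention), since it round-trips
    # through HSV; boxes and labels pass through untouched.
    image = np.random.uniform(0, 255, (32, 32, 3)).astype(np.float32)
    boxes = np.array([[2., 2., 20., 20.]])
    return PhotometricDistort()(image, boxes, np.array([1]))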
def intersect(box_a, box_b):
max_xy = np.minimum(box_a[:, 2:], box_b[2:])
min_xy = np.maximum(box_a[:, :2], box_b[:2])
inter = np.clip((max_xy - min_xy), a_min=0, a_max=np.inf)
return inter[:, 0] * inter[:, 1]
def jaccard_numpy(box_a, box_b):
"""Compute the jaccard overlap of two sets of boxes. The jaccard overlap
is simply the intersection over union of two boxes.
Args:
box_a: Multiple bounding boxes, Shape: [num_boxes,4]
box_b: Single bounding box, Shape: [4]
Return:
        jaccard overlap: Shape: [box_a.shape[0]]
"""
inter = intersect(box_a, box_b)
area_a = ((box_a[:, 2] - box_a[:, 0]) *
(box_a[:, 3] - box_a[:, 1])) # [A,B]
area_b = ((box_b[2] - box_b[0]) *
(box_b[3] - box_b[1])) # [A,B]
union = area_a + area_b - inter
return inter / union # [A,B]
class Compose(object):
def __init__(self, transforms):
self.transforms = transforms
def __call__(self, img, boxes=None, labels=None):
for t in self.transforms:
img, boxes, labels = t(img, boxes, labels)
return img, boxes, labels
class Expand(object):
"""
    Expand zooms the image out: the original is pasted at a random offset on a
    larger canvas and the remaining area is filled with the mean value.
"""
def __init__(self, mean):
self.mean = mean
def __call__(self, image, boxes, labels):
if random.randint(5):
return image, boxes, labels
height, width, depth = image.shape
ratio = random.uniform(1, 4)
left = random.uniform(0, width * ratio - width)
top = random.uniform(0, height * ratio - height)
expand_image = np.zeros(
(int(height * ratio), int(width * ratio), depth),
dtype=image.dtype)
expand_image[:, :, :] = self.mean
expand_image[int(top):int(top + height),
int(left):int(left + width)] = image
image = expand_image
boxes = boxes.copy()
boxes[:, :2] += (int(left), int(top))
boxes[:, 2:] += (int(left), int(top))
return image, boxes, labels
class RandomSSDCrop(object):
"""Crop
Arguments:
img (Image): the image being input during training
boxes (Tensor): the original bounding boxes in pt form
labels (Tensor): the class labels for each bbox
mode (float tuple): the min and max jaccard overlaps
Return:
(img, boxes, classes)
img (Image): the cropped image
boxes (Tensor): the adjusted bounding boxes in pt form
labels (Tensor): the class labels for each bbox
"""
def __init__(self):
self.sample_options = (
# using entire original input image
None,
            # sample a patch s.t. MIN jaccard w/ obj in .1, .3, .7, .9
(0.1, None),
(0.3, None),
(0.7, None),
(0.9, None),
# randomly sample a patch
(None, None),
)
def __call__(self, image, boxes=None, labels=None):
"""
:param image: 3-d array,channel last
        :param boxes: 2-d array, (num_gt, (x1, y1, x2, y2))
:param labels: 1-d array(num_gt)
:return:
"""
height, width, _ = image.shape
while True:
# randomly choose a mode
mode = random.choice(self.sample_options)
if mode is None:
return image, boxes, labels
min_iou, max_iou = mode
if min_iou is None:
min_iou = float('-inf')
if max_iou is None:
max_iou = float('inf')
            # max trials (50)
for _ in range(50):
current_image = image
w = random.uniform(0.3 * width, width)
h = random.uniform(0.3 * height, height)
# aspect ratio constraint b/t .5 & 2
if h / w < 0.5 or h / w > 2:
continue
                left = random.uniform(0, width - w)  # sample the patch offset in [0, width - w)
                top = random.uniform(0, height - h)
# convert to integer rect x1,y1,x2,y2
rect = np.array([int(left), int(top), int(left + w), int(top + h)])
# calculate IoU (jaccard overlap) b/t the cropped and gt boxes
# print(boxes, rect)
overlap = jaccard_numpy(boxes, rect)
# is min and max overlap constraint satisfied? if not try again
if overlap.min() < min_iou and max_iou < overlap.max():
continue
# cut the crop from the image
current_image = current_image[rect[1]:rect[3], rect[0]:rect[2], :]
# keep overlap with gt box IF center in sampled patch
centers = (boxes[:, :2] + boxes[:, 2:]) / 2.0
                # keep boxes whose centers lie below and to the right of the patch's top-left corner
                m1 = (rect[0] < centers[:, 0]) * (rect[1] < centers[:, 1])
                # ... and above and to the left of the patch's bottom-right corner
m2 = (rect[2] > centers[:, 0]) * (rect[3] > centers[:, 1])
# mask in that both m1 and m2 are true
mask = m1 * m2
# have any valid boxes? try again if not
if not mask.any():
continue
# take only matching gt boxes
current_boxes = boxes[mask, :].copy()
# take only matching gt labels
current_labels = labels[mask]
# should we use the box left and top corner or the crop's
current_boxes[:, :2] = np.maximum(current_boxes[:, :2], rect[:2])
                # adjust to crop (by subtracting crop's left,top)
current_boxes[:, :2] -= rect[:2]
current_boxes[:, 2:] = np.minimum(current_boxes[:, 2:], rect[2:])
current_boxes[:, 2:] -= rect[:2]
return current_image, current_boxes, current_labels
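def _augmentation_pipeline_demo():
    # Editor's sketch (hypothetical): how these transforms are usually chained
    # for SSD-style training; the mean value and input shape here are made up.
    pipeline = Compose([Expand(mean=(104, 117, 123)), RandomSSDCrop()])
    image = np.random.rand(300, 300, 3).astype(np.float32)
    boxes = np.array([[30., 40., 120., 160.]])
    labels = np.array([1])
    return pipeline(image, boxes, labels)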
class RandomRotation(object):
"""
    Rotation augmentation supporting arbitrary angles. The rotated image is
    expanded automatically (it grows and keeps black corners), and the boxes'
    sizes and positions are adjusted to match.
"""
def __init__(self, angle=None, center=None, scale=1.0):
if angle is None:
self.angle = np.random.randint(-3, 3)
else:
self.angle = angle
self.center = center
self.scale = scale
| random_line_split |
||
views.py | config["APP_KEY"]
return render_template('main/index.html', callback=callback, app_key=app_key)
class Update(MethodView):
decorators = [login_required]
def get(self, page):
per_page = 10
unpushed_entry = WaitingQueue.query.order_by(WaitingQueue.cutting_weight.desc()).all()
pagination = Pagination(unpushed_entry, per_page)
current_page = pagination.page(page)
foot_bar = PaginationBar(css_framework='bootstrap3', link_size='sm',
show_single_page=True, page=page,
per_page=per_page, total=len(unpushed_entry),
format_total=True, format_number=True)
result = {
"titles": current_page,
"current_time": datetime.utcnow(),
"pushtime": 10,
"deltime": 999,
"page": page,
"per_page": per_page,
"pagination": foot_bar
}
return render_template('main/update.html', **result)
def post(self, page):
data = request.get_json()
if data['action'] == 'post':
| ':
title = data['title']
UserOperation(user_id=current_user.id, operation=Operation.DELETE, title=title).save()
query = WaitingQueue.query.filter_by(title=data['title']).first()
if query:
query.delete()
response = jsonify({'result': True})
return response
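# Editor's note (hypothetical wiring, not taken from this repository): Flask
# MethodView classes like Update are typically registered on the blueprint as
#   main.add_url_rule('/update/<int:page>', view_func=Update.as_view('update'))
# which is what makes url_for('main.update')-style endpoints used below resolve.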
class ManualUpdate(MethodView):
decorators = [login_required]
def __init__(self):
self.form = PushForm
def get(self):
return render_template('main/mupdate.html', form=self.form(), pushtime=10)
def post(self):
if not current_user.can(Permission.MANUAL_PUSH):
flash(u"你没有权限")
form = self.form(request.form)
if not form.validate():
flash(u"条目格式有问题,请检查并重新填写")
title = form.pushtitle.data
result = self.check_push_validate(title.encode("utf-8"))
if not result:
flash(u"推送条目被ban,或者已经在24小时之内推送过,或者已经进入待推送列表")
try:
image = MoegirlImage(title)
except HTTPError as e:
flash(u"请求萌百错误,错误码如下{},请联系管理员".format(e))
return redirect(url_for('main.mupdate'))
if not image.path:
flash(u"无法取得图片,请重试")
entry = WaitingQueue(title=title, image=image.path)
env = Env()
current_weight = env.get("CUTTING_WEIGHT_INIT")
entry.cutting_weight = current_weight + 1
entry.save()
env.set("CUTTING_WEIGHT_INIT", entry.cutting_weight)
UserOperation(user_id=current_user.id, title=title, operation=Operation.PUSH).save()
if form.industry.data:
try:
from koushihime.crontab import push
push()
except Exception as e:
flash(u"推送失败: {}".format(str(e)))
flash(u"操作成功,词条将立即推送")
return redirect(url_for('main.mupdate'))
@staticmethod
def check_push_validate(title):
moegirl_entry = MoegirlQuery(title)
namespace = moegirl_entry.get_namespace()
        if namespace == 0:
baned_from_moegirl = moegirl_entry.banned_moegirl_category()
baned_from_regex = moegirl_entry.ban_from_regex()
has_pushed = recent_have_pushed(title.decode("utf-8")) # TODO: 改成自动冒泡
has_catched = have_auto_catched(title.decode("utf-8"))
result = baned_from_moegirl is False \
and has_pushed is False \
and has_catched is False \
and baned_from_regex is False
return result
else:
return False
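# Editor's note: in the MediaWiki API, namespace 0 is the main (article)
# namespace, so check_push_validate only lets ordinary entry pages through and
# rejects everything else outright. Illustrative, hypothetical values:
#   MoegirlQuery('Foo').get_namespace()      -> 0  (eligible)
#   MoegirlQuery('Talk:Foo').get_namespace() -> 1  (fails the gate above)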
class UserInfo(MethodView):
decorators = [login_required]
def get(self, username):
is_admin = current_user.can(Permission.ADMINISTER)
if current_user.username == username or is_admin is True:
user_info = User.query.filter_by(username=username, deleted=False).first()
if not user_info:
abort(404)
return render_template('main/user.html', u=user_info, username=user_info.username)
else:
abort(403)
class UserList(MethodView):
decorators = [login_required, admin_required]
def __init__(self):
self.form = AddUserForm
def get(self):
userlist = User.query.filter_by(deleted=False).all()
return render_template('main/userlist.html', userlist=userlist, form=self.form())
def post(self):
data = request.get_json()
if data:
if data['action'] == 'edit':
username = data['username']
else:
username = data['username']
try:
User.query.filter_by(username=username, deleted=False).first().delete()
except:
flash(u'用户不存在')
return jsonify({"status": 302, "location": url_for('main.editprofile', username=username)})
elif request.form:
self.add_user()
return redirect('userlist')
def add_user(self):
form = self.form(request.form)
if form.validate():
role = Role.query.filter_by(name=form.role.data).first()
if role:
if not User.query.filter_by(email=form.email.data).first():
user = User(email=form.email.data, username=form.username.data,
role=role, password=form.password.data)
user.save()
else:
flash(u'已经存在该用户')
else:
flash(u'不存在该用户组')
return redirect(url_for('main.userlist'))
class EditProfile(MethodView):
decorators = [login_required]
def __init__(self):
self.form = EditProfileForm
self.admin_form = AdminEditProfileForm
def get(self, username):
        if not username:  # the user is editing their own profile page
form = self.form()
form.email.data = current_user.email
form.about_me.data = current_user.aboutme
else:
if current_user.can(Permission.ADMINISTER):
user_info = User.query.filter_by(username=username, deleted=False).first()
if user_info:
form = self.admin_form()
form.email.data = user_info.email
form.about_me.data = user_info.aboutme
form.role.data = user_info.role.name
else:
flash(u'用户不存在')
return redirect(url_for('main.index'))
else:
abort(403)
return render_template('main/edit_profile.html', form=form, u=current_user)
def post(self, username):
if not username:
form = self.form(request.form)
user = current_user
else:
if current_user.can(Permission.ADMINISTER):
form = self.form(request.form)
user = User.query.filter_by(username=username, deleted=False).first()
if user:
if not current_user.verify_password(form.oripassword.data):
flash(u'管理员密码输入错误')
return redirect(url_for('main.editprofile', username=username))
else:
flash(u'用户不存在')
return redirect(url_for('main.index'))
else:
abort(403)
self.change_profile(user, form, True if username else False)
return redirect(url_for('main.user', username=username))
@staticmethod
def change_profile(user, form, admin=False):
user.password = form.password.data
user.email = form.email.data
user.aboutme = form.about_me.data
if admin:
            new_role = Role.query.filter_by(name=form.role.data).first()  # .first() was missing: a bare Query is always truthy
if new_role:
user.role = new_role
user.save()
class OperationLog(MethodView):
decorators = [login_required, admin_required]
def get(self, page):
per_page = 10
count = UserOperation.query.count()
query = UserOperation.query.order_by(UserOperation.id.desc())\
.paginate(page=page, per_page=per_page, error_out=False)
foot_bar = PaginationBar(css_framework='bootstrap3', link_size='sm',
show_single_page=False, page=page, per_page=per_page,
total=count, format_total=True, format_number=True)
return render_template('main/log.html', records=query.items,
page=page, per_page=per_page, pagination=foot_bar, Operation=Operation)
class KeywordBan(MethodView):
decorators = [login_required, admin_required]
def __init__(self):
self.form = BanKeywordForm
def get(self, page):
per_page = 10
count = BanList.query.filter_by(deleted=False).count()
        # TODO: load the keywords into the config to cut down on queries
pagination = BanList.query.filter_by(deleted=False)\
.paginate(page=page, per_page=per_page, error_out=False)
foot_bar = PaginationBar(css_framework='bootstrap3', link_size='sm',
show_single_page=False, page=page, | title = data["title"]
env = Env()
current_weight = env.get("CUTTING_WEIGHT_INIT")
entry = WaitingQueue.query.filter_by(title=title).first()
if entry:
                entry.cutting_weight = current_weight + 1  # FIXME: the weight is bumped even when the entry is already at the highest weight
entry.save()
env.set("CUTTING_WEIGHT_INIT", entry.cutting_weight)
elif data['action'] == 'del | conditional_block |
views.py | ["APP_KEY"]
return render_template('main/index.html', callback=callback, app_key=app_key)
class Update(MethodView):
decorators = [login_required]
def get(self, page):
per_page = 10
unpushed_entry = WaitingQueue.query.order_by(WaitingQueue.cutting_weight.desc()).all()
pagination = Pagination(unpushed_entry, per_page)
current_page = pagination.page(page)
foot_bar = PaginationBar(css_framework='bootstrap3', link_size='sm',
show_single_page=True, page=page,
per_page=per_page, total=len(unpushed_entry),
format_total=True, format_number=True)
result = {
"titles": current_page,
"current_time": datetime.utcnow(),
"pushtime": 10,
"deltime": 999,
"page": page,
"per_page": per_page,
"pagination": foot_bar
}
return render_template('main/update.html', **result)
def post(self, page):
data = request.get_json()
if data['action'] == 'post':
title = data["title"]
env = Env()
current_weight = env.get("CUTTING_WEIGHT_INIT")
entry = WaitingQueue.query.filter_by(title=title).first()
if entry:
                entry.cutting_weight = current_weight + 1  # FIXME: the weight is bumped even when the entry is already at the highest weight
entry.save()
env.set("CUTTING_WEIGHT_INIT", entry.cutting_weight)
elif data['action'] == 'del':
title = data['title']
UserOperation(user_id=current_user.id, operation=Operation.DELETE, title=title).save()
query = WaitingQueue.query.filter_by(title=data['title']).first()
if query:
query.delete()
response = jsonify({'result': True})
return response
class ManualUpdate(MethodView):
decorators = [login_required]
def __init__(self):
self.form = PushForm
def get(self):
return render_template('main/mupdate.html', form=self.form(), pushtime=10)
def post(self):
if not current_user.can(Permission.MANUAL_PUSH):
flash(u"你没有权限")
form = self.form(request.form)
if not form.validate():
flash(u"条目格式有问题,请检查并重新填写")
title = form.pushtitle.data
result = self.check_push_validate(title.encode("utf-8"))
if not result:
flash(u"推送条目被ban,或者已经在24小时之内推送过,或者已经进入待推送列表")
try:
image = MoegirlImage(title)
except HTTPError as e:
flash(u"请求萌百错误,错误码如下{},请联系管理员".format(e))
return redirect(url_for('main.mupdate'))
if not image.path:
flash(u"无法取得图片,请重试")
entry = WaitingQueue(title=title, image=image.path)
env = Env()
current_weight = env.get("CUTTING_WEIGHT_INIT")
entry.cutting_weight = current_weight + 1
entry.save()
env.set("CUTTING_WEIGHT_INIT", entry.cutting_weight)
UserOperation(user_id=current_user.id, title=title, operation=Operation.PUSH).save()
if form.industry.data:
try:
from koushihime.crontab import push
push()
except Exception as e:
flash(u"推送失败: {}".format(str(e)))
flash(u"操作成功,词条将立即推送")
return redirect(url_for('main.mupdate'))
@staticmethod
def check_push_validate(title):
moegirl_entry = MoegirlQuery(title)
namespace = moegirl_entry.get_namespace()
        if namespace == 0:
baned_from_moegirl = moegirl_entry.banned_moegirl_category()
baned_from_regex = moegirl_entry.ban_from_regex()
has_pushed = recent_have_pushed(title.decode("utf-8")) # TODO: 改成自动冒泡
has_catched = have_auto_catched(title.decode("utf-8"))
result = baned_from_moegirl is False \
and has_pushed is False \
and has_catched is False \
and baned_from_regex is False
return result
else:
return False
class UserInfo(MethodView):
decorators = [login_required]
def get(self, username):
is_admin = current_user.can(Permission.ADMINISTER)
if current_user.username == username or is_admin is True:
user_info = User.query.filter_by(username=username, deleted=False).first()
if not user_info:
abort(404)
return render_template('main/user.html', u=user_info, username=user_info.username)
else:
abort(403)
class UserList(MethodView):
decorators = [login_required, admin_required]
def __init__(self):
self.form = AddUserForm
def get(self):
userlist = User.query.filter_by(deleted=False).all()
return render_template('main/userlist.html', userlist=userlist, form=self.form())
def post(self):
data = request.get_json()
if data:
if data['action'] == 'edit':
username = data['username']
else:
username = data['username']
try:
User.query.filter_by(username=username, deleted=False).first().delete()
except:
flash(u'用户不存在')
return jsonify({"status": 302, "location": url_for('main.editprofile', username=username)})
elif request.form:
self.add_user()
return redirect('userlist')
def add_user(self):
form = self.form(request.form)
if form.validate():
role = Role.query.filter_by(name=form.role.data).first()
if role:
if not User.query.filter_by(email=form.email.data).first():
user = User(email=form.email.data, username=form.username.data,
role=role, password=form.password.data)
user.save()
else:
flash(u'已经存在该用户')
else:
flash(u'不存在该用户组')
return redirect(url_for('main.userlist'))
class EditProfile(MethodView):
decorators = [login_required]
def __init__(self):
self.form = EditProfileForm
self.admin_form = AdminEditProfileForm
def get(self, username):
        if not username:  # the user is editing their own profile page
form = self.form()
form.email.data = current_user.email
form.about_me.data = current_user.aboutme
else:
if current_user.can(Permission.ADMINISTER):
| user_info = User.query.filter_by(username=username, deleted=False).first()
if user_info:
form = self.admin_form()
form.email.data = user_info.email
form.about_me.data = user_info.aboutme
form.role.data = user_info.role.name
else:
flash(u'用户不存在')
return redirect(url_for('main.index'))
else:
abort(403)
return render_template('main/edit_profile.html', form=form, u=current_user)
def post(self, username):
if not username:
form = self.form(request.form)
user = current_user
else:
if current_user.can(Permission.ADMINISTER):
form = self.form(request.form)
user = User.query.filter_by(username=username, deleted=False).first()
if user:
if not current_user.verify_password(form.oripassword.data):
flash(u'管理员密码输入错误')
return redirect(url_for('main.editprofile', username=username))
else:
flash(u'用户不存在')
return redirect(url_for('main.index'))
else:
abort(403)
self.change_profile(user, form, True if username else False)
return redirect(url_for('main.user', username=username))
@staticmethod
def change_profile(user, form, admin=False):
user.password = form.password.data
user.email = form.email.data
user.aboutme = form.about_me.data
if admin:
            new_role = Role.query.filter_by(name=form.role.data).first()  # .first() was missing: a bare Query is always truthy
if new_role:
user.role = new_role
user.save()
class OperationLog(MethodView):
decorators = [login_required, admin_required]
def get(self, page):
per_page = 10
count = UserOperation.query.count()
query = UserOperation.query.order_by(UserOperation.id.desc())\
.paginate(page=page, per_page=per_page, error_out=False)
foot_bar = PaginationBar(css_framework='bootstrap3', link_size='sm',
show_single_page=False, page=page, per_page=per_page,
total=count, format_total=True, format_number=True)
return render_template('main/log.html', records=query.items,
page=page, per_page=per_page, pagination=foot_bar, Operation=Operation)
class KeywordBan(MethodView):
decorators = [login_required, admin_required]
def __init__(self):
self.form = BanKeywordForm
def get(self, page):
per_page = 10
count = BanList.query.filter_by(deleted=False).count()
        # TODO: load the keywords into the config to cut down on queries
pagination = BanList.query.filter_by(deleted=False)\
.paginate(page=page, per_page=per_page, error_out=False)
foot_bar = PaginationBar(css_framework='bootstrap3', link_size='sm',
show_single_page=False, page=page | identifier_name |
|
views.py | try:
from koushihime.crontab import push
push()
except Exception as e:
flash(u"推送失败: {}".format(str(e)))
flash(u"操作成功,词条将立即推送")
return redirect(url_for('main.mupdate'))
@staticmethod
def check_push_validate(title):
moegirl_entry = MoegirlQuery(title)
namespace = moegirl_entry.get_namespace()
        if namespace == 0:
baned_from_moegirl = moegirl_entry.banned_moegirl_category()
baned_from_regex = moegirl_entry.ban_from_regex()
has_pushed = recent_have_pushed(title.decode("utf-8")) # TODO: 改成自动冒泡
has_catched = have_auto_catched(title.decode("utf-8"))
result = baned_from_moegirl is False \
and has_pushed is False \
and has_catched is False \
and baned_from_regex is False
return result
else:
return False
class UserInfo(MethodView):
decorators = [login_required]
def get(self, username):
is_admin = current_user.can(Permission.ADMINISTER)
if current_user.username == username or is_admin is True:
user_info = User.query.filter_by(username=username, deleted=False).first()
if not user_info:
abort(404)
return render_template('main/user.html', u=user_info, username=user_info.username)
else:
abort(403)
class UserList(MethodView):
decorators = [login_required, admin_required]
def __init__(self):
self.form = AddUserForm
def get(self):
userlist = User.query.filter_by(deleted=False).all()
return render_template('main/userlist.html', userlist=userlist, form=self.form())
def post(self):
data = request.get_json()
if data:
if data['action'] == 'edit':
username = data['username']
else:
username = data['username']
try:
User.query.filter_by(username=username, deleted=False).first().delete()
except:
flash(u'用户不存在')
return jsonify({"status": 302, "location": url_for('main.editprofile', username=username)})
elif request.form:
self.add_user()
return redirect('userlist')
def add_user(self):
form = self.form(request.form)
if form.validate():
role = Role.query.filter_by(name=form.role.data).first()
if role:
if not User.query.filter_by(email=form.email.data).first():
user = User(email=form.email.data, username=form.username.data,
role=role, password=form.password.data)
user.save()
else:
flash(u'已经存在该用户')
else:
flash(u'不存在该用户组')
return redirect(url_for('main.userlist'))
class EditProfile(MethodView):
decorators = [login_required]
def __init__(self):
self.form = EditProfileForm
self.admin_form = AdminEditProfileForm
def get(self, username):
        if not username:  # the user is editing their own profile page
form = self.form()
form.email.data = current_user.email
form.about_me.data = current_user.aboutme
else:
if current_user.can(Permission.ADMINISTER):
user_info = User.query.filter_by(username=username, deleted=False).first()
if user_info:
form = self.admin_form()
form.email.data = user_info.email
form.about_me.data = user_info.aboutme
form.role.data = user_info.role.name
else:
flash(u'用户不存在')
return redirect(url_for('main.index'))
else:
abort(403)
return render_template('main/edit_profile.html', form=form, u=current_user)
def post(self, username):
if not username:
form = self.form(request.form)
user = current_user
else:
if current_user.can(Permission.ADMINISTER):
form = self.form(request.form)
user = User.query.filter_by(username=username, deleted=False).first()
if user:
if not current_user.verify_password(form.oripassword.data):
flash(u'管理员密码输入错误')
return redirect(url_for('main.editprofile', username=username))
else:
flash(u'用户不存在')
return redirect(url_for('main.index'))
else:
abort(403)
self.change_profile(user, form, True if username else False)
return redirect(url_for('main.user', username=username))
@staticmethod
def change_profile(user, form, admin=False):
user.password = form.password.data
user.email = form.email.data
user.aboutme = form.about_me.data
if admin:
            new_role = Role.query.filter_by(name=form.role.data).first()  # .first() was missing: a bare Query is always truthy
if new_role:
user.role = new_role
user.save()
class OperationLog(MethodView):
decorators = [login_required, admin_required]
def get(self, page):
per_page = 10
count = UserOperation.query.count()
query = UserOperation.query.order_by(UserOperation.id.desc())\
.paginate(page=page, per_page=per_page, error_out=False)
foot_bar = PaginationBar(css_framework='bootstrap3', link_size='sm',
show_single_page=False, page=page, per_page=per_page,
total=count, format_total=True, format_number=True)
return render_template('main/log.html', records=query.items,
page=page, per_page=per_page, pagination=foot_bar, Operation=Operation)
class KeywordBan(MethodView):
decorators = [login_required, admin_required]
def __init__(self):
self.form = BanKeywordForm
def get(self, page):
per_page = 10
count = BanList.query.filter_by(deleted=False).count()
        # TODO: load the keywords into the config to cut down on queries
pagination = BanList.query.filter_by(deleted=False)\
.paginate(page=page, per_page=per_page, error_out=False)
foot_bar = PaginationBar(css_framework='bootstrap3', link_size='sm',
show_single_page=False, page=page, per_page=per_page,
total=count, format_total=True, format_number=True)
template_param = {
'keywords': pagination.items,
'page': page,
'per_page': per_page,
'pagination': foot_bar,
'form': self.form()
}
return render_template('main/ban.html', **template_param)
def post(self, page):
data = request.get_json()
if data:
keyword = data['keyword']
result = BanList.query.filter_by(rule=keyword).first()
if result:
if result.status:
result.status.delete()
result.delete()
flash(u'成功删除关键词')
else:
flash(u'该关键词不存在')
return jsonify({"status": 302, "location": url_for('main.ban')})
elif request.form:
form = self.form(request.form)
if form.validate():
exist = BanList.query.filter_by(rule=form.keyword.data).first()
if not exist:
ban = BanList(rule=form.keyword.data, time_limit=form.time_limit.data)
ban.save()
status = RulePushCount(rule_id=ban.id, count=ban.time_limit)
status.save()
flash(u'添加关键词成功')
else:
if exist.deleted is True:
exist.deleted = False
exist.time_limit = form.time_limit.data
exist.save()
status = RulePushCount(rule_id=exist.id, count=exist.time_limit)
status.save()
else:
flash(u'重复添加关键词')
return redirect(url_for('main.ban'))
# TODO: deprecated
class WeiboAuthCallback(MethodView):
decorators = [login_required, admin_required]
def get(self):
self.auth_code = request.args.get("code")
result = self.fresh_access()
if result is True:
return render_template('main/success.html')
else:
return render_template('main/failed.html', e=result)
def fresh_access(self):
# config = current_app.config["WEIBO_AUTH_CONFIG"]
# callback = config["CALLBACK"]
# app_key = config["APP_KEY"]
# app_secret_key = config["APP_SECRET"]
try:
pass
# client = APIClient(app_key=app_key, app_secret=app_secret_key, redirect_uri=callback)
# token_data = client.request_access_token(self.auth_code)
# access_token, expires_in = token_data.access_token, token_data.expires_in
except BaseException as e:
return e
# config["ACCESS_TOKEN"] = access_token
# config["EXPIRE_TIME"] = expires_in
# env = Env()
# env.set("ACCESS_TOKEN", access_token)
# env = Env()
# env.set("EXPIRE_TIME", expires_in)
return True
class Cookie(MethodView):
decorators = [login_required, admin_required]
def __init__(self):
self.form = CookieForm
def get(self):
return render_template('main/cookie.html', form=self.form(), pushtime=10)
def post(self):
form = self.form(request.form)
if not form.validate():
flash(u"表单不合法")
cookie = form.cookie.data
env = Env()
env.set("COOKIE", cookie)
flash(u"设置 Cookie 成功")
return redirect(url_for('main.cookie'))
| identifier_body |
||
views.py | return response
class ManualUpdate(MethodView):
decorators = [login_required]
def __init__(self):
self.form = PushForm
def get(self):
return render_template('main/mupdate.html', form=self.form(), pushtime=10)
def post(self):
if not current_user.can(Permission.MANUAL_PUSH):
flash(u"你没有权限")
form = self.form(request.form)
if not form.validate():
flash(u"条目格式有问题,请检查并重新填写")
title = form.pushtitle.data
result = self.check_push_validate(title.encode("utf-8"))
if not result:
flash(u"推送条目被ban,或者已经在24小时之内推送过,或者已经进入待推送列表")
try:
image = MoegirlImage(title)
except HTTPError as e:
flash(u"请求萌百错误,错误码如下{},请联系管理员".format(e))
return redirect(url_for('main.mupdate'))
if not image.path:
flash(u"无法取得图片,请重试")
entry = WaitingQueue(title=title, image=image.path)
env = Env()
current_weight = env.get("CUTTING_WEIGHT_INIT")
entry.cutting_weight = current_weight + 1
entry.save()
env.set("CUTTING_WEIGHT_INIT", entry.cutting_weight)
UserOperation(user_id=current_user.id, title=title, operation=Operation.PUSH).save()
if form.industry.data:
try:
from koushihime.crontab import push
push()
except Exception as e:
flash(u"推送失败: {}".format(str(e)))
flash(u"操作成功,词条将立即推送")
return redirect(url_for('main.mupdate'))
@staticmethod
def check_push_validate(title):
moegirl_entry = MoegirlQuery(title)
namespace = moegirl_entry.get_namespace()
        if namespace == 0:
baned_from_moegirl = moegirl_entry.banned_moegirl_category()
baned_from_regex = moegirl_entry.ban_from_regex()
has_pushed = recent_have_pushed(title.decode("utf-8")) # TODO: 改成自动冒泡
has_catched = have_auto_catched(title.decode("utf-8"))
result = baned_from_moegirl is False \
and has_pushed is False \
and has_catched is False \
and baned_from_regex is False
return result
else:
return False
class UserInfo(MethodView):
decorators = [login_required]
def get(self, username):
is_admin = current_user.can(Permission.ADMINISTER)
if current_user.username == username or is_admin is True:
user_info = User.query.filter_by(username=username, deleted=False).first()
if not user_info:
abort(404)
return render_template('main/user.html', u=user_info, username=user_info.username)
else:
abort(403)
class UserList(MethodView):
decorators = [login_required, admin_required]
def __init__(self):
self.form = AddUserForm
def get(self):
userlist = User.query.filter_by(deleted=False).all()
return render_template('main/userlist.html', userlist=userlist, form=self.form())
def post(self):
data = request.get_json()
if data:
if data['action'] == 'edit':
username = data['username']
else:
username = data['username']
try:
User.query.filter_by(username=username, deleted=False).first().delete()
except:
flash(u'用户不存在')
return jsonify({"status": 302, "location": url_for('main.editprofile', username=username)})
elif request.form:
self.add_user()
return redirect('userlist')
def add_user(self):
form = self.form(request.form)
if form.validate():
role = Role.query.filter_by(name=form.role.data).first()
if role:
if not User.query.filter_by(email=form.email.data).first():
user = User(email=form.email.data, username=form.username.data,
role=role, password=form.password.data)
user.save()
else:
flash(u'已经存在该用户')
else:
flash(u'不存在该用户组')
return redirect(url_for('main.userlist'))
class EditProfile(MethodView):
decorators = [login_required]
def __init__(self):
self.form = EditProfileForm
self.admin_form = AdminEditProfileForm
def get(self, username):
        if not username:  # the user is editing their own profile page
form = self.form()
form.email.data = current_user.email
form.about_me.data = current_user.aboutme
else:
if current_user.can(Permission.ADMINISTER):
user_info = User.query.filter_by(username=username, deleted=False).first()
if user_info:
form = self.admin_form()
form.email.data = user_info.email
form.about_me.data = user_info.aboutme
form.role.data = user_info.role.name
else:
flash(u'用户不存在')
return redirect(url_for('main.index'))
else:
abort(403)
return render_template('main/edit_profile.html', form=form, u=current_user)
def post(self, username):
if not username:
form = self.form(request.form)
user = current_user
else:
if current_user.can(Permission.ADMINISTER):
form = self.form(request.form)
user = User.query.filter_by(username=username, deleted=False).first()
if user:
if not current_user.verify_password(form.oripassword.data):
flash(u'管理员密码输入错误')
return redirect(url_for('main.editprofile', username=username))
else:
flash(u'用户不存在')
return redirect(url_for('main.index'))
else:
abort(403)
self.change_profile(user, form, True if username else False)
return redirect(url_for('main.user', username=username))
@staticmethod
def change_profile(user, form, admin=False):
user.password = form.password.data
user.email = form.email.data
user.aboutme = form.about_me.data
if admin:
            new_role = Role.query.filter_by(name=form.role.data).first()  # .first() was missing: a bare Query is always truthy
if new_role:
user.role = new_role
user.save()
class OperationLog(MethodView):
decorators = [login_required, admin_required]
def get(self, page):
per_page = 10
count = UserOperation.query.count()
query = UserOperation.query.order_by(UserOperation.id.desc())\
.paginate(page=page, per_page=per_page, error_out=False)
foot_bar = PaginationBar(css_framework='bootstrap3', link_size='sm',
show_single_page=False, page=page, per_page=per_page,
total=count, format_total=True, format_number=True)
return render_template('main/log.html', records=query.items,
page=page, per_page=per_page, pagination=foot_bar, Operation=Operation)
class KeywordBan(MethodView):
decorators = [login_required, admin_required]
def __init__(self):
self.form = BanKeywordForm
def get(self, page):
per_page = 10
count = BanList.query.filter_by(deleted=False).count()
        # TODO: load the keywords into the config to cut down on queries
pagination = BanList.query.filter_by(deleted=False)\
.paginate(page=page, per_page=per_page, error_out=False)
foot_bar = PaginationBar(css_framework='bootstrap3', link_size='sm',
show_single_page=False, page=page, per_page=per_page,
total=count, format_total=True, format_number=True)
template_param = {
'keywords': pagination.items,
'page': page,
'per_page': per_page,
'pagination': foot_bar,
'form': self.form()
}
return render_template('main/ban.html', **template_param)
def post(self, page):
data = request.get_json()
if data:
keyword = data['keyword']
result = BanList.query.filter_by(rule=keyword).first()
if result:
if result.status:
result.status.delete()
result.delete()
flash(u'成功删除关键词')
else:
flash(u'该关键词不存在')
return jsonify({"status": 302, "location": url_for('main.ban')})
elif request.form:
form = self.form(request.form)
if form.validate():
exist = BanList.query.filter_by(rule=form.keyword.data).first()
if not exist:
ban = BanList(rule=form.keyword.data, time_limit=form.time_limit.data)
ban.save()
status = RulePushCount(rule_id=ban.id, count=ban.time_limit)
status.save()
flash(u'添加关键词成功')
else:
if exist.deleted is True:
exist.deleted = False
exist.time_limit = form.time_limit.data
exist.save()
status = RulePushCount(rule_id=exist.id, count=exist.time_limit)
status.save()
else:
flash(u'重复添加关键词')
return redirect(url_for('main.ban'))
# TODO: deprecated
class WeiboAuthCallback(MethodView):
decorators = [login_required, admin_required]
def get(self):
self.auth_code = request.args.get("code")
result = self.fresh_access()
if result is True: | return render_template('main/success.html')
else: | random_line_split |
|
bfr.py | ])
centroid = []
for _ in range(dim):
centroid.append(0)
for point in points:
for i in range(dim):
centroid[i] += point[1][i]
tmp = []
for x in centroid:
tmp.append(x/len(points))
centroid = tmp
return centroid
def comp(new_centroid, centroid):
    # pad with previous centroids when k-means drops a cluster, then sort so
    # consecutive centroid lists can be compared element by element
    # (the old version read `centroid` as an undefined global)
    new_n = len(centroid) - len(new_centroid)
    if new_n != 0:
        new_centroid.extend(centroid[:new_n])
    new_centroid = sorted(new_centroid)
    return deepcopy(new_centroid)
def cal_conv(c1, c2):
dis = 0
for p1, p2 in zip(c1, c2):
for i in range(len(p1)):
dis += math.pow((p1[i] - p2[i]),2)
return math.sqrt(dis)
def cal_nc(clusters):
pass
def kmeans(points, k, conv=1e-5):
    print('kmeans: ', len(points))
#--init centroid---
    ran_id = random.randint(0, len(points) - 1)  # randint is inclusive on both ends
start_p = points[ran_id]
centroid = [start_p[1]]
#centroid.append(start_p[1])
print("start centroid",centroid) | for _ in range(k-1):
dis = []
for point in points:
#calculate
tmp_ds = []
for i, c in enumerate(centroid):
if point != c:
# print("point",point)
# print("c",c)
tmp_ds.append((euclidean(point, (0,c)), i))
min_ds = min(tmp_ds)
dis.append((min_ds[0], point))
max_c = max(dis, key = lambda x:x[0])
centroid.append(max_c[1][1])
#--
time_ = 0
point_sp = sc.parallelize(points).cache()
cur_conv = 1
# print('--------', centroid)
while time_ < 50 and cur_conv > conv :
time_ += 1
print('kmeans,find clusters')
# tmp_clusters = point_sp.map(lambda x: (find_cluster(x, centroid)))
# clu_list = tmp_clusters.collect()
clu_list = map(lambda x: (find_cluster(x, centroid)), points)
clusters = defaultdict(list)
for k, v in clu_list:
clusters[k].append(v)
nc = {}
for k, v in clusters.items():
res = find_centroid(v)
nc[k] = res
new_centroid = list(nc.values())
# print('++++++++++++++++++++=', new_centroid)
# clusters = sc.parallelize(clusters)\
# .groupByKey()
# clusters = sp_points.map(lambda x: (find_near_c(x, centroids)))
# new_cents = clusters.groupByKey().mapValues(cal_centroid).map(lambda x: x[1])
# nc = sc.parallelize(clusters.items()).mapValues(find_centroid).map(lambda x: x[1])
# print(nc.count())
# new_centroid = nc.collect()
# nc.unpersist()
        new_centroid = comp(new_centroid, centroid)
        cur_conv = cal_conv(centroid, new_centroid)  # centroid shift drives the convergence test
        centroid = new_centroid
# res = clusters.mapValues(list).collect()
# res_clusters = dict(res)
res_clusters = clusters
return res_clusters
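def _kmeans_demo():
    # Editor's sketch (hypothetical): kmeans expects (index, feature_vector)
    # tuples, the same shape read_file produces. The function also touches the
    # SparkContext `sc` internally, so a live context is assumed here.
    pts = [(i, [float(i % 7), float(i % 11)]) for i in range(30)]
    clusters = kmeans(pts, 3)
    return {cid: len(members) for cid, members in clusters.items()}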
def point_addition(a, b, pow_):
res = []
for i in range(len(a)):
tmp = a[i] + math.pow(b[i],pow_)
res.append(tmp)
return res
def cal_cluster_squ(cluster,DIM):
_sum = []
_sumq = []
for _ in range(DIM):
_sum.append(0)
_sumq.append(0)
for point in cluster:
_sum = point_addition(_sum, point[1],1)
_sumq = point_addition(_sumq, point[1],2)
lv = len(cluster)
res = [lv, _sum, _sumq]
return res
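def _cluster_summary_demo():
    # Editor's illustration (hypothetical): the [N, SUM, SUMSQ] summary kept for
    # each discard/compression set is all BFR needs to recover mean and variance.
    n, s, sq = cal_cluster_squ([(0, [1., 2.]), (1, [3., 6.])], 2)
    mean = [x / n for x in s]                            # [2.0, 4.0]
    var = [sq[i] / n - mean[i] ** 2 for i in range(2)]   # [1.0, 4.0]
    return n, mean, var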
def merge_list(a,b,DIM):
n1, su1, suq1 = a
n2, su2, suq2 = b
n = n1 + n2
su = point_addition(su1, su2,1)
    suq = point_addition(suq1, suq2, 1)  # suq2 already holds squared sums; do not square it again
flags = []
for i in range(DIM):
tmp = suq[i]/n - math.pow(su[i]/n,2)
flags.append(math.sqrt(tmp) < 3*math.sqrt(DIM))
l = [n,su,suq]
return (all(flags), l)
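def _merge_list_demo():
    # Editor's illustration (hypothetical): two summaries merge only when every
    # per-dimension std of the combined set stays below 3 * sqrt(DIM).
    a = cal_cluster_squ([(0, [1., 1.]), (1, [2., 2.])], 2)
    b = cal_cluster_squ([(2, [1.5, 1.5])], 2)
    merged_ok, merged = merge_list(a, b, 2)
    return merged_ok, merged[0]  # expected: (True, 3)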
#def out_2(out_path, data):
#print("writing into out2......")
#with open(out_path, 'w') as f:
#f.write('round_id,nof_cluster_discard,nof_point_discard,nof_cluster_compression,nof_point_compression,nof_point_retained\n')
#3for item in data:
# item = [str(x) for x in item]
# f.write(','.join(item) + '\n')
if __name__ == '__main__':
input_path = sys.argv[1]
n_clusters = int(sys.argv[2])
output_file1 = sys.argv[3]
output_file2 = sys.argv[4]
#with open(output_file2, 'w') as f:
#f.write('round_id,nof_cluster_discard,nof_point_discard,nof_cluster_compression,nof_point_compression,nof_point_retained\n')
OUTPUT1 = []
OUTPUT2 = []
#global POINTS
#global DIM
#global SAMPLE_NUM
SAMPLE_NUM = 40
#global DS
DS = []
#global DS_CLUSTER
DS_CLUSTER = []
#global RS
    RS = set()  # retained set: ids of points not yet summarized
#global RS_DICT
RS_DICT = {}
#global CS
CS = []
#global CS_CLUSTER
CS_CLUSTER = []
FILES = []
for file in Path(input_path).rglob('*.txt'):
if file.is_file():
FILES.append(file)
FILES = sorted(FILES)
#FILES = sorted(os.listdir(input_path))
print(FILES)
for i, filename in enumerate(FILES, start=1):
# filename = str(filename)
if i == 1:
#-- init points--
print(filename)
points = read_file(filename)
POINTS = points
le = len(points[0][1])
DIM = le - 1
#--- init bfr ---
if len(POINTS)//5 > SAMPLE_NUM:
n_sample = SAMPLE_NUM
else: n_sample = len(POINTS)//5
sample_points = random.sample(POINTS, n_sample)
clusters = kmeans(sample_points, n_clusters)
for cluster in clusters.values():
DS.append(cal_cluster_squ(cluster,DIM))
idx = []
for point in cluster:
idx.append(point[0])
DS_CLUSTER.append(idx)
#cal nonsample
Sample_set = {point[0] for point in sample_points}
non_sample = []
for point in POINTS:
if point[0] not in Sample_set:
non_sample.append(point)
new_clusters = kmeans(non_sample, 3*n_clusters)
for cluster in new_clusters.values():
if len(cluster) == 1:
RS.add(cluster[0][0])
RS_DICT[cluster[0][0]] = cluster[0]
else:
CS.append(cal_cluster_squ(cluster, DIM))
idx = []
for point in cluster:
idx.append(point[0])
CS_CLUSTER.append(idx)
else:
print(filename)
# add point
points = read_file(filename)
for point in points:
sign_ds = False
sign_cs = False
for j in range(len(DS)):
#cal
tmp_n, tmp_su, tmp_suq = DS[j]
tmp_md, tmp_sd = 0,0
for k in range(DIM):
nmt = point[1][k] - tmp_su[k]/tmp_n
dnmt = math.sqrt(abs(tmp_suq[k] / tmp_n - (tmp_su[k] / tmp_n) ** 2))
if dnmt != 0:
tmp_md += (nmt / dnmt) ** 2
else: tmp_md += nmt **2
tmp_sd += dnmt
md = math.sqrt(tmp_md)
n, su, suq = DS[j]
if md < math.sqrt(DIM)*3:
n+=1
su = point_addition(su, point[1],1)
suq = point_addition(suq, point[1],2)
tmpr = [n,su,suq]
DS[j] = tmpr
DS_CLUSTER[j].append(point[0])
sign_ds = True
break
if not sign_ds:
for j in range(len(CS)):
#cal
tmp_n, tmp_su, tmp_suq = CS[j]
tmp_md, tmp_sd = 0,0
for k in range(DIM):
n | random_line_split |
|
bfr.py | centroid = []
for _ in range(dim):
centroid.append(0)
for point in points:
for i in range(dim):
centroid[i] += point[1][i]
tmp = []
for x in centroid:
tmp.append(x/len(points))
centroid = tmp
return centroid
def comp(new_centroid, centroid):
    # pad with previous centroids when k-means drops a cluster, then sort so
    # consecutive centroid lists can be compared element by element
    # (the old version read `centroid` as an undefined global)
    new_n = len(centroid) - len(new_centroid)
    if new_n != 0:
        new_centroid.extend(centroid[:new_n])
    new_centroid = sorted(new_centroid)
    return deepcopy(new_centroid)
def cal_conv(c1, c2):
dis = 0
for p1, p2 in zip(c1, c2):
for i in range(len(p1)):
dis += math.pow((p1[i] - p2[i]),2)
return math.sqrt(dis)
def cal_nc(clusters):
pass
def kmeans(points, k, conv=1e-5):
    print('kmeans: ', len(points))
#--init centroid---
    ran_id = random.randint(0, len(points) - 1)  # randint is inclusive on both ends
start_p = points[ran_id]
centroid = [start_p[1]]
#centroid.append(start_p[1])
print("start centroid",centroid)
for _ in range(k-1):
dis = []
for point in points:
#calculate
tmp_ds = []
for i, c in enumerate(centroid):
if point != c:
# print("point",point)
# print("c",c)
tmp_ds.append((euclidean(point, (0,c)), i))
min_ds = min(tmp_ds)
dis.append((min_ds[0], point))
max_c = max(dis, key = lambda x:x[0])
centroid.append(max_c[1][1])
#--
time_ = 0
point_sp = sc.parallelize(points).cache()
cur_conv = 1
# print('--------', centroid)
while time_ < 50 and cur_conv > conv :
time_ += 1
print('kmeans,find clusters')
# tmp_clusters = point_sp.map(lambda x: (find_cluster(x, centroid)))
# clu_list = tmp_clusters.collect()
clu_list = map(lambda x: (find_cluster(x, centroid)), points)
clusters = defaultdict(list)
for k, v in clu_list:
clusters[k].append(v)
nc = {}
for k, v in clusters.items():
res = find_centroid(v)
nc[k] = res
new_centroid = list(nc.values())
# print('++++++++++++++++++++=', new_centroid)
# clusters = sc.parallelize(clusters)\
# .groupByKey()
# clusters = sp_points.map(lambda x: (find_near_c(x, centroids)))
# new_cents = clusters.groupByKey().mapValues(cal_centroid).map(lambda x: x[1])
# nc = sc.parallelize(clusters.items()).mapValues(find_centroid).map(lambda x: x[1])
# print(nc.count())
# new_centroid = nc.collect()
# nc.unpersist()
        new_centroid = comp(new_centroid, centroid)
        cur_conv = cal_conv(centroid, new_centroid)  # centroid shift drives the convergence test
        centroid = new_centroid
# res = clusters.mapValues(list).collect()
# res_clusters = dict(res)
res_clusters = clusters
return res_clusters
def point_addition(a, b, pow_):
res = []
for i in range(len(a)):
tmp = a[i] + math.pow(b[i],pow_)
res.append(tmp)
return res
def | (cluster,DIM):
_sum = []
_sumq = []
for _ in range(DIM):
_sum.append(0)
_sumq.append(0)
for point in cluster:
_sum = point_addition(_sum, point[1],1)
_sumq = point_addition(_sumq, point[1],2)
lv = len(cluster)
res = [lv, _sum, _sumq]
return res
def merge_list(a,b,DIM):
n1, su1, suq1 = a
n2, su2, suq2 = b
n = n1 + n2
su = point_addition(su1, su2,1)
    suq = point_addition(suq1, suq2, 1)  # suq2 already holds squared sums; do not square it again
flags = []
for i in range(DIM):
tmp = suq[i]/n - math.pow(su[i]/n,2)
flags.append(math.sqrt(tmp) < 3*math.sqrt(DIM))
l = [n,su,suq]
return (all(flags), l)
#def out_2(out_path, data):
#print("writing into out2......")
#with open(out_path, 'w') as f:
#f.write('round_id,nof_cluster_discard,nof_point_discard,nof_cluster_compression,nof_point_compression,nof_point_retained\n')
#3for item in data:
# item = [str(x) for x in item]
# f.write(','.join(item) + '\n')
if __name__ == '__main__':
input_path = sys.argv[1]
n_clusters = int(sys.argv[2])
output_file1 = sys.argv[3]
output_file2 = sys.argv[4]
#with open(output_file2, 'w') as f:
#f.write('round_id,nof_cluster_discard,nof_point_discard,nof_cluster_compression,nof_point_compression,nof_point_retained\n')
OUTPUT1 = []
OUTPUT2 = []
#global POINTS
#global DIM
#global SAMPLE_NUM
SAMPLE_NUM = 40
#global DS
DS = []
#global DS_CLUSTER
DS_CLUSTER = []
#global RS
    RS = set()  # retained set: ids of points not yet summarized
#global RS_DICT
RS_DICT = {}
#global CS
CS = []
#global CS_CLUSTER
CS_CLUSTER = []
FILES = []
for file in Path(input_path).rglob('*.txt'):
if file.is_file():
FILES.append(file)
FILES = sorted(FILES)
#FILES = sorted(os.listdir(input_path))
print(FILES)
for i, filename in enumerate(FILES, start=1):
# filename = str(filename)
if i == 1:
#-- init points--
print(filename)
points = read_file(filename)
POINTS = points
le = len(points[0][1])
DIM = le - 1
#--- init bfr ---
if len(POINTS)//5 > SAMPLE_NUM:
n_sample = SAMPLE_NUM
else: n_sample = len(POINTS)//5
sample_points = random.sample(POINTS, n_sample)
clusters = kmeans(sample_points, n_clusters)
for cluster in clusters.values():
DS.append(cal_cluster_squ(cluster,DIM))
idx = []
for point in cluster:
idx.append(point[0])
DS_CLUSTER.append(idx)
#cal nonsample
Sample_set = {point[0] for point in sample_points}
non_sample = []
for point in POINTS:
if point[0] not in Sample_set:
non_sample.append(point)
new_clusters = kmeans(non_sample, 3*n_clusters)
for cluster in new_clusters.values():
if len(cluster) == 1:
RS.add(cluster[0][0])
RS_DICT[cluster[0][0]] = cluster[0]
else:
CS.append(cal_cluster_squ(cluster, DIM))
idx = []
for point in cluster:
idx.append(point[0])
CS_CLUSTER.append(idx)
else:
print(filename)
# add point
points = read_file(filename)
for point in points:
sign_ds = False
sign_cs = False
for j in range(len(DS)):
#cal
tmp_n, tmp_su, tmp_suq = DS[j]
tmp_md, tmp_sd = 0,0
for k in range(DIM):
nmt = point[1][k] - tmp_su[k]/tmp_n
dnmt = math.sqrt(abs(tmp_suq[k] / tmp_n - (tmp_su[k] / tmp_n) ** 2))
if dnmt != 0:
tmp_md += (nmt / dnmt) ** 2
else: tmp_md += nmt **2
tmp_sd += dnmt
md = math.sqrt(tmp_md)
n, su, suq = DS[j]
if md < math.sqrt(DIM)*3:
n+=1
su = point_addition(su, point[1],1)
suq = point_addition(suq, point[1],2)
tmpr = [n,su,suq]
DS[j] = tmpr
DS_CLUSTER[j].append(point[0])
sign_ds = True
break
if not sign_ds:
for j in range(len(CS)):
#cal
tmp_n, tmp_su, tmp_suq = CS[j]
tmp_md, tmp_sd = 0,0
for k in range(DIM):
                            nmt = point[1][k] - tmp_su[k]/tmp_n

# Nearest-centroid lookup used by kmeans() above. The signature is inferred
# from the call sites (find_cluster(x, centroid)); it returns the index of the
# closest centroid paired with the point itself.
def find_cluster(point, centroid):
    ds = []
    # print(point)
    for i, c in enumerate(centroid):
        if point != c:
            # print(c)
            if c == 0:
                print(point, centroid)
            eud = euclidean(point, (0,c))
            ds.append((eud,i))
    min_ds = min(ds)
    return (min_ds[1], point)
|
DoGrouping.py | # find all possible groupings between one label and all other labels
d1 = labelObj.dCollocates
d1.update(labelObj.dCollocatesOther)
lsPossible = []
for key2, labelObj2 in spreadObj2.dLabels.items():
# already paired this label...
#if same label just set cosine similarity to 1
c_sim = 0
if labelObj.strTextAfterChanges == labelObj2.strTextAfterChanges:
c_sim = 1
else:
d2 = labelObj2.dCollocates
d2.update(labelObj2.dCollocatesOther)
c_sim = utils.cosine_sim(d1, d2)
lsPossible.append([labelObj2, c_sim])
lsObjAll = self.findGrouped(labelObj, lsPossible)
if len(lsObjAll) == 0:
continue
dAll[labelObj] = lsObjAll
if not spreadObj in dAllCombos.keys():
dAllCombos[spreadObj] = {}
dAllCombos[spreadObj][spreadObj2] = dAll
i += 1
return dAllCombos
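    # Each (spreadsheet, spreadsheet2) pair maps a label to its candidate
    # matches with cosine scores. Note that d1.update(labelObj.dCollocatesOther)
    # above mutates labelObj.dCollocates in place; a non-destructive variant
    # would be d1 = dict(labelObj.dCollocates, **labelObj.dCollocatesOther).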
def findMaxGroup(self, lsMatrix, dMapRev):
# print lsMatrix
# print dMapRev
global MIN_COSINE
lsGrouping = []
lsGroupingIndex = []
lsGroupingScore = []
for i in range(0, len(lsMatrix)):
# get max num in each column
lsCol = []
for j in range(i, len(lsMatrix)):
currElem = lsMatrix[j][i]
lsCol.append(currElem)
maxNum = max(lsCol)
# figure out grouping
if maxNum >= MIN_COSINE:
# col (have to add i since its a staggered matrix)
maxIndex = i + lsCol.index(maxNum)
# look if already stored as grouping, if so overwrite
found = False
for lIndex, lsTemp in enumerate(lsGroupingIndex):
if maxIndex in lsTemp or i in lsTemp: # if in there
                    # if the new pairing scores higher than the stored one, update it
if lsGroupingScore[lIndex] < maxNum:
lsGrouping[lIndex] = [dMapRev[i], dMapRev[maxIndex]]
lsGroupingIndex[lIndex] = [i, maxIndex]
lsGroupingScore[lIndex] = maxNum
                    found = True
                    break
if not found:
lsGrouping.append([dMapRev[i], dMapRev[maxIndex]])
lsGroupingIndex.append([i, maxIndex])
lsGroupingScore.append(maxNum)
# delete groupings
for lsTemp in lsGroupingIndex:
if lsTemp[0] in dMapRev:
del dMapRev[lsTemp[0]]
if lsTemp[1] in dMapRev:
del dMapRev[lsTemp[1]]
# now add singles
for key, val in dMapRev.items():
lsGrouping.append([val])
return lsGrouping
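    # findMaxGroup works on a staggered (lower-triangular) matrix: entry
    # lsMatrix[j][i] with j >= i holds the cosine score between labels i and j.
    # For each column it greedily keeps the single best pairing at or above
    # MIN_COSINE, letting a later, higher-scoring pair overwrite an earlier
    # one; everything left unpaired is emitted as a singleton group.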
def okayGroup(self, ls1, ls2):
# find if the two lists share an element
lsNew = copy.copy(ls1)
lsNew.extend(ls2)
# if they share an element good!
if len(list(set(lsNew))) < len(lsNew):
            # check to see if the remaining elements are in the
            # same spreadsheet; if so, not good
lsSet1 = copy.copy(set(ls1))
lsSet2 = copy.copy(set(ls2))
lsNew2 = list(lsSet1.symmetric_difference(lsSet2))
# print lsNew2
lsSpread = []
for obj in lsNew2:
if obj.strSpreadsheetName in lsSpread:
return False
lsSpread.append(obj.strSpreadsheetName)
# okay to merge
return True
# nothing in common, don't merge
return False
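    # okayGroup allows two groups to merge only when they share at least one
    # label object and the remaining (non-shared) labels all come from
    # different spreadsheets -- one spreadsheet should contribute at most one
    # label per merged field.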
def findName(self,ls1):
my_list = [len(labelC.strOrigText) for labelC in ls1]
val, indexMin = min((val, idx) for (idx, val) in enumerate(my_list))
newName = ls1[indexMin].strOrigText
for labelC in ls1:
labelC.mergedText = newName
return ls1
def doGrouping(self, dCombos):
# turn into a matrix
# assign each label a number
# add this for easy lookup
# dGrouping={}
print 'merging spreadsheets'
lsGroupingAll = []
for spread1, dTemp2 in dCombos.items():
for spread2, dTemp in dTemp2.items():
# iterate through all labels for the two spreadsheets
# set up dNumsRev and dNums
dNumsRev = {}
dNums = {}
i = 0
for key, val in dTemp.items():
if len(val) == 0:
continue
if not key in dNums:
dNums[key] = i
dNumsRev[i] = key
i += 1
for label in val:
if not label[0] in dNums:
dNums[label[0]] = i
dNumsRev[i] = label[0]
i += 1
# create matrix, fill in 0s for this spreadsheet combo
lsMatrix = []
for j in range(0, i):
lsInner = []
for k in range (0, j + 1):
lsInner.append(0)
lsMatrix.append(lsInner)
# fill in matrix with cos
for key, val in dTemp.items():
index1 = dNums[key]
for label in val:
index2 = dNums[label[0]]
cos = label[1]
if index1 > index2:
lsMatrix[index1][index2] = cos
else:
lsMatrix[index2][index1] = cos
lsGrouping = self.findMaxGroup(lsMatrix, dNumsRev)
lsGroupingAll.append(lsGrouping)
# look through all combos of labels in different spreadsheets
lsMerged = []
lsAlone = []
for lsGrouping in lsGroupingAll:
lsGroupingS1 = [lsObj for lsObj in lsGrouping if len(lsObj) > 1]
lsAlone.extend([lsObj[0] for lsObj in lsGrouping if len(lsObj) == 1])
if len(lsMerged) == 0:
lsMerged = lsGroupingS1
continue
while len(lsGroupingS1) > 0:
append = False
lsPair = lsGroupingS1.pop(0)
# look through every merged pair
for i, lsObj in enumerate(lsMerged):
if self.okayGroup(lsObj, lsPair) == True:
lsMerged[i].extend(lsPair)
lsMerged[i] = list(set(lsMerged[i]))
append = True
if not append:
lsMerged.append(lsPair)
lsMerged = [list(set(lsObj)) for lsObj in lsMerged]
#create new name
lsMerged2=[]
for ls1 in lsMerged:
ls2 = self.findName(ls1)
lsMerged2.extend(ls2)
return lsMerged2, list(set(lsAlone))
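    # doGrouping therefore returns two lists: label objects merged across
    # spreadsheets (with mergedText set by findName) and labels that remained
    # unmatched singletons.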
def averagePosition(self, lsMerged):
count = 0
total = 0
for lsObj in lsMerged:
total += lsObj.iPlace
count += 1
return total / float(count)
def pickName(self,lsObj):
maxObj = lsObj[0]
for obj in lsObj[1:]:
if len(obj.strTextAfterChanges) < len(maxObj.strTextAfterChanges) and len(obj.strTextAfterChanges) > 0:
maxObj = obj
return maxObj.strTextAfterChanges
def getNewNames(self,lsLabels):
lsNewNames=[]
for lsObj in lsLabels:
if len(lsObj) == 1:
lsNewNames.append(lsObj[0].strTextAfterChanges)
else:
lsNewNames.append(self.pickName(lsObj))
return lsNewNames
def makeSpreadsheet(self, lsMerged, lsAlone):
# get the average position for each group and alone order fields
lsNew = copy.copy(lsMerged)
lsNew.extend([[obj] for obj in lsAlone])
#lsPlace = [self.averagePosition(lsObj) for lsObj in lsNew]
#lsSort = sorted(zip(lsPlace, lsNew))
#lsPlace,lsNew = zip(*lsSort)
#get new names for labels (better or merged)
#lsNames = self.getNewNames(lsNew)
for obj in lsNew:
for obj2 in obj:
print obj2
#print name
print "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"
print ""
|
|
def writeSpreadsheet(self,lsMerged,lsAlone,output_name):
print 'writing master spreadsheet'
export_file = open(pySet.OUTPUT_PATH + '{}-values.csv'.format(output_name), 'w+')
max_num = max([len(x.lsOrigColumnValues) for x in lsMerged] + [len(x.lsOrigColumnValues) for x in lsAlone])
for i in xrange(max_num+2):
for label in lsMerged:
if i==0:
export_file.write('{},{},'.format(label.strOrigText,label.mergedText))
elif i==1:
export_file.write('{},,'.format(label.strSpreadsheetName.split("/")[-1]))
                else:
                    try:
                        export_file.write('{},,'.format(label.lsOrigColumnValues[i-2]))
                    except IndexError:
                        export_file.write(',,')
for label in lsAlone:
if i==0:
export_file.write('{},{},'.format(label.strOrigText,label.strOrigText))
elif i==1:
export_file.write('{},,'.format(label.strSpreadsheetName.split("/")[-1]))
                else:
                    try:
                        export_file.write('{},,'.format(label.lsOrigColumnValues[i-2]))
                    except IndexError:
                        export_file.write(',,')
|
DoGrouping.py | rs = ReadSpreadsheets()
        print 'reading spreadsheets'
        rs.readSpreadsheets(lsSpreadsheets)
        dAllCombos = {}
# dAll2 = {}
print 'comparing spreadsheets'
i = 0
for spreadObj in rs.lsSpreadsheetObjs[0:-1]:
# print spreadObj
for spreadObj2 in rs.lsSpreadsheetObjs[i + 1:]:
dAll = {}
for key, labelObj in spreadObj.dLabels.items():
#print labelObj
# find all possible groupings between one label and all other labels
d1 = labelObj.dCollocates
d1.update(labelObj.dCollocatesOther)
lsPossible = []
for key2, labelObj2 in spreadObj2.dLabels.items():
# already paired this label...
#if same label just set cosine similarity to 1
c_sim = 0
if labelObj.strTextAfterChanges == labelObj2.strTextAfterChanges:
c_sim = 1
else:
d2 = labelObj2.dCollocates
d2.update(labelObj2.dCollocatesOther)
c_sim = utils.cosine_sim(d1, d2)
lsPossible.append([labelObj2, c_sim])
lsObjAll = self.findGrouped(labelObj, lsPossible)
if len(lsObjAll) == 0:
continue
dAll[labelObj] = lsObjAll
if not spreadObj in dAllCombos.keys():
dAllCombos[spreadObj] = {}
dAllCombos[spreadObj][spreadObj2] = dAll
i += 1
return dAllCombos
|
parsers.py | from app.parse.constants import FIT_DATA_ACTIVITY_RECORD, FIT_DATA_GEAR, FIT_DATA_ACTIVITY, PHOTO_DATA_OTHER, \
    PHOTO_DATA_MAKER, \
    PHOTO_DATA_THUMBNAIL, PHOTO_DATA_EXIF, PHOTO_DATA_GPS, PHOTO_DATA_IMAGE, FIT_DATA_TRAVELLER, FIT_DATA_UNCLASSIFIED
from app.utils.filesystem import get_relative_path
from app.utils.mongodb import mongodb
logger = logging.getLogger(__name__)
class Parser(metaclass=abc.ABCMeta):
class FileParsingError(PyrError):
pass
_DEFAULT_SAMPLING_FREQUENCY = 60
_COORDINATE_SYSTEM = COORDINATE_SYSTEM_WGS84
_result = None
_sampling_frequency: int
_file_path: str
def __init__(self, file):
self._get_file_path(file)
def _get_file_path(self, obj):
if obj is None:
raise PyrTypeError('file path should not be None')
elif isinstance(obj, str):
file_path = obj
else:
raise PyrTypeError('file path should be a string or File/Media object, but got %r' % obj)
self._file_path = file_path
@abc.abstractmethod
def _parse(self):
pass
def _file_clean(self, mute=True):
if not os.access(self._file_path, os.F_OK):
if mute:
return False
else:
raise PyrTypeError('file %s not found!' % self._file_path)
if not os.access(self._file_path, os.R_OK):
if mute:
return False
else:
raise PyrTypeError('%s is unreadable' % self._file_path)
return True
@staticmethod
def _get_data_from_model(record: dict, model_cls, skip_unorganized=DATA_MODEL['SAVE_UNCLASSIFIED']):
if not hasattr(model_cls, '__new__'):
            logger.debug('Model class must be a class, but got %s' % type(model_cls))
return None
obj = model_cls.__new__(model_cls)
if not isinstance(obj, DataModel):
            logger.debug('Model class must be a sub-class of DataModel, but got %s' % type(obj))
return None
properties = dir(model_cls)
keys = record.keys()
if skip_unorganized:
valid_keys = list(set(keys).intersection(set(properties)))
else:
valid_keys = keys
data = {}
unorganized = {}
for k in valid_keys:
            if skip_unorganized:
                data.update({k: record[k]})
            else:
                if k.startswith('unknown'):  # skip undocumented 'unknown*' fields from the FIT file
                    continue
                if k in properties:
                    data.update({k: record[k]})
                else:
                    unorganized.update({k: record[k]})
if len(unorganized) != 0:
data.update({'_unorganized': unorganized})
obj.set_data(data)
return obj
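    # A minimal usage sketch (hypothetical record; Gear is one of the imported
    # DataModel subclasses):
    #
    #   obj = Parser._get_data_from_model({'manufacturer': 'garmin'}, Gear)
    #   # known keys land on the model; the rest go under '_unorganized'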
def parse(self):
        # check that the file exists and can be read
self._file_clean()
return self._parse()
def save(self, extra_data: Dict = None, collection=mongodb.Collections.MEDIA_PARSED_DATA):
        if extra_data is not None and not isinstance(extra_data, dict):
            raise PyrTypeError('Extra data should be a dict object, but got %s' % type(extra_data))
data_for_store = {'data': self._result, 'path': get_relative_path(self._file_path)}
if extra_data is not None:
data_for_store.update(extra_data)
return mongodb.insert(collection, data_for_store)
class FitParser(Parser):
    _activity_record: list = []
    _gears: list = []
    _activity: list = []
    _traveller: list = []
    _unclassified: list = []
def _parse(self):
try:
            fit_file = fitparse.FitFile(self._file_path)
data = fit_file.get_messages()
except fitparse.FitParseError as err:
raise self.FileParsingError(err)
for item in data:
value = item.get_values()
if item.name.lower() in FIT_DATA_ACTIVITY_RECORD[1]:
self._parse_activity_record(value)
elif item.name.lower() in FIT_DATA_GEAR[1]:
self._parse_gear(value)
elif item.name.lower() in FIT_DATA_ACTIVITY[1]:
self._parse_activity(value)
            elif item.name.lower() in FIT_DATA_TRAVELLER[1]:
                self._parse_traveller(value)
else:
self._parse_misc(value)
        self._result = {FIT_DATA_ACTIVITY_RECORD[0]: self._activity_record,
                        FIT_DATA_GEAR[0]: self._gears,
                        FIT_DATA_ACTIVITY[0]: self._activity,
                        FIT_DATA_TRAVELLER[0]: self._traveller,
                        FIT_DATA_UNCLASSIFIED[0]: self._unclassified,
                        }
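        # The FIT_DATA_* constants appear to be (key, message-names) pairs:
        # index [0] is the bucket name used here, [1] the set of FIT message
        # names matched in the dispatch above.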
def _parse_activity_record(self, record):
"""
parse fit activity record to geographic and misc data
:param record: {
"timestamp": "2019-09-27 00:32:11",
"positionLat": 358587055,
"positionLong": 1156290179,
"distance": 2.09,
"enhancedAltitude": 3284.0,
"altitude": 18920,
"enhancedSpeed": 2.641,
"speed": 2641,
"unknown61": 18920,
"unknown66": 2236,
"temperature": 5
},
:return:
"""
timestamp = record.pop('timestamp', None)
coordinate = Coordinate({'latitude': record.pop('positionLong', None),
'longitude': record.pop('positionLat', None),
'altitude': record.pop('enhancedAltitude', None),
'datum': self._COORDINATE_SYSTEM})
physiologic = Physiologic({'speed': mps_to_kph(record.pop('enhancedSpeed', None))})
environment = Environment({'temperature': record.pop('temperature', None)})
coordinate.set_time(timestamp, 'UTC')
physiologic.set_time(timestamp, 'UTC')
environment.set_time(timestamp, 'UTC')
activity_record = {}
if coordinate.is_valid():
activity_record.update({'coordinate': coordinate.__dict__})
if physiologic.is_valid():
activity_record.update({'physiologic': physiologic.__dict__})
if environment.is_valid():
activity_record.update({'environment': environment.__dict__})
self._activity_record.append(activity_record)
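        # Note: raw FIT lat/long values (e.g. 358587055) are semicircles;
        # degrees = semicircles * (180 / 2**31). The Coordinate model is
        # assumed to normalise this, since no conversion happens here.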
def _parse_gear(self, record):
timestamp = record.pop('timestamp', None)
gear = self._get_data_from_model(record, Gear)
gear.set_time(timestamp, 'UTC')
if gear.is_valid():
            self._gears.append(gear.__dict__)
def _parse_activity(self, record):
timestamp = record.pop('timestamp', None)
start_position = Coordinate({'latitude': record.pop('start_position_lat', None),
'longitude': record.pop('start_position_long', None),
'datum': self._COORDINATE_SYSTEM,
'timestamp': timestamp})
nec_position = Coordinate({'latitude': record.pop('nec_lat', None),
'longitude': record.pop('nec_long', None),
'datum': self._COORDINATE_SYSTEM,
'timestamp': timestamp})
swc_position = Coordinate({'latitude': record.pop('swc_lat', None),
'longitude': record.pop('swc_long', None),
'datum': self._COORDINATE_SYSTEM,
'timestamp': timestamp})
activity_data = {'start_position': start_position.position,
'nec_position': nec_position.position,
'swc_position': swc_position.position,
'avg_speed': mps_to_kph(record.pop('enhanced_avg_speed', None)),
'max_speed': mps_to_kph(record.pop('enhanced_max_speed', None)),
}
activity = self._get_data_from_model(record, Activity)
activity.set_data(activity_data)
activity.set_time(timestamp, 'UTC')
if activity.is_valid():
self._activity.append(activity.__dict__)
def _parse_traveller(self, record):
traveller = self._get_data_from_model(record, TravellerProfile)
traveller.set_time(None, None, skip=True)
if traveller.is_valid():
self._traveller.append(traveller.__dict__)
def _parse_misc(self, record):
unclassified = self._get_data_from_model(record, Unclassified, skip_unorganized=False)
unclassified.set_time(None, None, True)
if unclassified.is_valid():
self._unclassified.append(unclassified.__dict__)
class PhotoParser(Parser):
def _parse(self):
return self.parse_exif(self._file_path)
@staticmethod
    def parse_exif(file_path):
        f = open(file_path, 'rb')
tags = exifread.process_file(f)
image_info = {}
gps_info = {}
thumbnail_info = {}
maker_info = {}
exit_info = {}
other_info = {}
for tag in tags:
key_array = tag.split(' ')
category = key_array[0].lower()
key = category if len(key_array) == 1 else '_'.join(key_array[1:])
value = tags[tag].values if isinstance(tags[tag], exifread.IfdTag) else str(tags[tag])
# store base64 encoded thumbnail
if tag.lower() == 'jpegthumbnail':
base64_thumbnail = base64.b64encode(tags[tag])
                thumbnail_info.update({'base64': str(base64_thumbnail, 'utf-8')})
            item = {key: value}
            if category in PHOTO_DATA_EXIF[1]:
                exit_info.update(item)
            elif category in PHOTO_DATA_GPS[1]:
                gps_info.update(item)
|
parsers.py | Meta):
class FileParsingError(PyrError):
pass
_DEFAULT_SAMPLING_FREQUENCY = 60
_COORDINATE_SYSTEM = COORDINATE_SYSTEM_WGS84
_result = None
_sampling_frequency: int
_file_path: str
def __init__(self, file):
self._get_file_path(file)
def _get_file_path(self, obj):
if obj is None:
raise PyrTypeError('file path should not be None')
elif isinstance(obj, str):
file_path = obj
else:
raise PyrTypeError('file path should be a string or File/Media object, but got %r' % obj)
self._file_path = file_path
@abc.abstractmethod
def _parse(self):
pass
def _file_clean(self, mute=True):
if not os.access(self._file_path, os.F_OK):
if mute:
return False
else:
raise PyrTypeError('file %s not found!' % self._file_path)
if not os.access(self._file_path, os.R_OK):
if mute:
return False
else:
raise PyrTypeError('%s is unreadable' % self._file_path)
return True
@staticmethod
def _get_data_from_model(record: dict, model_cls, skip_unorganized=DATA_MODEL['SAVE_UNCLASSIFIED']):
if not hasattr(model_cls, '__new__'):
logger.debug('Model class must be a class, bug got %s' % type(model_cls))
return None
obj = model_cls.__new__(model_cls)
if not isinstance(obj, DataModel):
logger.debug('Model class must be a sub-class of DataModel, bug got %s' % type(obj))
return None
properties = dir(model_cls)
keys = record.keys()
if skip_unorganized:
valid_keys = list(set(keys).intersection(set(properties)))
else:
valid_keys = keys
data = {}
unorganized = {}
for k in valid_keys:
if skip_unorganized:
data.update({k, record[k]})
else:
if k.startswith('unknown'): # k.startswith('unknown') is for handle unknown fields in FIT file
continue
if k in properties:
data.update({k, record[k]})
else:
unorganized.update({k, record[k]})
if len(unorganized) != 0:
data.update({'_unorganized': unorganized})
obj.set_data(data)
return obj
def parse(self):
# check file exit and can be read
self._file_clean()
return self._parse()
def save(self, extra_data: Dict = None, collection=mongodb.Collections.MEDIA_PARSED_DATA):
if not isinstance(extra_data, Dict):
raise PyrTypeError('Extra data should be a Dict object, bug got %s' % type(extra_data))
data_for_store = {'data': self._result, 'path': get_relative_path(self._file_path)}
if extra_data is not None:
data_for_store.update(extra_data)
return mongodb.insert(collection, data_for_store)
class FitParser(Parser):
_activity_record: [] = []
_gears: [] = []
_activity: [] = []
_traveller: [] = []
_unclassified: [] = []
def _parse(self):
try:
fit_file = fitparse.FitFile(self.file_path)
data = fit_file.get_messages()
except fitparse.FitParseError as err:
raise self.FileParsingError(err)
for item in data:
value = item.get_values()
if item.name.lower() in FIT_DATA_ACTIVITY_RECORD[1]:
self._parse_activity_record(value)
elif item.name.lower() in FIT_DATA_GEAR[1]:
self._parse_gear(value)
elif item.name.lower() in FIT_DATA_ACTIVITY[1]:
self._parse_activity(value)
elif item.name.lower() in FIT_DATA_TRAVELLER[1]:
self._parse_activity(value)
else:
self._parse_misc(value)
self._result = {FIT_DATA_ACTIVITY_RECORD(0): self._activity_record,
FIT_DATA_GEAR(0): self._gears,
FIT_DATA_ACTIVITY(0): self._activity,
FIT_DATA_TRAVELLER(0): self._traveller,
FIT_DATA_UNCLASSIFIED(0): self._unclassified,
}
def _parse_activity_record(self, record):
"""
parse fit activity record to geographic and misc data
:param record: {
"timestamp": "2019-09-27 00:32:11",
"positionLat": 358587055,
"positionLong": 1156290179,
"distance": 2.09,
"enhancedAltitude": 3284.0,
"altitude": 18920,
"enhancedSpeed": 2.641,
"speed": 2641,
"unknown61": 18920,
"unknown66": 2236,
"temperature": 5
},
:return:
"""
timestamp = record.pop('timestamp', None)
coordinate = Coordinate({'latitude': record.pop('positionLong', None),
'longitude': record.pop('positionLat', None),
'altitude': record.pop('enhancedAltitude', None),
'datum': self._COORDINATE_SYSTEM})
physiologic = Physiologic({'speed': mps_to_kph(record.pop('enhancedSpeed', None))})
environment = Environment({'temperature': record.pop('temperature', None)})
coordinate.set_time(timestamp, 'UTC')
physiologic.set_time(timestamp, 'UTC')
environment.set_time(timestamp, 'UTC')
activity_record = {}
if coordinate.is_valid():
activity_record.update({'coordinate': coordinate.__dict__})
if physiologic.is_valid():
activity_record.update({'physiologic': physiologic.__dict__})
if environment.is_valid():
activity_record.update({'environment': environment.__dict__})
self._activity_record.append(activity_record)
def _parse_gear(self, record):
timestamp = record.pop('timestamp', None)
gear = self._get_data_from_model(record, Gear)
gear.set_time(timestamp, 'UTC')
if gear.is_valid():
self._activity.append(gear.__dict__)
def _parse_activity(self, record):
timestamp = record.pop('timestamp', None)
start_position = Coordinate({'latitude': record.pop('start_position_lat', None),
'longitude': record.pop('start_position_long', None),
'datum': self._COORDINATE_SYSTEM,
'timestamp': timestamp})
nec_position = Coordinate({'latitude': record.pop('nec_lat', None),
'longitude': record.pop('nec_long', None),
'datum': self._COORDINATE_SYSTEM,
'timestamp': timestamp})
swc_position = Coordinate({'latitude': record.pop('swc_lat', None),
'longitude': record.pop('swc_long', None),
'datum': self._COORDINATE_SYSTEM,
'timestamp': timestamp})
activity_data = {'start_position': start_position.position,
'nec_position': nec_position.position,
'swc_position': swc_position.position,
'avg_speed': mps_to_kph(record.pop('enhanced_avg_speed', None)),
'max_speed': mps_to_kph(record.pop('enhanced_max_speed', None)),
}
activity = self._get_data_from_model(record, Activity)
activity.set_data(activity_data)
activity.set_time(timestamp, 'UTC')
if activity.is_valid():
self._activity.append(activity.__dict__)
def _parse_traveller(self, record):
traveller = self._get_data_from_model(record, TravellerProfile)
traveller.set_time(None, None, skip=True)
if traveller.is_valid():
self._traveller.append(traveller.__dict__)
def _parse_misc(self, record):
unclassified = self._get_data_from_model(record, Unclassified, skip_unorganized=False)
unclassified.set_time(None, None, True)
if unclassified.is_valid():
self._unclassified.append(unclassified.__dict__)
class PhotoParser(Parser):
def _parse(self):
return self.parse_exif(self._file_path)
@staticmethod
def parse_exif(file_path):
f = open(file_path, 'rb')
tags = exifread.process_file(f)
image_info = {}
gps_info = {}
thumbnail_info = {}
maker_info = {}
exit_info = {}
other_info = {}
for tag in tags:
key_array = tag.split(' ')
category = key_array[0].lower()
key = category if len(key_array) == 1 else '_'.join(key_array[1:])
value = tags[tag].values if isinstance(tags[tag], exifread.IfdTag) else str(tags[tag])
# store base64 encoded thumbnail
if tag.lower() == 'jpegthumbnail':
base64_thumbnail = base64.b64encode(tags[tag])
thumbnail_info.update({'base64': str(base64_thumbnail, 'utf-8')})
item = {key: value}
if category in PHOTO_DATA_EXIF[1]:
exif_info.update(item)
elif category in PHOTO_DATA_GPS[1]:
| gps_info.update(item) | conditional_block |
|
parsers.py | ographic.models import Coordinate
from app.parse.constants import FIT_DATA_ACTIVITY_RECORD, FIT_DATA_GEAR, FIT_DATA_ACTIVITY, PHOTO_DATA_OTHER, \
PHOTO_DATA_MAKER, \
PHOTO_DATA_THUMBNAIL, PHOTO_DATA_EXIF, PHOTO_DATA_GPS, PHOTO_DATA_IMAGE, FIT_DATA_TRAVELLER, FIT_DATA_UNCLASSIFIED
from app.utils.filesystem import get_relative_path
from app.utils.mongodb import mongodb
logger = logging.getLogger(__name__)
class Parser(metaclass=abc.ABCMeta):
class FileParsingError(PyrError):
pass
_DEFAULT_SAMPLING_FREQUENCY = 60
_COORDINATE_SYSTEM = COORDINATE_SYSTEM_WGS84
_result = None
_sampling_frequency: int
_file_path: str
def __init__(self, file):
self._get_file_path(file)
def _get_file_path(self, obj):
if obj is None:
raise PyrTypeError('file path should not be None')
elif isinstance(obj, str):
file_path = obj
else:
raise PyrTypeError('file path should be a string or File/Media object, but got %r' % obj)
self._file_path = file_path
@abc.abstractmethod
def _parse(self):
pass
def _file_clean(self, mute=True):
if not os.access(self._file_path, os.F_OK):
if mute:
return False
else:
raise PyrTypeError('file %s not found!' % self._file_path)
if not os.access(self._file_path, os.R_OK):
if mute:
return False
else:
raise PyrTypeError('%s is unreadable' % self._file_path)
return True
@staticmethod
def _get_data_from_model(record: dict, model_cls, skip_unorganized=DATA_MODEL['SAVE_UNCLASSIFIED']):
if not isinstance(model_cls, type):
logger.debug('Model class must be a class, but got %s' % type(model_cls))
return None
obj = model_cls.__new__(model_cls)
if not isinstance(obj, DataModel):
logger.debug('Model class must be a sub-class of DataModel, but got %s' % type(obj))
return None
properties = dir(model_cls)
keys = record.keys()
if skip_unorganized:
valid_keys = list(set(keys).intersection(set(properties)))
else:
valid_keys = keys
data = {}
unorganized = {}
for k in valid_keys:
if skip_unorganized:
data.update({k: record[k]})
else:
if k.startswith('unknown'): # k.startswith('unknown') is for handle unknown fields in FIT file
continue
if k in properties:
data.update({k: record[k]})
else:
unorganized.update({k: record[k]})
if len(unorganized) != 0:
data.update({'_unorganized': unorganized})
obj.set_data(data)
return obj
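# Illustrative sketch (hypothetical field names): for record = {'serial': 1, 'foo': 2}
# and a model exposing only 'serial', skip_unorganized=True yields data == {'serial': 1},
# while skip_unorganized=False additionally stores {'_unorganized': {'foo': 2}}.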
def parse(self):
# check file exit and can be read
self._file_clean()
return self._parse()
def save(self, extra_data: Dict = None, collection=mongodb.Collections.MEDIA_PARSED_DATA):
if extra_data is not None and not isinstance(extra_data, Dict):
raise PyrTypeError('Extra data should be a Dict object, but got %s' % type(extra_data)) | data_for_store.update(extra_data)
return mongodb.insert(collection, data_for_store)
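# Usage sketch (paths and extra fields are hypothetical):
# parser = FitParser('/data/ride.fit')
# parser.parse()
# parser.save(extra_data={'media_id': 42})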
class FitParser(Parser):
def __init__(self, file):
super().__init__(file)
# per-instance lists so parser instances don't share accumulated state
self._activity_record = []
self._gears = []
self._activity = []
self._traveller = []
self._unclassified = []
def _parse(self):
try:
fit_file = fitparse.FitFile(self._file_path)
data = fit_file.get_messages()
except fitparse.FitParseError as err:
raise self.FileParsingError(err)
for item in data:
value = item.get_values()
if item.name.lower() in FIT_DATA_ACTIVITY_RECORD[1]:
self._parse_activity_record(value)
elif item.name.lower() in FIT_DATA_GEAR[1]:
self._parse_gear(value)
elif item.name.lower() in FIT_DATA_ACTIVITY[1]:
self._parse_activity(value)
elif item.name.lower() in FIT_DATA_TRAVELLER[1]:
self._parse_traveller(value)
else:
self._parse_misc(value)
self._result = {FIT_DATA_ACTIVITY_RECORD[0]: self._activity_record,
FIT_DATA_GEAR[0]: self._gears,
FIT_DATA_ACTIVITY[0]: self._activity,
FIT_DATA_TRAVELLER[0]: self._traveller,
FIT_DATA_UNCLASSIFIED[0]: self._unclassified,
}
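# The FIT_DATA_* constants are assumed to be (result_key, message_name_aliases)
# tuples, hence the [0] result keys above and the [1] membership checks in _parse().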
def _parse_activity_record(self, record):
"""
parse fit activity record to geographic and misc data
:param record: {
"timestamp": "2019-09-27 00:32:11",
"positionLat": 358587055,
"positionLong": 1156290179,
"distance": 2.09,
"enhancedAltitude": 3284.0,
"altitude": 18920,
"enhancedSpeed": 2.641,
"speed": 2641,
"unknown61": 18920,
"unknown66": 2236,
"temperature": 5
},
:return: None -- each parsed record is appended to self._activity_record
"""
timestamp = record.pop('timestamp', None)
coordinate = Coordinate({'latitude': record.pop('positionLat', None),
'longitude': record.pop('positionLong', None),
'altitude': record.pop('enhancedAltitude', None),
'datum': self._COORDINATE_SYSTEM})
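# Note (assumption): FIT files usually store positions as semicircles; if the
# Coordinate model expects degrees, a conversion like `value * (180 / 2 ** 31)`
# would be needed here -- the Coordinate implementation is not shown.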
physiologic = Physiologic({'speed': mps_to_kph(record.pop('enhancedSpeed', None))})
environment = Environment({'temperature': record.pop('temperature', None)})
coordinate.set_time(timestamp, 'UTC')
physiologic.set_time(timestamp, 'UTC')
environment.set_time(timestamp, 'UTC')
activity_record = {}
if coordinate.is_valid():
activity_record.update({'coordinate': coordinate.__dict__})
if physiologic.is_valid():
activity_record.update({'physiologic': physiologic.__dict__})
if environment.is_valid():
activity_record.update({'environment': environment.__dict__})
self._activity_record.append(activity_record)
def _parse_gear(self, record):
timestamp = record.pop('timestamp', None)
gear = self._get_data_from_model(record, Gear)
gear.set_time(timestamp, 'UTC')
if gear.is_valid():
self._gears.append(gear.__dict__)
def _parse_activity(self, record):
timestamp = record.pop('timestamp', None)
start_position = Coordinate({'latitude': record.pop('start_position_lat', None),
'longitude': record.pop('start_position_long', None),
'datum': self._COORDINATE_SYSTEM,
'timestamp': timestamp})
nec_position = Coordinate({'latitude': record.pop('nec_lat', None),
'longitude': record.pop('nec_long', None),
'datum': self._COORDINATE_SYSTEM,
'timestamp': timestamp})
swc_position = Coordinate({'latitude': record.pop('swc_lat', None),
'longitude': record.pop('swc_long', None),
'datum': self._COORDINATE_SYSTEM,
'timestamp': timestamp})
activity_data = {'start_position': start_position.position,
'nec_position': nec_position.position,
'swc_position': swc_position.position,
'avg_speed': mps_to_kph(record.pop('enhanced_avg_speed', None)),
'max_speed': mps_to_kph(record.pop('enhanced_max_speed', None)),
}
activity = self._get_data_from_model(record, Activity)
activity.set_data(activity_data)
activity.set_time(timestamp, 'UTC')
if activity.is_valid():
self._activity.append(activity.__dict__)
def _parse_traveller(self, record):
traveller = self._get_data_from_model(record, TravellerProfile)
traveller.set_time(None, None, skip=True)
if traveller.is_valid():
self._traveller.append(traveller.__dict__)
def _parse_misc(self, record):
unclassified = self._get_data_from_model(record, Unclassified, skip_unorganized=False)
unclassified.set_time(None, None, skip=True)
if unclassified.is_valid():
self._unclassified.append(unclassified.__dict__)
class PhotoParser(Parser):
def _parse(self):
return self.parse_exif(self._file_path)
@staticmethod
def parse_exif(file_path):
with open(file_path, 'rb') as f:
tags = exifread.process_file(f)
image_info = {}
gps_info = {}
thumbnail_info = {}
maker_info = {}
exif_info = {}
other_info = {}
for tag in tags:
key_array = tag.split(' ')
category = key_array[0].lower()
key = category if len(key_array) == 1 else '_'.join(key_array[1:])
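# e.g. tag "EXIF DateTimeOriginal" -> category "exif", key "DateTimeOriginal";
# a single-word tag such as "JPEGThumbnail" keeps key == category.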
value = tags[tag].values if isinstance(tags[tag], exifread.IfdTag |
data_for_store = {'data': self._result, 'path': get_relative_path(self._file_path)}
if extra_data is not None: | random_line_split |
compile.rs | Document;
use typst::eval::{eco_format, Tracer};
use typst::geom::Color;
use typst::syntax::{FileId, Source};
use typst::World;
use crate::args::{CompileCommand, DiagnosticFormat, OutputFormat};
use crate::watch::Status;
use crate::world::SystemWorld;
use crate::{color_stream, set_failed};
type CodespanResult<T> = Result<T, CodespanError>;
type CodespanError = codespan_reporting::files::Error;
impl CompileCommand {
/// The output path.
pub fn output(&self) -> PathBuf {
self.output
.clone()
.unwrap_or_else(|| self.common.input.with_extension("pdf"))
}
/// The format to use for generated output, either specified by the user or inferred from the extension.
///
/// Will return `Err` if the format was not specified and could not be inferred.
pub fn output_format(&self) -> StrResult<OutputFormat> {
Ok(if let Some(specified) = self.format {
specified
} else if let Some(output) = &self.output {
match output.extension() {
Some(ext) if ext.eq_ignore_ascii_case("pdf") => OutputFormat::Pdf,
Some(ext) if ext.eq_ignore_ascii_case("png") => OutputFormat::Png,
Some(ext) if ext.eq_ignore_ascii_case("svg") => OutputFormat::Svg,
_ => bail!("could not infer output format for path {}.\nconsider providing the format manually with `--format/-f`", output.display()),
}
} else {
OutputFormat::Pdf
})
}
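// e.g. `-o out.svg` infers Svg from the extension, an unrecognized extension
// bails with an error, and with neither `--format` nor an output path the
// fallback is Pdf.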
}
/// Execute a compilation command.
pub fn compile(mut command: CompileCommand) -> StrResult<()> {
let mut world = SystemWorld::new(&command.common)?;
compile_once(&mut world, &mut command, false)?;
Ok(())
}
/// Compile a single time.
///
/// Compilation errors are reported as diagnostics; `Err` signals only non-compilation failures (e.g. a failed export).
#[tracing::instrument(skip_all)]
pub fn compile_once(
world: &mut SystemWorld,
command: &mut CompileCommand,
watching: bool,
) -> StrResult<()> {
tracing::info!("Starting compilation");
let start = std::time::Instant::now();
if watching {
Status::Compiling.print(command).unwrap();
}
// Reset everything and ensure that the main file is present.
world.reset();
world.source(world.main()).map_err(|err| err.to_string())?;
let mut tracer = Tracer::default();
let result = typst::compile(world, &mut tracer);
let warnings = tracer.warnings();
match result {
// Export the PDF / PNG.
Ok(document) => {
export(&document, command)?;
let duration = start.elapsed();
tracing::info!("Compilation succeeded in {duration:?}");
if watching {
if warnings.is_empty() {
Status::Success(duration).print(command).unwrap();
} else {
Status::PartialSuccess(duration).print(command).unwrap();
}
}
print_diagnostics(world, &[], &warnings, command.common.diagnostic_format)
.map_err(|_| "failed to print diagnostics")?;
if let Some(open) = command.open.take() {
open_file(open.as_deref(), &command.output())?;
}
}
// Print diagnostics.
Err(errors) => {
set_failed();
tracing::info!("Compilation failed");
if watching {
Status::Error.print(command).unwrap();
}
print_diagnostics(
world,
&errors,
&warnings,
command.common.diagnostic_format,
)
.map_err(|_| "failed to print diagnostics")?;
}
}
Ok(())
}
/// Export into the target format.
fn export(document: &Document, command: &CompileCommand) -> StrResult<()> {
match command.output_format()? {
OutputFormat::Png => export_image(document, command, ImageExportFormat::Png),
OutputFormat::Svg => export_image(document, command, ImageExportFormat::Svg),
OutputFormat::Pdf => export_pdf(document, command),
}
}
/// Export to a PDF.
fn export_pdf(document: &Document, command: &CompileCommand) -> StrResult<()> {
let output = command.output();
let buffer = typst::export::pdf(document);
fs::write(output, buffer).map_err(|_| "failed to write PDF file")?;
Ok(())
}
/// An image format to export in.
enum ImageExportFormat {
Png,
Svg,
}
/// Export to one or multiple images (PNG or SVG).
fn export_image(
document: &Document,
command: &CompileCommand,
fmt: ImageExportFormat,
) -> StrResult<()> {
// Determine whether we have a `{n}` numbering.
let output = command.output();
let string = output.to_str().unwrap_or_default();
let numbered = string.contains("{n}");
if !numbered && document.pages.len() > 1 {
bail!("cannot export multiple images without `{{n}}` in output path");
}
// Find a number width that accommodates all pages. For instance, the
// first page should be numbered "001" if there are between 100 and
// 999 pages.
let width = 1 + document.pages.len().checked_ilog10().unwrap_or(0) as usize;
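// e.g. 5 pages -> width 1 ("1".."5"), 42 pages -> width 2 ("01".."42"),
// 100 pages -> width 3 ("001".."100").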
let mut storage;
for (i, frame) in document.pages.iter().enumerate() {
let path = if numbered {
storage = string.replace("{n}", &format!("{:0width$}", i + 1));
Path::new(&storage)
} else {
output.as_path()
};
match fmt {
ImageExportFormat::Png => {
let pixmap =
typst::export::render(frame, command.ppi / 72.0, Color::WHITE);
pixmap.save_png(path).map_err(|_| "failed to write PNG file")?;
}
ImageExportFormat::Svg => {
let svg = typst::export::svg(frame);
fs::write(path, svg).map_err(|_| "failed to write SVG file")?;
}
}
}
Ok(())
}
/// Opens the given file using:
/// - The default file viewer if `open` is `None`.
/// - The given viewer provided by `open` if it is `Some`.
fn open_file(open: Option<&str>, path: &Path) -> StrResult<()> {
if let Some(app) = open {
open::with_in_background(path, app);
} else {
open::that_in_background(path);
}
Ok(())
}
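// e.g. `open_file(Some("firefox"), Path::new("out.pdf"))` opens the PDF with
// Firefox, while `open_file(None, ...)` falls back to the system default viewer.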
/// Print diagnostic messages to the terminal.
pub fn print_diagnostics(
world: &SystemWorld,
errors: &[SourceDiagnostic],
warnings: &[SourceDiagnostic],
diagnostic_format: DiagnosticFormat,
) -> Result<(), codespan_reporting::files::Error> {
let mut w = match diagnostic_format {
DiagnosticFormat::Human => color_stream(),
DiagnosticFormat::Short => StandardStream::stderr(ColorChoice::Never),
};
let mut config = term::Config { tab_width: 2, ..Default::default() };
if diagnostic_format == DiagnosticFormat::Short {
config.display_style = term::DisplayStyle::Short;
} | Severity::Warning => Diagnostic::warning(),
}
.with_message(diagnostic.message.clone())
.with_notes(
diagnostic
.hints
.iter()
.map(|e| (eco_format!("hint: {e}")).into())
.collect(),
)
.with_labels(vec![Label::primary(
diagnostic.span.id(),
world.range(diagnostic.span),
)]);
term::emit(&mut w, &config, world, &diag)?;
// Stacktrace-like helper diagnostics.
for point in &diagnostic.trace {
let message = point.v.to_string();
let help = Diagnostic::help().with_message(message).with_labels(vec![
Label::primary(point.span.id(), world.range(point.span)),
]);
term::emit(&mut w, &config, world, &help)?;
}
}
Ok(())
}
impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
type FileId = FileId;
type Name = String;
type Source = Source;
fn name(&'a self, id: FileId) -> CodespanResult<Self::Name> {
let vpath = id.vpath();
Ok(if let Some(package) = id.package() {
format!("{package}{}", vpath.as_rooted_path().display())
} else {
// Try to express the path relative to the working directory.
vpath
.resolve(self.root())
.and_then(|abs| pathdiff::diff_paths(&abs, self.workdir()))
.as_deref()
.unwrap_or_else(|| vpath.as_rootless_path())
.to_string_lossy()
.into()
})
}
fn source(&'a self, id: FileId) -> CodespanResult<Self::Source> {
Ok(self.lookup(id))
}
fn line_index(&'a self, id: FileId, given: usize) -> CodespanResult<usize> {
let source = self.lookup(id);
source
.byte_to_line(given)
.ok_or_else(|| CodespanError::IndexTooLarge {
given,
max: source.len_bytes(),
})
}
fn line_range(
&'a self,
id: File |
for diagnostic in warnings.iter().chain(errors.iter()) {
let diag = match diagnostic.severity {
Severity::Error => Diagnostic::error(), | random_line_split |
compile.rs | Document;
use typst::eval::{eco_format, Tracer};
use typst::geom::Color;
use typst::syntax::{FileId, Source};
use typst::World;
use crate::args::{CompileCommand, DiagnosticFormat, OutputFormat};
use crate::watch::Status;
use crate::world::SystemWorld;
use crate::{color_stream, set_failed};
type CodespanResult<T> = Result<T, CodespanError>;
type CodespanError = codespan_reporting::files::Error;
impl CompileCommand {
/// The output path.
pub fn output(&self) -> PathBuf {
self.output
.clone()
.unwrap_or_else(|| self.common.input.with_extension("pdf"))
}
/// The format to use for generated output, either specified by the user or inferred from the extension.
///
/// Will return `Err` if the format was not specified and could not be inferred.
pub fn output_format(&self) -> StrResult<OutputFormat> {
Ok(if let Some(specified) = self.format {
specified
} else if let Some(output) = &self.output {
match output.extension() {
Some(ext) if ext.eq_ignore_ascii_case("pdf") => OutputFormat::Pdf,
Some(ext) if ext.eq_ignore_ascii_case("png") => OutputFormat::Png,
Some(ext) if ext.eq_ignore_ascii_case("svg") => OutputFormat::Svg,
_ => bail!("could not infer output format for path {}.\nconsider providing the format manually with `--format/-f`", output.display()),
}
} else {
OutputFormat::Pdf
})
}
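// e.g. `-o out.svg` infers Svg from the extension, an unrecognized extension
// bails with an error, and with neither `--format` nor an output path the
// fallback is Pdf.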
}
/// Execute a compilation command.
pub fn compile(mut command: CompileCommand) -> StrResult<()> {
let mut world = SystemWorld::new(&command.common)?;
compile_once(&mut world, &mut command, false)?;
Ok(())
}
/// Compile a single time.
///
/// Compilation errors are reported as diagnostics; `Err` signals only non-compilation failures (e.g. a failed export).
#[tracing::instrument(skip_all)]
pub fn compile_once(
world: &mut SystemWorld,
command: &mut CompileCommand,
watching: bool,
) -> StrResult<()> {
tracing::info!("Starting compilation");
let start = std::time::Instant::now();
if watching {
Status::Compiling.print(command).unwrap();
}
// Reset everything and ensure that the main file is present.
world.reset();
world.source(world.main()).map_err(|err| err.to_string())?;
let mut tracer = Tracer::default();
let result = typst::compile(world, &mut tracer);
let warnings = tracer.warnings();
match result {
// Export the PDF / PNG.
Ok(document) => {
export(&document, command)?;
let duration = start.elapsed();
tracing::info!("Compilation succeeded in {duration:?}");
if watching {
if warnings.is_empty() {
Status::Success(duration).print(command).unwrap();
} else {
Status::PartialSuccess(duration).print(command).unwrap();
}
}
print_diagnostics(world, &[], &warnings, command.common.diagnostic_format)
.map_err(|_| "failed to print diagnostics")?;
if let Some(open) = command.open.take() {
open_file(open.as_deref(), &command.output())?;
}
}
// Print diagnostics.
Err(errors) => {
set_failed();
tracing::info!("Compilation failed");
if watching {
Status::Error.print(command).unwrap();
}
print_diagnostics(
world,
&errors,
&warnings,
command.common.diagnostic_format,
)
.map_err(|_| "failed to print diagnostics")?;
}
}
Ok(())
}
/// Export into the target format.
fn export(document: &Document, command: &CompileCommand) -> StrResult<()> {
match command.output_format()? {
OutputFormat::Png => export_image(document, command, ImageExportFormat::Png),
OutputFormat::Svg => export_image(document, command, ImageExportFormat::Svg),
OutputFormat::Pdf => export_pdf(document, command),
}
}
/// Export to a PDF.
fn export_pdf(document: &Document, command: &CompileCommand) -> StrResult<()> {
let output = command.output();
let buffer = typst::export::pdf(document);
fs::write(output, buffer).map_err(|_| "failed to write PDF file")?;
Ok(())
}
/// An image format to export in.
enum ImageExportFormat {
Png,
Svg,
}
/// Export to one or multiple images (PNG or SVG).
fn export_image(
document: &Document,
command: &CompileCommand,
fmt: ImageExportFormat,
) -> StrResult<()> {
// Determine whether we have a `{n}` numbering.
let output = command.output();
let string = output.to_str().unwrap_or_default();
let numbered = string.contains("{n}");
if !numbered && document.pages.len() > 1 {
bail!("cannot export multiple images without `{{n}}` in output path");
}
// Find a number width that accommodates all pages. For instance, the
// first page should be numbered "001" if there are between 100 and
// 999 pages.
let width = 1 + document.pages.len().checked_ilog10().unwrap_or(0) as usize;
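// e.g. 5 pages -> width 1 ("1".."5"), 42 pages -> width 2 ("01".."42"),
// 100 pages -> width 3 ("001".."100").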
let mut storage;
for (i, frame) in document.pages.iter().enumerate() {
let path = if numbered {
storage = string.replace("{n}", &format!("{:0width$}", i + 1));
Path::new(&storage)
} else {
output.as_path()
};
match fmt {
ImageExportFormat::Png => {
let pixmap =
typst::export::render(frame, command.ppi / 72.0, Color::WHITE);
pixmap.save_png(path).map_err(|_| "failed to write PNG file")?;
}
ImageExportFormat::Svg => {
let svg = typst::export::svg(frame);
fs::write(path, svg).map_err(|_| "failed to write SVG file")?;
}
}
}
Ok(())
}
/// Opens the given file using:
/// - The default file viewer if `open` is `None`.
/// - The given viewer provided by `open` if it is `Some`.
fn open_file(open: Option<&str>, path: &Path) -> StrResult<()> {
if let Some(app) = open {
open::with_in_background(path, app);
} else {
open::that_in_background(path);
}
Ok(())
}
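// e.g. `open_file(Some("firefox"), Path::new("out.pdf"))` opens the PDF with
// Firefox, while `open_file(None, ...)` falls back to the system default viewer.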
/// Print diagnostic messages to the terminal.
pub fn print_diagnostics(
world: &SystemWorld,
errors: &[SourceDiagnostic],
warnings: &[SourceDiagnostic],
diagnostic_format: DiagnosticFormat,
) -> Result<(), codespan_reporting::files::Error> | .iter()
.map(|e| (eco_format!("hint: {e}")).into())
.collect(),
)
.with_labels(vec![Label::primary(
diagnostic.span.id(),
world.range(diagnostic.span),
)]);
term::emit(&mut w, &config, world, &diag)?;
// Stacktrace-like helper diagnostics.
for point in &diagnostic.trace {
let message = point.v.to_string();
let help = Diagnostic::help().with_message(message).with_labels(vec![
Label::primary(point.span.id(), world.range(point.span)),
]);
term::emit(&mut w, &config, world, &help)?;
}
}
Ok(())
}
impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
type FileId = FileId;
type Name = String;
type Source = Source;
fn name(&'a self, id: FileId) -> CodespanResult<Self::Name> {
let vpath = id.vpath();
Ok(if let Some(package) = id.package() {
format!("{package}{}", vpath.as_rooted_path().display())
} else {
// Try to express the path relative to the working directory.
vpath
.resolve(self.root())
.and_then(|abs| pathdiff::diff_paths(&abs, self.workdir()))
.as_deref()
.unwrap_or_else(|| vpath.as_rootless_path())
.to_string_lossy()
.into()
})
}
fn source(&'a self, id: FileId) -> CodespanResult<Self::Source> {
Ok(self.lookup(id))
}
fn line_index(&'a self, id: FileId, given: usize) -> CodespanResult<usize> {
let source = self.lookup(id);
source
.byte_to_line(given)
.ok_or_else(|| CodespanError::IndexTooLarge {
given,
max: source.len_bytes(),
})
}
fn line_range(
&'a self,
id: FileId | {
let mut w = match diagnostic_format {
DiagnosticFormat::Human => color_stream(),
DiagnosticFormat::Short => StandardStream::stderr(ColorChoice::Never),
};
let mut config = term::Config { tab_width: 2, ..Default::default() };
if diagnostic_format == DiagnosticFormat::Short {
config.display_style = term::DisplayStyle::Short;
}
for diagnostic in warnings.iter().chain(errors.iter()) {
let diag = match diagnostic.severity {
Severity::Error => Diagnostic::error(),
Severity::Warning => Diagnostic::warning(),
}
.with_message(diagnostic.message.clone())
.with_notes(
diagnostic
.hints | identifier_body |
compile.rs | Document;
use typst::eval::{eco_format, Tracer};
use typst::geom::Color;
use typst::syntax::{FileId, Source};
use typst::World;
use crate::args::{CompileCommand, DiagnosticFormat, OutputFormat};
use crate::watch::Status;
use crate::world::SystemWorld;
use crate::{color_stream, set_failed};
type CodespanResult<T> = Result<T, CodespanError>;
type CodespanError = codespan_reporting::files::Error;
impl CompileCommand {
/// The output path.
pub fn output(&self) -> PathBuf {
self.output
.clone()
.unwrap_or_else(|| self.common.input.with_extension("pdf"))
}
/// The format to use for generated output, either specified by the user or inferred from the extension.
///
/// Will return `Err` if the format was not specified and could not be inferred.
pub fn output_format(&self) -> StrResult<OutputFormat> {
Ok(if let Some(specified) = self.format {
specified
} else if let Some(output) = &self.output {
match output.extension() {
Some(ext) if ext.eq_ignore_ascii_case("pdf") => OutputFormat::Pdf,
Some(ext) if ext.eq_ignore_ascii_case("png") => OutputFormat::Png,
Some(ext) if ext.eq_ignore_ascii_case("svg") => OutputFormat::Svg,
_ => bail!("could not infer output format for path {}.\nconsider providing the format manually with `--format/-f`", output.display()),
}
} else {
OutputFormat::Pdf
})
}
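// e.g. `-o out.svg` infers Svg from the extension, an unrecognized extension
// bails with an error, and with neither `--format` nor an output path the
// fallback is Pdf.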
}
/// Execute a compilation command.
pub fn compile(mut command: CompileCommand) -> StrResult<()> {
let mut world = SystemWorld::new(&command.common)?;
compile_once(&mut world, &mut command, false)?;
Ok(())
}
/// Compile a single time.
///
/// Compilation errors are reported as diagnostics; `Err` signals only non-compilation failures (e.g. a failed export).
#[tracing::instrument(skip_all)]
pub fn compile_once(
world: &mut SystemWorld,
command: &mut CompileCommand,
watching: bool,
) -> StrResult<()> {
tracing::info!("Starting compilation");
let start = std::time::Instant::now();
if watching {
Status::Compiling.print(command).unwrap();
}
// Reset everything and ensure that the main file is present.
world.reset();
world.source(world.main()).map_err(|err| err.to_string())?;
let mut tracer = Tracer::default();
let result = typst::compile(world, &mut tracer);
let warnings = tracer.warnings();
match result {
// Export the PDF / PNG.
Ok(document) => {
export(&document, command)?;
let duration = start.elapsed();
tracing::info!("Compilation succeeded in {duration:?}");
if watching {
if warnings.is_empty() {
Status::Success(duration).print(command).unwrap();
} else {
Status::PartialSuccess(duration).print(command).unwrap();
}
}
print_diagnostics(world, &[], &warnings, command.common.diagnostic_format)
.map_err(|_| "failed to print diagnostics")?;
if let Some(open) = command.open.take() {
open_file(open.as_deref(), &command.output())?;
}
}
// Print diagnostics.
Err(errors) => {
set_failed();
tracing::info!("Compilation failed");
if watching {
Status::Error.print(command).unwrap();
}
print_diagnostics(
world,
&errors,
&warnings,
command.common.diagnostic_format,
)
.map_err(|_| "failed to print diagnostics")?;
}
}
Ok(())
}
/// Export into the target format.
fn export(document: &Document, command: &CompileCommand) -> StrResult<()> {
match command.output_format()? {
OutputFormat::Png => export_image(document, command, ImageExportFormat::Png),
OutputFormat::Svg => export_image(document, command, ImageExportFormat::Svg),
OutputFormat::Pdf => export_pdf(document, command),
}
}
/// Export to a PDF.
fn export_pdf(document: &Document, command: &CompileCommand) -> StrResult<()> {
let output = command.output();
let buffer = typst::export::pdf(document);
fs::write(output, buffer).map_err(|_| "failed to write PDF file")?;
Ok(())
}
/// An image format to export in.
enum ImageExportFormat {
Png,
Svg,
}
/// Export to one or multiple images (PNG or SVG).
fn export_image(
document: &Document,
command: &CompileCommand,
fmt: ImageExportFormat,
) -> StrResult<()> {
// Determine whether we have a `{n}` numbering.
let output = command.output();
let string = output.to_str().unwrap_or_default();
let numbered = string.contains("{n}");
if !numbered && document.pages.len() > 1 {
bail!("cannot export multiple images without `{{n}}` in output path");
}
// Find a number width that accommodates all pages. For instance, the
// first page should be numbered "001" if there are between 100 and
// 999 pages.
let width = 1 + document.pages.len().checked_ilog10().unwrap_or(0) as usize;
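// e.g. 5 pages -> width 1 ("1".."5"), 42 pages -> width 2 ("01".."42"),
// 100 pages -> width 3 ("001".."100").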
let mut storage;
for (i, frame) in document.pages.iter().enumerate() {
let path = if numbered {
storage = string.replace("{n}", &format!("{:0width$}", i + 1));
Path::new(&storage)
} else {
output.as_path()
};
match fmt {
ImageExportFormat::Png => {
let pixmap =
typst::export::render(frame, command.ppi / 72.0, Color::WHITE);
pixmap.save_png(path).map_err(|_| "failed to write PNG file")?;
}
ImageExportFormat::Svg => {
let svg = typst::export::svg(frame);
fs::write(path, svg).map_err(|_| "failed to write SVG file")?;
}
}
}
Ok(())
}
/// Opens the given file using:
/// - The default file viewer if `open` is `None`.
/// - The given viewer provided by `open` if it is `Some`.
fn open_file(open: Option<&str>, path: &Path) -> StrResult<()> {
if let Some(app) = open {
open::with_in_background(path, app);
} else {
open::that_in_background(path);
}
Ok(())
}
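// e.g. `open_file(Some("firefox"), Path::new("out.pdf"))` opens the PDF with
// Firefox, while `open_file(None, ...)` falls back to the system default viewer.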
/// Print diagnostic messages to the terminal.
pub fn print_diagnostics(
world: &SystemWorld,
errors: &[SourceDiagnostic],
warnings: &[SourceDiagnostic],
diagnostic_format: DiagnosticFormat,
) -> Result<(), codespan_reporting::files::Error> {
let mut w = match diagnostic_format {
DiagnosticFormat::Human => color_stream(),
DiagnosticFormat::Short => StandardStream::stderr(ColorChoice::Never),
};
let mut config = term::Config { tab_width: 2, ..Default::default() };
if diagnostic_format == DiagnosticFormat::Short {
config.display_style = term::DisplayStyle::Short;
}
for diagnostic in warnings.iter().chain(errors.iter()) {
let diag = match diagnostic.severity {
Severity::Error => Diagnostic::error(),
Severity::Warning => Diagnostic::warning(),
}
.with_message(diagnostic.message.clone())
.with_notes(
diagnostic
.hints
.iter()
.map(|e| (eco_format!("hint: {e}")).into())
.collect(),
)
.with_labels(vec![Label::primary(
diagnostic.span.id(),
world.range(diagnostic.span),
)]);
term::emit(&mut w, &config, world, &diag)?;
// Stacktrace-like helper diagnostics.
for point in &diagnostic.trace {
let message = point.v.to_string();
let help = Diagnostic::help().with_message(message).with_labels(vec![
Label::primary(point.span.id(), world.range(point.span)),
]);
term::emit(&mut w, &config, world, &help)?;
}
}
Ok(())
}
impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
type FileId = FileId;
type Name = String;
type Source = Source;
fn | (&'a self, id: FileId) -> CodespanResult<Self::Name> {
let vpath = id.vpath();
Ok(if let Some(package) = id.package() {
format!("{package}{}", vpath.as_rooted_path().display())
} else {
// Try to express the path relative to the working directory.
vpath
.resolve(self.root())
.and_then(|abs| pathdiff::diff_paths(&abs, self.workdir()))
.as_deref()
.unwrap_or_else(|| vpath.as_rootless_path())
.to_string_lossy()
.into()
})
}
fn source(&'a self, id: FileId) -> CodespanResult<Self::Source> {
Ok(self.lookup(id))
}
fn line_index(&'a self, id: FileId, given: usize) -> CodespanResult<usize> {
let source = self.lookup(id);
source
.byte_to_line(given)
.ok_or_else(|| CodespanError::IndexTooLarge {
given,
max: source.len_bytes(),
})
}
fn line_range(
&'a self,
id: File | name | identifier_name |
main.rs | _margin_top(4.0);
//bottom_container.set_margin_bottom(4.0);
bottom_container.set_margin_left(0.0);
bottom_container.set_margin_right(0.0);
bottom_container.set_height(Length::Fixed(32.0));
bottom_container.set_width(Length::Stretch { min: 0.0, max: f32::INFINITY });
let moon_img = Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/moon.png")));
let light_img = Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/light.png")));
let theme_button = Rc::new(Button::new());
theme_button.set_margin_top(5.0);
theme_button.set_margin_left(28.0);
theme_button.set_margin_right(4.0);
theme_button.set_height(Length::Fixed(24.0));
theme_button.set_width(Length::Fixed(24.0));
theme_button.set_horizontal_align(Alignment::Center);
theme_button.set_icon(Some(moon_img.clone()));
let question =
Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/question_button.png")));
let question_light = Rc::new(Picture::from_encoded_bytes(include_bytes!(
"../resource/question_button_light.png"
)));
let question_noti =
Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/question-noti.png")));
let question_light_noti =
Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/question-light-noti.png")));
let help_button = Rc::new(Button::new());
help_button.set_margin_top(5.0);
help_button.set_margin_left(4.0);
help_button.set_margin_right(28.0);
help_button.set_height(Length::Fixed(24.0));
help_button.set_width(Length::Fixed(24.0));
help_button.set_horizontal_align(Alignment::Center);
help_button.set_icon(Some(question.clone()));
let slider = Rc::new(Slider::new());
slider.set_margin_top(5.0);
slider.set_margin_left(4.0);
slider.set_margin_right(4.0);
slider.set_height(Length::Fixed(24.0));
slider.set_width(Length::Stretch { min: 0.0, max: 600.0 });
slider.set_horizontal_align(Alignment::Center);
slider.set_steps(6, 1);
let picture_widget = Rc::new(PictureWidget::new(
&window.display_mut(),
&window,
slider.clone(),
bottom_container.clone(),
config.clone(),
));
picture_widget.set_height(Length::Stretch { min: 0.0, max: f32::INFINITY });
picture_widget.set_width(Length::Stretch { min: 0.0, max: f32::INFINITY });
if let Some(file_path) = std::env::args().nth(1) {
picture_widget.jump_to_path(file_path);
}
bottom_container.add_child(theme_button.clone());
bottom_container.add_child(slider.clone());
bottom_container.add_child(help_button.clone());
picture_area_container.add_child(picture_widget.clone());
picture_area_container.add_child(help_screen.clone());
picture_area_container.add_child(update_notification.clone());
vertical_container.add_child(picture_area_container);
vertical_container.add_child(bottom_container.clone());
let update_available = Arc::new(AtomicBool::new(false));
let update_check_done = Arc::new(AtomicBool::new(false));
let light_theme = Rc::new(Cell::new(!cache.lock().unwrap().window.dark));
let theme_button_clone = theme_button.clone();
let help_button_clone = help_button.clone();
let update_label_clone = update_label;
let picture_widget_clone = picture_widget.clone();
let bottom_container_clone = bottom_container;
let update_notification_clone = update_notification.clone();
let slider_clone = slider.clone();
let window_clone = window.clone();
let light_theme_clone = light_theme.clone();
let update_available_clone = update_available.clone();
let set_theme = Rc::new(move || {
if light_theme_clone.get() {
picture_widget_clone.set_bright_shade(0.96);
bottom_container_clone.set_bg_color([1.0, 1.0, 1.0, 1.0]);
slider_clone.set_shadow_color([0.0, 0.0, 0.0]);
window_clone.set_bg_color([0.85, 0.85, 0.85, 1.0]);
theme_button_clone.set_icon(Some(moon_img.clone()));
update_notification_clone.set_bg_color([0.06, 0.06, 0.06, 1.0]);
update_label_clone.set_icon(Some(update_label_image_light.clone()));
if update_available_clone.load(Ordering::SeqCst) {
help_button_clone.set_icon(Some(question_noti.clone()));
} else {
help_button_clone.set_icon(Some(question.clone()));
}
} else {
picture_widget_clone.set_bright_shade(0.11);
bottom_container_clone.set_bg_color([0.08, 0.08, 0.08, 1.0]);
slider_clone.set_shadow_color([0.0, 0.0, 0.0]);
window_clone.set_bg_color([0.03, 0.03, 0.03, 1.0]);
theme_button_clone.set_icon(Some(light_img.clone()));
update_notification_clone.set_bg_color([0.85, 0.85, 0.85, 1.0]);
update_label_clone.set_icon(Some(update_label_image.clone()));
if update_available_clone.load(Ordering::SeqCst) {
help_button_clone.set_icon(Some(question_light_noti.clone()));
} else {
help_button_clone.set_icon(Some(question_light.clone()));
}
}
});
set_theme();
{
let cache = cache.clone();
let set_theme = set_theme.clone();
theme_button.set_on_click(move || {
light_theme.set(!light_theme.get());
cache.lock().unwrap().window.dark = !light_theme.get();
set_theme();
});
}
let slider_clone2 = slider.clone();
let image_widget_clone = picture_widget;
slider.set_on_value_change(move || {
image_widget_clone.jump_to_index(slider_clone2.value());
});
let help_visible = Cell::new(first_launch);
help_screen.set_visible(help_visible.get());
let update_available_clone = update_available.clone();
let help_screen_clone = help_screen.clone();
let update_notification_clone = update_notification.clone();
update_notification
.set_visible(help_visible.get() && update_available_clone.load(Ordering::SeqCst));
help_button.set_on_click(move || {
help_visible.set(!help_visible.get());
help_screen_clone.set_visible(help_visible.get());
update_notification_clone
.set_visible(help_visible.get() && update_available_clone.load(Ordering::SeqCst));
});
window.set_root(vertical_container);
let check_updates_enabled = match &config.borrow().updates {
Some(u) if !u.check_updates => false,
_ => true,
};
let update_checker_join_handle = {
let updates = &mut cache.lock().unwrap().updates;
let cache = cache.clone();
let update_available = update_available.clone();
let update_check_done = update_check_done.clone();
if check_updates_enabled && updates.update_check_needed() {
// kick off a thread that will check for an update in the background
Some(std::thread::spawn(move || {
let has_update = check_for_updates();
update_available.store(has_update, Ordering::SeqCst);
update_check_done.store(true, Ordering::SeqCst);
if !has_update {
cache.lock().unwrap().updates.set_update_check_time();
}
}))
} else {
None
}
};
let mut nothing_to_do = false;
application.add_global_event_handler(move |_| {
if nothing_to_do {
return NextUpdate::Latest;
}
if update_check_done.load(Ordering::SeqCst) {
nothing_to_do = true;
set_theme();
if help_screen.visible() && update_available.load(Ordering::SeqCst) {
update_notification.set_visible(true);
}
}
NextUpdate::WaitUntil(Instant::now() + Duration::from_secs(1))
});
application.set_at_exit(Some(move || {
cache.lock().unwrap().save(cache_path).unwrap();
if let Some(h) = update_checker_join_handle {
h.join().unwrap();
}
}));
application.start_event_loop();
}
// ========================================================
#[derive(Deserialize)]
struct ReleaseInfoJson {
tag_name: String,
}
fn get_config_and_cache_paths() -> (PathBuf, PathBuf) {
let config_folder;
let cache_folder;
if let Some(ref project_dirs) = *PROJECT_DIRS {
config_folder = project_dirs.config_dir().to_owned();
cache_folder = project_dirs.cache_dir().to_owned();
} else {
let exe_path = std::env::current_exe().unwrap();
let exe_folder = exe_path.parent().unwrap();
config_folder = exe_folder.to_owned();
cache_folder = exe_folder.to_owned();
}
if !config_folder.exists() {
std::fs::create_dir_all(&config_folder).unwrap();
}
if !cache_folder.exists() {
std::fs::create_dir_all(&cache_folder).unwrap();
}
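// On Linux this typically resolves to ~/.config/<app>/cfg.toml and
// ~/.cache/<app>/cache.toml (assuming PROJECT_DIRS wraps `directories`-style
// project dirs); without them, files live next to the executable as a portable
// fallback.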
(config_folder.join("cfg.toml"), cache_folder.join("cache.toml"))
} | random_line_split |
||
main.rs | .0);
bottom_container.set_margin_right(0.0);
bottom_container.set_height(Length::Fixed(32.0));
bottom_container.set_width(Length::Stretch { min: 0.0, max: f32::INFINITY });
let moon_img = Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/moon.png")));
let light_img = Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/light.png")));
let theme_button = Rc::new(Button::new());
theme_button.set_margin_top(5.0);
theme_button.set_margin_left(28.0);
theme_button.set_margin_right(4.0);
theme_button.set_height(Length::Fixed(24.0));
theme_button.set_width(Length::Fixed(24.0));
theme_button.set_horizontal_align(Alignment::Center);
theme_button.set_icon(Some(moon_img.clone()));
let question =
Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/question_button.png")));
let question_light = Rc::new(Picture::from_encoded_bytes(include_bytes!(
"../resource/question_button_light.png"
)));
let question_noti =
Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/question-noti.png")));
let question_light_noti =
Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/question-light-noti.png")));
let help_button = Rc::new(Button::new());
help_button.set_margin_top(5.0);
help_button.set_margin_left(4.0);
help_button.set_margin_right(28.0);
help_button.set_height(Length::Fixed(24.0));
help_button.set_width(Length::Fixed(24.0));
help_button.set_horizontal_align(Alignment::Center);
help_button.set_icon(Some(question.clone()));
let slider = Rc::new(Slider::new());
slider.set_margin_top(5.0);
slider.set_margin_left(4.0);
slider.set_margin_right(4.0);
slider.set_height(Length::Fixed(24.0));
slider.set_width(Length::Stretch { min: 0.0, max: 600.0 });
slider.set_horizontal_align(Alignment::Center);
slider.set_steps(6, 1);
let picture_widget = Rc::new(PictureWidget::new(
&window.display_mut(),
&window,
slider.clone(),
bottom_container.clone(),
config.clone(),
));
picture_widget.set_height(Length::Stretch { min: 0.0, max: f32::INFINITY });
picture_widget.set_width(Length::Stretch { min: 0.0, max: f32::INFINITY });
if let Some(file_path) = std::env::args().nth(1) {
picture_widget.jump_to_path(file_path);
}
bottom_container.add_child(theme_button.clone());
bottom_container.add_child(slider.clone());
bottom_container.add_child(help_button.clone());
picture_area_container.add_child(picture_widget.clone());
picture_area_container.add_child(help_screen.clone());
picture_area_container.add_child(update_notification.clone());
vertical_container.add_child(picture_area_container);
vertical_container.add_child(bottom_container.clone());
let update_available = Arc::new(AtomicBool::new(false));
let update_check_done = Arc::new(AtomicBool::new(false));
let light_theme = Rc::new(Cell::new(!cache.lock().unwrap().window.dark));
let theme_button_clone = theme_button.clone();
let help_button_clone = help_button.clone();
let update_label_clone = update_label;
let picture_widget_clone = picture_widget.clone();
let bottom_container_clone = bottom_container;
let update_notification_clone = update_notification.clone();
let slider_clone = slider.clone();
let window_clone = window.clone();
let light_theme_clone = light_theme.clone();
let update_available_clone = update_available.clone();
let set_theme = Rc::new(move || {
if light_theme_clone.get() {
picture_widget_clone.set_bright_shade(0.96);
bottom_container_clone.set_bg_color([1.0, 1.0, 1.0, 1.0]);
slider_clone.set_shadow_color([0.0, 0.0, 0.0]);
window_clone.set_bg_color([0.85, 0.85, 0.85, 1.0]);
theme_button_clone.set_icon(Some(moon_img.clone()));
update_notification_clone.set_bg_color([0.06, 0.06, 0.06, 1.0]);
update_label_clone.set_icon(Some(update_label_image_light.clone()));
if update_available_clone.load(Ordering::SeqCst) {
help_button_clone.set_icon(Some(question_noti.clone()));
} else {
help_button_clone.set_icon(Some(question.clone()));
}
} else {
picture_widget_clone.set_bright_shade(0.11);
bottom_container_clone.set_bg_color([0.08, 0.08, 0.08, 1.0]);
slider_clone.set_shadow_color([0.0, 0.0, 0.0]);
window_clone.set_bg_color([0.03, 0.03, 0.03, 1.0]);
theme_button_clone.set_icon(Some(light_img.clone()));
update_notification_clone.set_bg_color([0.85, 0.85, 0.85, 1.0]);
update_label_clone.set_icon(Some(update_label_image.clone()));
if update_available_clone.load(Ordering::SeqCst) {
help_button_clone.set_icon(Some(question_light_noti.clone()));
} else {
help_button_clone.set_icon(Some(question_light.clone()));
}
}
});
set_theme();
{
let cache = cache.clone();
let set_theme = set_theme.clone();
theme_button.set_on_click(move || {
light_theme.set(!light_theme.get());
cache.lock().unwrap().window.dark = !light_theme.get();
set_theme();
});
}
let slider_clone2 = slider.clone();
let image_widget_clone = picture_widget;
slider.set_on_value_change(move || {
image_widget_clone.jump_to_index(slider_clone2.value());
});
let help_visible = Cell::new(first_launch);
help_screen.set_visible(help_visible.get());
let update_available_clone = update_available.clone();
let help_screen_clone = help_screen.clone();
let update_notification_clone = update_notification.clone();
update_notification
.set_visible(help_visible.get() && update_available_clone.load(Ordering::SeqCst));
help_button.set_on_click(move || {
help_visible.set(!help_visible.get());
help_screen_clone.set_visible(help_visible.get());
update_notification_clone
.set_visible(help_visible.get() && update_available_clone.load(Ordering::SeqCst));
});
window.set_root(vertical_container);
let check_updates_enabled = match &config.borrow().updates {
Some(u) if !u.check_updates => false,
_ => true,
};
let update_checker_join_handle = {
let updates = &mut cache.lock().unwrap().updates;
let cache = cache.clone();
let update_available = update_available.clone();
let update_check_done = update_check_done.clone();
if check_updates_enabled && updates.update_check_needed() {
// kick off a thread that will check for an update in the background
Some(std::thread::spawn(move || {
let has_update = check_for_updates();
update_available.store(has_update, Ordering::SeqCst);
update_check_done.store(true, Ordering::SeqCst);
if !has_update {
cache.lock().unwrap().updates.set_update_check_time();
}
}))
} else {
None
}
};
let mut nothing_to_do = false;
application.add_global_event_handler(move |_| {
if nothing_to_do {
return NextUpdate::Latest;
}
if update_check_done.load(Ordering::SeqCst) {
nothing_to_do = true;
set_theme();
if help_screen.visible() && update_available.load(Ordering::SeqCst) {
update_notification.set_visible(true);
}
}
NextUpdate::WaitUntil(Instant::now() + Duration::from_secs(1))
});
application.set_at_exit(Some(move || {
cache.lock().unwrap().save(cache_path).unwrap();
if let Some(h) = update_checker_join_handle {
h.join().unwrap();
}
}));
application.start_event_loop();
}
// ========================================================
#[derive(Deserialize)]
struct ReleaseInfoJson {
tag_name: String,
}
fn get_config_and_cache_paths() -> (PathBuf, PathBuf) {
let config_folder;
let cache_folder;
if let Some(ref project_dirs) = *PROJECT_DIRS {
config_folder = project_dirs.config_dir().to_owned();
cache_folder = project_dirs.cache_dir().to_owned();
} else {
let exe_path = std::env::current_exe().unwrap();
let exe_folder = exe_path.parent().unwrap();
config_folder = exe_folder.to_owned();
cache_folder = exe_folder.to_owned();
}
if !config_folder.exists() {
std::fs::create_dir_all(&config_folder).unwrap();
}
if !cache_folder.exists() {
std::fs::create_dir_all(&cache_folder).unwrap();
}
(config_folder.join("cfg.toml"), cache_folder.join("cache.toml"))
}
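// On Linux this typically resolves to ~/.config/<app>/cfg.toml and
// ~/.cache/<app>/cache.toml (assuming PROJECT_DIRS wraps `directories`-style
// project dirs); without them, files live next to the executable as a portable
// fallback.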
#[cfg(not(feature = "networking"))]
/// Always returns false without the `networking` feature.
fn | check_for_updates | identifier_name |
|
main.rs | resource/usage.png"));
let help_screen = Rc::new(HelpScreen::new(usage_img));
let bottom_container = Rc::new(HorizontalLayoutContainer::new());
//bottom_container.set_margin_top(4.0);
//bottom_container.set_margin_bottom(4.0);
bottom_container.set_margin_left(0.0);
bottom_container.set_margin_right(0.0);
bottom_container.set_height(Length::Fixed(32.0));
bottom_container.set_width(Length::Stretch { min: 0.0, max: f32::INFINITY });
let moon_img = Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/moon.png")));
let light_img = Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/light.png")));
let theme_button = Rc::new(Button::new());
theme_button.set_margin_top(5.0);
theme_button.set_margin_left(28.0);
theme_button.set_margin_right(4.0);
theme_button.set_height(Length::Fixed(24.0));
theme_button.set_width(Length::Fixed(24.0));
theme_button.set_horizontal_align(Alignment::Center);
theme_button.set_icon(Some(moon_img.clone()));
let question =
Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/question_button.png")));
let question_light = Rc::new(Picture::from_encoded_bytes(include_bytes!(
"../resource/question_button_light.png"
)));
let question_noti =
Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/question-noti.png")));
let question_light_noti =
Rc::new(Picture::from_encoded_bytes(include_bytes!("../resource/question-light-noti.png")));
let help_button = Rc::new(Button::new());
help_button.set_margin_top(5.0);
help_button.set_margin_left(4.0);
help_button.set_margin_right(28.0);
help_button.set_height(Length::Fixed(24.0));
help_button.set_width(Length::Fixed(24.0));
help_button.set_horizontal_align(Alignment::Center);
help_button.set_icon(Some(question.clone()));
let slider = Rc::new(Slider::new());
slider.set_margin_top(5.0);
slider.set_margin_left(4.0);
slider.set_margin_right(4.0);
slider.set_height(Length::Fixed(24.0));
slider.set_width(Length::Stretch { min: 0.0, max: 600.0 });
slider.set_horizontal_align(Alignment::Center);
slider.set_steps(6, 1);
let picture_widget = Rc::new(PictureWidget::new(
&window.display_mut(),
&window,
slider.clone(),
bottom_container.clone(),
config.clone(),
));
picture_widget.set_height(Length::Stretch { min: 0.0, max: f32::INFINITY });
picture_widget.set_width(Length::Stretch { min: 0.0, max: f32::INFINITY });
if let Some(file_path) = std::env::args().nth(1) {
picture_widget.jump_to_path(file_path);
}
bottom_container.add_child(theme_button.clone());
bottom_container.add_child(slider.clone());
bottom_container.add_child(help_button.clone());
picture_area_container.add_child(picture_widget.clone());
picture_area_container.add_child(help_screen.clone());
picture_area_container.add_child(update_notification.clone());
vertical_container.add_child(picture_area_container);
vertical_container.add_child(bottom_container.clone());
let update_available = Arc::new(AtomicBool::new(false));
let update_check_done = Arc::new(AtomicBool::new(false));
let light_theme = Rc::new(Cell::new(!cache.lock().unwrap().window.dark));
let theme_button_clone = theme_button.clone();
let help_button_clone = help_button.clone();
let update_label_clone = update_label;
let picture_widget_clone = picture_widget.clone();
let bottom_container_clone = bottom_container;
let update_notification_clone = update_notification.clone();
let slider_clone = slider.clone();
let window_clone = window.clone();
let light_theme_clone = light_theme.clone();
let update_available_clone = update_available.clone();
let set_theme = Rc::new(move || {
if light_theme_clone.get() {
picture_widget_clone.set_bright_shade(0.96);
bottom_container_clone.set_bg_color([1.0, 1.0, 1.0, 1.0]);
slider_clone.set_shadow_color([0.0, 0.0, 0.0]);
window_clone.set_bg_color([0.85, 0.85, 0.85, 1.0]);
theme_button_clone.set_icon(Some(moon_img.clone()));
update_notification_clone.set_bg_color([0.06, 0.06, 0.06, 1.0]);
update_label_clone.set_icon(Some(update_label_image_light.clone()));
if update_available_clone.load(Ordering::SeqCst) {
help_button_clone.set_icon(Some(question_noti.clone()));
} else {
help_button_clone.set_icon(Some(question.clone()));
}
} else {
picture_widget_clone.set_bright_shade(0.11);
bottom_container_clone.set_bg_color([0.08, 0.08, 0.08, 1.0]);
slider_clone.set_shadow_color([0.0, 0.0, 0.0]);
window_clone.set_bg_color([0.03, 0.03, 0.03, 1.0]);
theme_button_clone.set_icon(Some(light_img.clone()));
update_notification_clone.set_bg_color([0.85, 0.85, 0.85, 1.0]);
update_label_clone.set_icon(Some(update_label_image.clone()));
if update_available_clone.load(Ordering::SeqCst) {
help_button_clone.set_icon(Some(question_light_noti.clone()));
} else {
help_button_clone.set_icon(Some(question_light.clone()));
}
}
});
set_theme();
{
let cache = cache.clone();
let set_theme = set_theme.clone();
theme_button.set_on_click(move || {
light_theme.set(!light_theme.get());
cache.lock().unwrap().window.dark = !light_theme.get();
set_theme();
});
}
let slider_clone2 = slider.clone();
let image_widget_clone = picture_widget;
slider.set_on_value_change(move || {
image_widget_clone.jump_to_index(slider_clone2.value());
});
let help_visible = Cell::new(first_launch);
help_screen.set_visible(help_visible.get());
let update_available_clone = update_available.clone();
let help_screen_clone = help_screen.clone();
let update_notification_clone = update_notification.clone();
update_notification
.set_visible(help_visible.get() && update_available_clone.load(Ordering::SeqCst));
help_button.set_on_click(move || {
help_visible.set(!help_visible.get());
help_screen_clone.set_visible(help_visible.get());
update_notification_clone
.set_visible(help_visible.get() && update_available_clone.load(Ordering::SeqCst));
});
window.set_root(vertical_container);
let check_updates_enabled = match &config.borrow().updates {
Some(u) if !u.check_updates => false,
_ => true,
};
let update_checker_join_handle = {
let updates = &mut cache.lock().unwrap().updates;
let cache = cache.clone();
let update_available = update_available.clone();
let update_check_done = update_check_done.clone();
if check_updates_enabled && updates.update_check_needed() {
// kick off a thread that will check for an update in the background
Some(std::thread::spawn(move || {
let has_update = check_for_updates();
update_available.store(has_update, Ordering::SeqCst);
update_check_done.store(true, Ordering::SeqCst);
if !has_update {
cache.lock().unwrap().updates.set_update_check_time();
}
}))
} else {
None
}
};
let mut nothing_to_do = false;
application.add_global_event_handler(move |_| {
if nothing_to_do {
return NextUpdate::Latest;
}
if update_check_done.load(Ordering::SeqCst) {
nothing_to_do = true;
set_theme();
if help_screen.visible() && update_available.load(Ordering::SeqCst) {
update_notification.set_visible(true);
}
}
NextUpdate::WaitUntil(Instant::now() + Duration::from_secs(1))
});
application.set_at_exit(Some(move || {
cache.lock().unwrap().save(cache_path).unwrap();
if let Some(h) = update_checker_join_handle {
h.join().unwrap();
}
}));
application.start_event_loop();
}
// ========================================================
#[derive(Deserialize)]
struct ReleaseInfoJson {
tag_name: String,
}
fn get_config_and_cache_paths() -> (PathBuf, PathBuf) {
let config_folder;
let cache_folder;
if let Some(ref project_dirs) = *PROJECT_DIRS {
config_folder = project_dirs.config_dir().to_owned();
cache_folder = project_dirs.cache_dir().to_owned();
} else {
let exe_path = std::env::current_exe().unwrap();
let exe_folder = exe_path.parent().unwrap();
config_folder = exe_folder.to_owned();
cache_folder = exe_folder.to_owned();
}
if !config_folder.exists() | {
std::fs::create_dir_all(&config_folder).unwrap();
} | conditional_block |
|
mod.rs | ::Guild(channel)) => channel,
Some(_other_channel) => {
println!(
"Warning: guild message was supposedly sent in a non-guild channel. Denying invocation"
);
return false;
}
None => return false,
};
// If member not in cache (probably because presences intent is not enabled), retrieve via HTTP
let member = match guild.members.get(&ctx.author().id) {
Some(x) => x.clone(),
None => match ctx
.discord()
.http
.get_member(guild_id.0, ctx.author().id.0)
.await
{
Ok(member) => member,
Err(_) => return false,
},
};
match guild.user_permissions_in(channel, &member) {
Ok(perms) => perms.contains(required_permissions),
Err(_) => false,
}
}
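// Note: `user_permissions_in` resolves base, role, and channel-overwrite
// permissions for the member; every failure path above (unknown channel,
// member, or guild) denies by default.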
async fn check_required_permissions_and_owners_only<U, E>(
ctx: crate::Context<'_, U, E>,
required_permissions: serenity::Permissions,
owners_only: bool,
) -> bool {
if owners_only && !ctx.framework().options().owners.contains(&ctx.author().id) {
return false;
}
if !check_permissions(ctx, required_permissions).await {
return false;
}
true
}
/// The main framework struct which stores all data and handles message and interaction dispatch.
pub struct Framework<U, E> {
user_data: once_cell::sync::OnceCell<U>,
bot_id: serenity::UserId,
// TODO: wrap in RwLock to allow changing framework options while running? Could also replace
// the edit tracking cache interior mutability
options: FrameworkOptions<U, E>,
application_id: serenity::ApplicationId,
// Will be initialized to Some on construction, and then taken out on startup
client: std::sync::Mutex<Option<serenity::Client>>,
// Initialized to Some during construction; so shouldn't be None at any observable point
shard_manager: std::sync::Mutex<Option<std::sync::Arc<tokio::sync::Mutex<ShardManager>>>>,
// Filled with Some on construction. Taken out and executed on first Ready gateway event
user_data_setup: std::sync::Mutex<
Option<
Box<
dyn Send
+ Sync
+ for<'a> FnOnce(
&'a serenity::Context,
&'a serenity::Ready,
&'a Self,
) -> BoxFuture<'a, Result<U, E>>,
>,
>,
>,
}
impl<U, E> Framework<U, E> {
/// Create a framework builder to configure, create and run a framework.
///
/// For more information, see [`FrameworkBuilder`]
pub fn build() -> FrameworkBuilder<U, E> {
FrameworkBuilder::default()
}
/// Setup a new [`Framework`]. For more ergonomic setup, please see [`FrameworkBuilder`]
///
/// This function is async and returns Result because it already initializes the Discord client.
///
/// The user data callback is invoked as soon as the bot is logged in. That way, bot data like
/// user ID or connected guilds can be made available to the user data setup function. The user
/// data setup is not allowed to return Result because there would be no reasonable
/// course of action on error.
pub async fn new<F>(
application_id: serenity::ApplicationId,
client_builder: serenity::ClientBuilder<'_>,
user_data_setup: F,
options: FrameworkOptions<U, E>,
) -> Result<std::sync::Arc<Self>, serenity::Error>
where
F: Send
+ Sync
+ 'static
+ for<'a> FnOnce(
&'a serenity::Context,
&'a serenity::Ready,
&'a Self,
) -> BoxFuture<'a, Result<U, E>>,
U: Send + Sync + 'static,
E: Send + 'static,
{
let self_1 = std::sync::Arc::new(Self {
user_data: once_cell::sync::OnceCell::new(),
user_data_setup: std::sync::Mutex::new(Some(Box::new(user_data_setup))),
bot_id: serenity::parse_token(client_builder.get_token().trim_start_matches("Bot "))
.expect("Invalid bot token")
.bot_user_id,
// To break up the circular dependency (framework setup -> client setup -> event handler
// -> framework), we initialize this with None and then immediately fill in once the
// client is created
client: std::sync::Mutex::new(None),
options,
application_id,
shard_manager: std::sync::Mutex::new(None),
});
let self_2 = self_1.clone();
let event_handler = EventWrapper(move |ctx, event| {
let self_2 = std::sync::Arc::clone(&self_2);
Box::pin(async move {
self_2.event(ctx, event).await;
}) as _
});
let client: Client = client_builder
.application_id(application_id.0)
.event_handler(event_handler)
.await?;
*self_1.shard_manager.lock().unwrap() = Some(client.shard_manager.clone());
*self_1.client.lock().unwrap() = Some(client);
Ok(self_1)
}
/// Start the framework.
///
/// Takes a `serenity::ClientBuilder`, in which you need to supply the bot token, as well as
/// any gateway intents.
pub async fn start(self: std::sync::Arc<Self>) -> Result<(), serenity::Error>
where
U: Send + Sync + 'static,
E: Send + 'static,
{
let mut client = self
.client
.lock()
.unwrap()
.take()
.expect("Prepared client is missing");
let edit_track_cache_purge_task = tokio::spawn(async move {
loop {
if let Some(edit_tracker) = &self.options.prefix_options.edit_tracker {
edit_tracker.write().unwrap().purge();
}
// not sure if the purging interval should be configurable
tokio::time::sleep(std::time::Duration::from_secs(60)).await;
}
});
// This will run for as long as the bot is active
client.start().await?;
edit_track_cache_purge_task.abort();
Ok(())
}
/// Returns the stored framework options, including commands.
pub fn options(&self) -> &FrameworkOptions<U, E> {
&self.options
}
/// Returns the application ID given to the framework on its creation.
pub fn application_id(&self) -> serenity::ApplicationId {
self.application_id
}
/// Returns serenity's client shard manager.
pub fn shard_manager(&self) -> std::sync::Arc<tokio::sync::Mutex<ShardManager>> {
self.shard_manager
.lock()
.unwrap()
.clone()
.expect("fatal: shard manager not stored in framework initialization")
}
async fn get_user_data(&self) -> &U {
// We shouldn't get a Message event before a Ready event. But if we do, wait until
// the Ready event does come and the resulting data has arrived.
loop {
match self.user_data.get() {
Some(x) => break x,
None => tokio::time::sleep(std::time::Duration::from_millis(100)).await,
}
}
}
async fn event(&self, ctx: serenity::Context, event: Event<'_>)
where
U: Send + Sync,
{
match &event {
Event::Ready { data_about_bot } => {
let user_data_setup = Option::take(&mut *self.user_data_setup.lock().unwrap());
if let Some(user_data_setup) = user_data_setup {
match user_data_setup(&ctx, data_about_bot, self).await {
Ok(user_data) => {
let _: Result<_, _> = self.user_data.set(user_data);
}
Err(e) => (self.options.on_error)(e, ErrorContext::Setup).await,
}
} else {
// discarding duplicate Discord bot ready event
// (happens regularly when bot is online for long period of time)
}
}
Event::Message { new_message } => {
if let Err(Some((err, ctx))) =
prefix::dispatch_message(self, &ctx, new_message, false).await
{
if let Some(on_error) = ctx.command.options.on_error | else {
(self.options.on_error)(
err,
crate::ErrorContext::Command(crate::CommandErrorContext::Prefix(ctx)),
)
.await;
}
}
}
Event::MessageUpdate { event, .. } => {
if let Some(edit_tracker) = &self.options.prefix_options.edit_tracker {
let msg = edit_tracker.write().unwrap().process_message_update(
event,
self.options().prefix_options.ignore_edit_tracker_cache,
);
if let Some(msg) = msg {
if let Err(Some((err, ctx))) =
prefix::dispatch_message(self, &ctx, &msg, true).await
{
(self.options.on_error)(
err,
crate::ErrorContext::Command(crate::CommandErrorContext::Prefix(
ctx,
| {
(on_error)(err, ctx).await;
} | conditional_block |
mod.rs | ::Guild(channel)) => channel,
Some(_other_channel) => {
println!(
"Warning: guild message was supposedly sent in a non-guild channel. Denying invocation"
);
return false;
}
None => return false,
};
// If member not in cache (probably because presences intent is not enabled), retrieve via HTTP
let member = match guild.members.get(&ctx.author().id) {
Some(x) => x.clone(),
None => match ctx
.discord()
.http
.get_member(guild_id.0, ctx.author().id.0)
.await
{
Ok(member) => member,
Err(_) => return false,
},
};
match guild.user_permissions_in(channel, &member) {
Ok(perms) => perms.contains(required_permissions),
Err(_) => false,
}
}
async fn check_required_permissions_and_owners_only<U, E>(
ctx: crate::Context<'_, U, E>,
required_permissions: serenity::Permissions,
owners_only: bool,
) -> bool {
if owners_only && !ctx.framework().options().owners.contains(&ctx.author().id) {
return false;
}
if !check_permissions(ctx, required_permissions).await {
return false;
}
true
}
/// The main framework struct which stores all data and handles message and interaction dispatch.
pub struct Framework<U, E> {
user_data: once_cell::sync::OnceCell<U>,
bot_id: serenity::UserId,
// TODO: wrap in RwLock to allow changing framework options while running? Could also replace
// the edit tracking cache interior mutability
options: FrameworkOptions<U, E>,
application_id: serenity::ApplicationId,
// Will be initialized to Some on construction, and then taken out on startup
client: std::sync::Mutex<Option<serenity::Client>>,
// Initialized to Some during construction; so shouldn't be None at any observable point
shard_manager: std::sync::Mutex<Option<std::sync::Arc<tokio::sync::Mutex<ShardManager>>>>,
// Filled with Some on construction. Taken out and executed on first Ready gateway event
user_data_setup: std::sync::Mutex<
Option<
Box<
dyn Send
+ Sync
+ for<'a> FnOnce(
&'a serenity::Context,
&'a serenity::Ready,
&'a Self,
) -> BoxFuture<'a, Result<U, E>>,
>,
>,
>,
}
impl<U, E> Framework<U, E> {
/// Create a framework builder to configure, create and run a framework.
///
/// For more information, see [`FrameworkBuilder`]
pub fn build() -> FrameworkBuilder<U, E> {
FrameworkBuilder::default()
}
/// Set up a new [`Framework`]. For more ergonomic setup, please see [`FrameworkBuilder`]
///
/// This function is async and returns Result because it already initializes the Discord client.
///
/// The user data callback is invoked as soon as the bot is logged in. That way, bot data like
/// user ID or connected guilds can be made available to the user data setup function. The user
/// data setup is not allowed to return Result because there would be no reasonable
/// course of action on error.
pub async fn new<F>(
application_id: serenity::ApplicationId,
client_builder: serenity::ClientBuilder<'_>,
user_data_setup: F,
options: FrameworkOptions<U, E>,
) -> Result<std::sync::Arc<Self>, serenity::Error>
where
F: Send
+ Sync
+ 'static
+ for<'a> FnOnce(
&'a serenity::Context,
&'a serenity::Ready,
&'a Self,
) -> BoxFuture<'a, Result<U, E>>,
U: Send + Sync + 'static,
E: Send + 'static,
{
let self_1 = std::sync::Arc::new(Self {
user_data: once_cell::sync::OnceCell::new(),
user_data_setup: std::sync::Mutex::new(Some(Box::new(user_data_setup))),
bot_id: serenity::parse_token(client_builder.get_token().trim_start_matches("Bot "))
.expect("Invalid bot token")
.bot_user_id,
// To break up the circular dependency (framework setup -> client setup -> event handler
// -> framework), we initialize this with None and then immediately fill in once the
// client is created
client: std::sync::Mutex::new(None),
options,
application_id,
shard_manager: std::sync::Mutex::new(None),
});
let self_2 = self_1.clone();
let event_handler = EventWrapper(move |ctx, event| {
let self_2 = std::sync::Arc::clone(&self_2);
Box::pin(async move {
self_2.event(ctx, event).await;
}) as _
});
let client: Client = client_builder
.application_id(application_id.0)
.event_handler(event_handler)
.await?;
*self_1.shard_manager.lock().unwrap() = Some(client.shard_manager.clone());
*self_1.client.lock().unwrap() = Some(client);
Ok(self_1)
}
/// Start the framework.
///
/// Takes a `serenity::ClientBuilder`, in which you need to supply the bot token, as well as
/// any gateway intents.
pub async fn start(self: std::sync::Arc<Self>) -> Result<(), serenity::Error>
where
U: Send + Sync + 'static,
E: Send + 'static,
{
let mut client = self
.client
.lock()
.unwrap()
.take()
.expect("Prepared client is missing");
let edit_track_cache_purge_task = tokio::spawn(async move {
loop {
if let Some(edit_tracker) = &self.options.prefix_options.edit_tracker {
edit_tracker.write().unwrap().purge();
}
// not sure if the purging interval should be configurable
tokio::time::sleep(std::time::Duration::from_secs(60)).await;
}
});
// This will run for as long as the bot is active
client.start().await?;
edit_track_cache_purge_task.abort();
Ok(())
}
/// Returns the stored framework options, including commands.
pub fn options(&self) -> &FrameworkOptions<U, E> {
&self.options
}
/// Returns the application ID given to the framework on its creation.
pub fn application_id(&self) -> serenity::ApplicationId {
self.application_id
}
/// Returns serenity's client shard manager.
pub fn shard_manager(&self) -> std::sync::Arc<tokio::sync::Mutex<ShardManager>> {
self.shard_manager
.lock()
.unwrap()
.clone()
.expect("fatal: shard manager not stored in framework initialization")
}
async fn | (&self) -> &U {
// We shouldn't get a Message event before a Ready event. But if we do, wait until
// the Ready event does come and the resulting data has arrived.
loop {
match self.user_data.get() {
Some(x) => break x,
None => tokio::time::sleep(std::time::Duration::from_millis(100)).await,
}
}
}
async fn event(&self, ctx: serenity::Context, event: Event<'_>)
where
U: Send + Sync,
{
match &event {
Event::Ready { data_about_bot } => {
let user_data_setup = Option::take(&mut *self.user_data_setup.lock().unwrap());
if let Some(user_data_setup) = user_data_setup {
match user_data_setup(&ctx, data_about_bot, self).await {
Ok(user_data) => {
let _: Result<_, _> = self.user_data.set(user_data);
}
Err(e) => (self.options.on_error)(e, ErrorContext::Setup).await,
}
} else {
// discarding duplicate Discord bot ready event
// (happens regularly when bot is online for long period of time)
}
}
Event::Message { new_message } => {
if let Err(Some((err, ctx))) =
prefix::dispatch_message(self, &ctx, new_message, false).await
{
if let Some(on_error) = ctx.command.options.on_error {
(on_error)(err, ctx).await;
} else {
(self.options.on_error)(
err,
crate::ErrorContext::Command(crate::CommandErrorContext::Prefix(ctx)),
)
.await;
}
}
}
Event::MessageUpdate { event, .. } => {
if let Some(edit_tracker) = &self.options.prefix_options.edit_tracker {
let msg = edit_tracker.write().unwrap().process_message_update(
event,
self.options().prefix_options.ignore_edit_tracker_cache,
);
if let Some(msg) = msg {
if let Err(Some((err, ctx))) =
prefix::dispatch_message(self, &ctx, &msg, true).await
{
(self.options.on_error)(
err,
crate::ErrorContext::Command(crate::CommandErrorContext::Prefix(
ctx,
| get_user_data | identifier_name |
mod.rs | );
return false;
}
None => return false,
};
// If member not in cache (probably because presences intent is not enabled), retrieve via HTTP
let member = match guild.members.get(&ctx.author().id) {
Some(x) => x.clone(),
None => match ctx
.discord()
.http
.get_member(guild_id.0, ctx.author().id.0)
.await
{
Ok(member) => member,
Err(_) => return false,
},
};
match guild.user_permissions_in(channel, &member) {
Ok(perms) => perms.contains(required_permissions),
Err(_) => false,
}
}
async fn check_required_permissions_and_owners_only<U, E>(
ctx: crate::Context<'_, U, E>,
required_permissions: serenity::Permissions,
owners_only: bool,
) -> bool {
if owners_only && !ctx.framework().options().owners.contains(&ctx.author().id) {
return false;
}
if !check_permissions(ctx, required_permissions).await {
return false;
}
true
}
/// The main framework struct which stores all data and handles message and interaction dispatch.
pub struct Framework<U, E> {
user_data: once_cell::sync::OnceCell<U>,
bot_id: serenity::UserId,
// TODO: wrap in RwLock to allow changing framework options while running? Could also replace
// the edit tracking cache interior mutability
options: FrameworkOptions<U, E>,
application_id: serenity::ApplicationId,
// Will be initialized to Some on construction, and then taken out on startup
client: std::sync::Mutex<Option<serenity::Client>>,
// Initialized to Some during construction; so shouldn't be None at any observable point
shard_manager: std::sync::Mutex<Option<std::sync::Arc<tokio::sync::Mutex<ShardManager>>>>,
// Filled with Some on construction. Taken out and executed on first Ready gateway event
user_data_setup: std::sync::Mutex<
Option<
Box<
dyn Send
+ Sync
+ for<'a> FnOnce(
&'a serenity::Context,
&'a serenity::Ready,
&'a Self,
) -> BoxFuture<'a, Result<U, E>>,
>,
>,
>,
}
impl<U, E> Framework<U, E> {
/// Create a framework builder to configure, create and run a framework.
///
/// For more information, see [`FrameworkBuilder`]
pub fn build() -> FrameworkBuilder<U, E> {
FrameworkBuilder::default()
}
/// Set up a new [`Framework`]. For more ergonomic setup, please see [`FrameworkBuilder`]
///
/// This function is async and returns Result because it already initializes the Discord client.
///
/// The user data callback is invoked as soon as the bot is logged in. That way, bot data like
/// user ID or connected guilds can be made available to the user data setup function. The user
/// data setup is not allowed to return Result because there would be no reasonable
/// course of action on error.
pub async fn new<F>(
application_id: serenity::ApplicationId,
client_builder: serenity::ClientBuilder<'_>,
user_data_setup: F,
options: FrameworkOptions<U, E>,
) -> Result<std::sync::Arc<Self>, serenity::Error>
where
F: Send
+ Sync
+ 'static
+ for<'a> FnOnce(
&'a serenity::Context,
&'a serenity::Ready,
&'a Self,
) -> BoxFuture<'a, Result<U, E>>,
U: Send + Sync + 'static,
E: Send + 'static,
{
let self_1 = std::sync::Arc::new(Self {
user_data: once_cell::sync::OnceCell::new(),
user_data_setup: std::sync::Mutex::new(Some(Box::new(user_data_setup))),
bot_id: serenity::parse_token(client_builder.get_token().trim_start_matches("Bot "))
.expect("Invalid bot token")
.bot_user_id,
// To break up the circular dependency (framework setup -> client setup -> event handler
// -> framework), we initialize this with None and then immediately fill in once the
// client is created
client: std::sync::Mutex::new(None),
options,
application_id,
shard_manager: std::sync::Mutex::new(None),
});
let self_2 = self_1.clone();
let event_handler = EventWrapper(move |ctx, event| {
let self_2 = std::sync::Arc::clone(&self_2);
Box::pin(async move {
self_2.event(ctx, event).await;
}) as _
});
let client: Client = client_builder
.application_id(application_id.0)
.event_handler(event_handler)
.await?;
*self_1.shard_manager.lock().unwrap() = Some(client.shard_manager.clone());
*self_1.client.lock().unwrap() = Some(client);
Ok(self_1)
}
/// Start the framework.
///
/// Takes a `serenity::ClientBuilder`, in which you need to supply the bot token, as well as
/// any gateway intents.
pub async fn start(self: std::sync::Arc<Self>) -> Result<(), serenity::Error>
where
U: Send + Sync + 'static,
E: Send + 'static,
{
let mut client = self
.client
.lock()
.unwrap()
.take()
.expect("Prepared client is missing");
let edit_track_cache_purge_task = tokio::spawn(async move {
loop {
if let Some(edit_tracker) = &self.options.prefix_options.edit_tracker {
edit_tracker.write().unwrap().purge();
}
// not sure if the purging interval should be configurable
tokio::time::sleep(std::time::Duration::from_secs(60)).await;
}
});
// This will run for as long as the bot is active
client.start().await?;
edit_track_cache_purge_task.abort();
Ok(())
}
/// Returns the stored framework options, including commands.
pub fn options(&self) -> &FrameworkOptions<U, E> {
&self.options
}
/// Returns the application ID given to the framework on its creation.
pub fn application_id(&self) -> serenity::ApplicationId {
self.application_id
}
/// Returns serenity's client shard manager.
pub fn shard_manager(&self) -> std::sync::Arc<tokio::sync::Mutex<ShardManager>> {
self.shard_manager
.lock()
.unwrap()
.clone()
.expect("fatal: shard manager not stored in framework initialization")
}
async fn get_user_data(&self) -> &U {
// We shouldn't get a Message event before a Ready event. But if we do, wait until
// the Ready event does come and the resulting data has arrived.
loop {
match self.user_data.get() {
Some(x) => break x,
None => tokio::time::sleep(std::time::Duration::from_millis(100)).await,
}
}
}
async fn event(&self, ctx: serenity::Context, event: Event<'_>)
where
U: Send + Sync,
{
match &event {
Event::Ready { data_about_bot } => {
let user_data_setup = Option::take(&mut *self.user_data_setup.lock().unwrap());
if let Some(user_data_setup) = user_data_setup {
match user_data_setup(&ctx, data_about_bot, self).await {
Ok(user_data) => {
let _: Result<_, _> = self.user_data.set(user_data);
}
Err(e) => (self.options.on_error)(e, ErrorContext::Setup).await,
}
} else {
// discarding duplicate Discord bot ready event
// (happens regularly when bot is online for long period of time)
}
}
Event::Message { new_message } => {
if let Err(Some((err, ctx))) =
prefix::dispatch_message(self, &ctx, new_message, false).await
{
if let Some(on_error) = ctx.command.options.on_error {
(on_error)(err, ctx).await;
} else {
(self.options.on_error)(
err,
crate::ErrorContext::Command(crate::CommandErrorContext::Prefix(ctx)),
)
.await;
}
}
}
Event::MessageUpdate { event, .. } => {
if let Some(edit_tracker) = & | {
if required_permissions.is_empty() {
return true;
}
let guild_id = match ctx.guild_id() {
Some(x) => x,
None => return true, // no permission checks in DMs
};
let guild = match ctx.discord().cache.guild(guild_id) {
Some(x) => x,
None => return false, // Guild not in cache
};
let channel = match guild.channels.get(&ctx.channel_id()) {
Some(serenity::Channel::Guild(channel)) => channel,
Some(_other_channel) => {
println!(
"Warning: guild message was supposedly sent in a non-guild channel. Denying invocation" | identifier_body |
|
mod.rs | ::Guild(channel)) => channel,
Some(_other_channel) => {
println!(
"Warning: guild message was supposedly sent in a non-guild channel. Denying invocation"
);
return false;
}
None => return false,
};
// If member not in cache (probably because presences intent is not enabled), retrieve via HTTP
let member = match guild.members.get(&ctx.author().id) {
Some(x) => x.clone(),
None => match ctx
.discord()
.http
.get_member(guild_id.0, ctx.author().id.0)
.await
{
Ok(member) => member,
Err(_) => return false,
},
};
match guild.user_permissions_in(channel, &member) {
Ok(perms) => perms.contains(required_permissions),
Err(_) => false,
}
}
async fn check_required_permissions_and_owners_only<U, E>(
ctx: crate::Context<'_, U, E>,
required_permissions: serenity::Permissions,
owners_only: bool,
) -> bool {
if owners_only && !ctx.framework().options().owners.contains(&ctx.author().id) {
return false;
}
if !check_permissions(ctx, required_permissions).await {
return false;
}
true
}
/// The main framework struct which stores all data and handles message and interaction dispatch.
pub struct Framework<U, E> {
user_data: once_cell::sync::OnceCell<U>,
bot_id: serenity::UserId,
// TODO: wrap in RwLock to allow changing framework options while running? Could also replace
// the edit tracking cache interior mutability
options: FrameworkOptions<U, E>,
application_id: serenity::ApplicationId,
// Will be initialized to Some on construction, and then taken out on startup
client: std::sync::Mutex<Option<serenity::Client>>,
// Initialized to Some during construction; so shouldn't be None at any observable point
shard_manager: std::sync::Mutex<Option<std::sync::Arc<tokio::sync::Mutex<ShardManager>>>>,
// Filled with Some on construction. Taken out and executed on first Ready gateway event
user_data_setup: std::sync::Mutex<
Option<
Box<
dyn Send
+ Sync
+ for<'a> FnOnce(
&'a serenity::Context,
&'a serenity::Ready,
&'a Self,
) -> BoxFuture<'a, Result<U, E>>,
>,
>,
>,
}
impl<U, E> Framework<U, E> {
/// Create a framework builder to configure, create and run a framework.
///
/// For more information, see [`FrameworkBuilder`]
pub fn build() -> FrameworkBuilder<U, E> {
FrameworkBuilder::default()
}
/// Set up a new [`Framework`]. For more ergonomic setup, please see [`FrameworkBuilder`]
///
/// This function is async and returns Result because it already initializes the Discord client.
///
/// The user data callback is invoked as soon as the bot is logged in. That way, bot data like
/// user ID or connected guilds can be made available to the user data setup function. The user
/// data setup is not allowed to return Result because there would be no reasonable
/// course of action on error.
pub async fn new<F>(
application_id: serenity::ApplicationId,
client_builder: serenity::ClientBuilder<'_>,
user_data_setup: F,
options: FrameworkOptions<U, E>,
) -> Result<std::sync::Arc<Self>, serenity::Error>
where
F: Send
+ Sync
+ 'static
+ for<'a> FnOnce(
&'a serenity::Context,
&'a serenity::Ready,
&'a Self,
) -> BoxFuture<'a, Result<U, E>>,
U: Send + Sync + 'static,
E: Send + 'static,
{
let self_1 = std::sync::Arc::new(Self {
user_data: once_cell::sync::OnceCell::new(),
user_data_setup: std::sync::Mutex::new(Some(Box::new(user_data_setup))),
bot_id: serenity::parse_token(client_builder.get_token().trim_start_matches("Bot "))
.expect("Invalid bot token")
.bot_user_id,
// To break up the circular dependency (framework setup -> client setup -> event handler
// -> framework), we initialize this with None and then immediately fill in once the
// client is created
client: std::sync::Mutex::new(None),
options,
application_id,
shard_manager: std::sync::Mutex::new(None),
});
let self_2 = self_1.clone();
let event_handler = EventWrapper(move |ctx, event| {
let self_2 = std::sync::Arc::clone(&self_2);
Box::pin(async move {
self_2.event(ctx, event).await;
}) as _
});
let client: Client = client_builder
.application_id(application_id.0)
.event_handler(event_handler)
.await?;
*self_1.shard_manager.lock().unwrap() = Some(client.shard_manager.clone());
*self_1.client.lock().unwrap() = Some(client);
Ok(self_1)
}
/// Start the framework.
///
/// Takes a `serenity::ClientBuilder`, in which you need to supply the bot token, as well as
/// any gateway intents.
pub async fn start(self: std::sync::Arc<Self>) -> Result<(), serenity::Error>
where
U: Send + Sync + 'static,
E: Send + 'static,
{
let mut client = self
.client
.lock()
.unwrap()
.take()
.expect("Prepared client is missing");
let edit_track_cache_purge_task = tokio::spawn(async move {
loop {
if let Some(edit_tracker) = &self.options.prefix_options.edit_tracker {
edit_tracker.write().unwrap().purge();
}
// not sure if the purging interval should be configurable
tokio::time::sleep(std::time::Duration::from_secs(60)).await;
}
});
// This will run for as long as the bot is active
client.start().await?;
edit_track_cache_purge_task.abort();
Ok(())
}
/// Returns the stored framework options, including commands.
pub fn options(&self) -> &FrameworkOptions<U, E> { | pub fn application_id(&self) -> serenity::ApplicationId {
self.application_id
}
/// Returns serenity's client shard manager.
pub fn shard_manager(&self) -> std::sync::Arc<tokio::sync::Mutex<ShardManager>> {
self.shard_manager
.lock()
.unwrap()
.clone()
.expect("fatal: shard manager not stored in framework initialization")
}
async fn get_user_data(&self) -> &U {
// We shouldn't get a Message event before a Ready event. But if we do, wait until
// the Ready event does come and the resulting data has arrived.
loop {
match self.user_data.get() {
Some(x) => break x,
None => tokio::time::sleep(std::time::Duration::from_millis(100)).await,
}
}
}
async fn event(&self, ctx: serenity::Context, event: Event<'_>)
where
U: Send + Sync,
{
match &event {
Event::Ready { data_about_bot } => {
let user_data_setup = Option::take(&mut *self.user_data_setup.lock().unwrap());
if let Some(user_data_setup) = user_data_setup {
match user_data_setup(&ctx, data_about_bot, self).await {
Ok(user_data) => {
let _: Result<_, _> = self.user_data.set(user_data);
}
Err(e) => (self.options.on_error)(e, ErrorContext::Setup).await,
}
} else {
// discarding duplicate Discord bot ready event
// (happens regularly when bot is online for long period of time)
}
}
Event::Message { new_message } => {
if let Err(Some((err, ctx))) =
prefix::dispatch_message(self, &ctx, new_message, false).await
{
if let Some(on_error) = ctx.command.options.on_error {
(on_error)(err, ctx).await;
} else {
(self.options.on_error)(
err,
crate::ErrorContext::Command(crate::CommandErrorContext::Prefix(ctx)),
)
.await;
}
}
}
Event::MessageUpdate { event, .. } => {
if let Some(edit_tracker) = &self.options.prefix_options.edit_tracker {
let msg = edit_tracker.write().unwrap().process_message_update(
event,
self.options().prefix_options.ignore_edit_tracker_cache,
);
if let Some(msg) = msg {
if let Err(Some((err, ctx))) =
prefix::dispatch_message(self, &ctx, &msg, true).await
{
(self.options.on_error)(
err,
crate::ErrorContext::Command(crate::CommandErrorContext::Prefix(
ctx,
)),
| &self.options
}
/// Returns the application ID given to the framework on its creation. | random_line_split |
plugin.go | []string
MatchEmail bool
Port int
Tunnel bool
Debug bool
Domain string
AutoTLS bool
Host []string
}
// Plugin values.
Plugin struct {
Repo Repo
Build Build
Config Config
}
// Audio format
Audio struct {
URL string
Duration int
}
// Location format
Location struct {
Title string
Address string
Latitude float64
Longitude float64
}
)
var (
// ReceiveCount is receive notification count
ReceiveCount int64
// SendCount is send notification count
SendCount int64
)
func init() {
// Support metrics
m := NewMetrics()
prometheus.MustRegister(m)
}
func trimElement(keys []string) []string {
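// Trim surrounding spaces from every key and drop empty entries.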
var newKeys []string
for _, value := range keys {
value = strings.Trim(value, " ")
if len(value) == 0 {
continue
}
newKeys = append(newKeys, value)
}
return newKeys
}
func convertImage(value, delimiter string) []string {
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
values = append(values, values[0])
}
return values
}
func convertVideo(value, delimiter string) []string {
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
values = append(values, defaultPreviewImageURL)
}
return values
}
func convertAudio(value, delimiter string) (Audio, bool) {
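// The value is expected as URL and duration (in seconds) joined by delimiter;
// the returned bool is an error flag set when the input is malformed.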
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
return Audio{}, true
}
duration, err := strconv.Atoi(values[1])
if err != nil {
log.Println(err.Error())
return Audio{}, true
}
return Audio{
URL: values[0],
Duration: duration,
}, false
}
func convertSticker(value, delimiter string) ([]string, bool) {
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
return []string{}, true
}
return values, false
}
func convertLocation(value, delimiter string) (Location, bool) {
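// The value is expected as title, address, latitude and longitude joined by
// delimiter; the returned bool is an error flag set when fields are missing
// or fail to parse.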
var latitude, longitude float64
var err error
values := trimElement(strings.Split(value, delimiter))
if len(values) < 4 {
return Location{}, true
}
latitude, err = strconv.ParseFloat(values[2], 64)
if err != nil {
log.Println(err.Error())
return Location{}, true
}
longitude, err = strconv.ParseFloat(values[3], 64)
if err != nil {
log.Println(err.Error())
return Location{}, true
}
return Location{
Title: values[0],
Address: values[1],
Latitude: latitude,
Longitude: longitude,
}, false
}
func parseTo(to []string, authorEmail string, matchEmail bool, delimiter string) []string {
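// Each target is either a bare ID or an ID/email pair split on delimiter;
// pairs are kept only when the email matches the commit author. With
// matchEmail set and at least one matching pair, only those IDs are returned.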
var emails []string
var ids []string
attachEmail := true
for _, value := range trimElement(to) {
idArray := trimElement(strings.Split(value, delimiter))
// check match author email
if len(idArray) > 1 {
if email := idArray[1]; email != authorEmail {
continue
}
emails = append(emails, idArray[0])
attachEmail = false
continue
}
ids = append(ids, idArray[0])
}
if matchEmail && !attachEmail {
return emails
}
for _, value := range emails {
ids = append(ids, value)
}
return ids
}
// Bot creates a new Line Bot client.
func (p Plugin) Bot() (*linebot.Client, error) {
if len(p.Config.ChannelToken) == 0 || len(p.Config.ChannelSecret) == 0 {
log.Println("missing line bot config")
return nil, errors.New("missing line bot config")
}
return linebot.New(p.Config.ChannelSecret, p.Config.ChannelToken)
}
func (p Plugin) getTunnelDomain() (string, error) {
var domain string
if p.Config.Domain != "" {
if len(p.Config.Domain) < 4 || len(p.Config.Domain) > 63 {
return "", errors.New("tunnel host name must be lowercase and between 4 and 63 alphanumeric characters")
}
domain = p.Config.Domain
} else {
domain = strings.ToLower(random.String(10))
}
return domain, nil
}
// Handler is http handler.
func (p Plugin) Handler(bot *linebot.Client) *http.ServeMux {
mux := http.NewServeMux()
// Setup HTTP Server for receiving requests from LINE platform
mux.HandleFunc("/callback", func(w http.ResponseWriter, req *http.Request) {
events, err := bot.ParseRequest(req)
if err != nil {
if err == linebot.ErrInvalidSignature {
w.WriteHeader(400)
} else {
w.WriteHeader(500)
}
return
}
for _, event := range events {
if event.Type == linebot.EventTypeMessage {
switch message := event.Message.(type) {
case *linebot.TextMessage:
log.Printf("User ID is %v\n", event.Source.UserID)
log.Printf("Room ID is %v\n", event.Source.RoomID)
log.Printf("Group ID is %v\n", event.Source.GroupID)
ReceiveCount++
if message.Text == "test" {
SendCount++
if _, err = bot.ReplyMessage(event.ReplyToken, linebot.NewTextMessage("count + 1")).Do(); err != nil {
log.Print(err)
}
}
if _, err = bot.ReplyMessage(event.ReplyToken, linebot.NewTextMessage(message.Text)).Do(); err != nil |
}
}
}
})
mux.HandleFunc("/metrics", func(w http.ResponseWriter, req *http.Request) {
promhttp.Handler().ServeHTTP(w, req)
})
// Root route: serve a simple welcome page
mux.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) {
fmt.Fprintln(w, "Welcome to Line webhook page.")
})
return mux
}
// Webhook support line callback service.
func (p Plugin) Webhook() error {
readyToListen := false
bot, err := p.Bot()
if err != nil {
return err
}
mux := p.Handler(bot)
if p.Config.Tunnel {
if p.Config.Debug {
gotunnelme.Debug = true
}
domain, err := p.getTunnelDomain()
if err != nil {
panic(err)
}
tunnel := gotunnelme.NewTunnel()
url, err := tunnel.GetUrl(domain)
if err != nil {
panic("Could not get localtunnel.me URL. " + err.Error())
}
go func() {
for !readyToListen {
time.Sleep(1 * time.Second)
}
c := color.New(color.FgYellow)
c.Println("Tunnel URL:", url)
err := tunnel.CreateTunnel(p.Config.Port)
if err != nil {
panic("Could not create tunnel. " + err.Error())
}
}()
}
readyToListen = true
if p.Config.Port != 443 && !p.Config.AutoTLS {
log.Println("Line Webhook Server Listin on " + strconv.Itoa(p.Config.Port) + " port")
if err := http.ListenAndServe(":"+strconv.Itoa(p.Config.Port), mux); err != nil {
log.Fatal(err)
}
}
if p.Config.AutoTLS && len(p.Config.Host) != 0 {
log.Println("Line Webhook Server Listin on 443 port, hostname: " + strings.Join(p.Config.Host, ", "))
return http.Serve(autocert.NewListener(p.Config.Host...), mux)
}
return nil
}
// Notify for Line notify service
// https://notify-bot.line.me
func (p Plugin) Notify() error {
if p.Config.ChannelToken == "" || len(p.Config.Message) == 0 {
return errors.New("missing token or message")
}
for _, m := range p.Config.Message {
if err := p.notify(m, p.Config.ChannelToken); err != nil {
return err
}
}
return nil
}
func (p Plugin) notify(message, token string) error {
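// Send a single message to the LINE Notify endpoint, authenticating with the
// channel token as a Bearer credential.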
data := url.Values{}
data.Add("message", message)
u, _ := url.ParseRequestURI("https://notify-api.line.me/api/notify")
urlStr := u.String()
req, err := http.NewRequest(
"POST",
urlStr,
strings.NewReader(data.Encode()),
)
if err != nil {
return errors.New("failed to create request:" + err.Error())
}
req.Header.Add("Authorization", "Bearer "+token)
req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
req.Header.Add("Content-Length", strconv.Itoa(len(data.Encode())))
client := &http.Client{}
res, err := client.Do(req)
if err != nil {
return errors.New("failed to process request:" + err.Error())
}
defer res.Body.Close()
if p.Config.Debug {
log.Println("=================================")
| {
log.Print(err)
} | conditional_block |
plugin.go | []string
MatchEmail bool
Port int
Tunnel bool
Debug bool
Domain string
AutoTLS bool
Host []string
}
// Plugin values.
Plugin struct {
Repo Repo
Build Build
Config Config
}
// Audio format
Audio struct {
URL string
Duration int
}
// Location format
Location struct {
Title string
Address string
Latitude float64
Longitude float64
}
)
var (
// ReceiveCount is receive notification count
ReceiveCount int64
// SendCount is send notification count
SendCount int64
)
func init() |
func trimElement(keys []string) []string {
var newKeys []string
for _, value := range keys {
value = strings.Trim(value, " ")
if len(value) == 0 {
continue
}
newKeys = append(newKeys, value)
}
return newKeys
}
func convertImage(value, delimiter string) []string {
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
values = append(values, values[0])
}
return values
}
func convertVideo(value, delimiter string) []string {
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
values = append(values, defaultPreviewImageURL)
}
return values
}
func convertAudio(value, delimiter string) (Audio, bool) {
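// The value is expected as URL and duration (in seconds) joined by delimiter;
// the returned bool is an error flag set when the input is malformed.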
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
return Audio{}, true
}
duration, err := strconv.Atoi(values[1])
if err != nil {
log.Println(err.Error())
return Audio{}, true
}
return Audio{
URL: values[0],
Duration: duration,
}, false
}
func convertSticker(value, delimiter string) ([]string, bool) {
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
return []string{}, true
}
return values, false
}
func convertLocation(value, delimiter string) (Location, bool) {
var latitude, longitude float64
var err error
values := trimElement(strings.Split(value, delimiter))
if len(values) < 4 {
return Location{}, true
}
latitude, err = strconv.ParseFloat(values[2], 64)
if err != nil {
log.Println(err.Error())
return Location{}, true
}
longitude, err = strconv.ParseFloat(values[3], 64)
if err != nil {
log.Println(err.Error())
return Location{}, true
}
return Location{
Title: values[0],
Address: values[1],
Latitude: latitude,
Longitude: longitude,
}, false
}
func parseTo(to []string, authorEmail string, matchEmail bool, delimiter string) []string {
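// Each target is either a bare ID or an ID/email pair split on delimiter;
// pairs are kept only when the email matches the commit author. With
// matchEmail set and at least one matching pair, only those IDs are returned.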
var emails []string
var ids []string
attachEmail := true
for _, value := range trimElement(to) {
idArray := trimElement(strings.Split(value, delimiter))
// check match author email
if len(idArray) > 1 {
if email := idArray[1]; email != authorEmail {
continue
}
emails = append(emails, idArray[0])
attachEmail = false
continue
}
ids = append(ids, idArray[0])
}
if matchEmail && !attachEmail {
return emails
}
for _, value := range emails {
ids = append(ids, value)
}
return ids
}
// Bot creates a new Line Bot client.
func (p Plugin) Bot() (*linebot.Client, error) {
if len(p.Config.ChannelToken) == 0 || len(p.Config.ChannelSecret) == 0 {
log.Println("missing line bot config")
return nil, errors.New("missing line bot config")
}
return linebot.New(p.Config.ChannelSecret, p.Config.ChannelToken)
}
func (p Plugin) getTunnelDomain() (string, error) {
var domain string
if p.Config.Domain != "" {
if len(p.Config.Domain) < 4 || len(p.Config.Domain) > 63 {
return "", errors.New("tunnel host name must be lowercase and between 4 and 63 alphanumeric characters")
}
domain = p.Config.Domain
} else {
domain = strings.ToLower(random.String(10))
}
return domain, nil
}
// Handler is http handler.
func (p Plugin) Handler(bot *linebot.Client) *http.ServeMux {
mux := http.NewServeMux()
// Setup HTTP Server for receiving requests from LINE platform
mux.HandleFunc("/callback", func(w http.ResponseWriter, req *http.Request) {
events, err := bot.ParseRequest(req)
if err != nil {
if err == linebot.ErrInvalidSignature {
w.WriteHeader(400)
} else {
w.WriteHeader(500)
}
return
}
for _, event := range events {
if event.Type == linebot.EventTypeMessage {
switch message := event.Message.(type) {
case *linebot.TextMessage:
log.Printf("User ID is %v\n", event.Source.UserID)
log.Printf("Room ID is %v\n", event.Source.RoomID)
log.Printf("Group ID is %v\n", event.Source.GroupID)
ReceiveCount++
if message.Text == "test" {
SendCount++
if _, err = bot.ReplyMessage(event.ReplyToken, linebot.NewTextMessage("count + 1")).Do(); err != nil {
log.Print(err)
}
}
if _, err = bot.ReplyMessage(event.ReplyToken, linebot.NewTextMessage(message.Text)).Do(); err != nil {
log.Print(err)
}
}
}
}
})
mux.HandleFunc("/metrics", func(w http.ResponseWriter, req *http.Request) {
promhttp.Handler().ServeHTTP(w, req)
})
// Root route: serve a simple welcome page
mux.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) {
fmt.Fprintln(w, "Welcome to Line webhook page.")
})
return mux
}
// Webhook support line callback service.
func (p Plugin) Webhook() error {
readyToListen := false
bot, err := p.Bot()
if err != nil {
return err
}
mux := p.Handler(bot)
if p.Config.Tunnel {
if p.Config.Debug {
gotunnelme.Debug = true
}
domain, err := p.getTunnelDomain()
if err != nil {
panic(err)
}
tunnel := gotunnelme.NewTunnel()
url, err := tunnel.GetUrl(domain)
if err != nil {
panic("Could not get localtunnel.me URL. " + err.Error())
}
go func() {
for !readyToListen {
time.Sleep(1 * time.Second)
}
c := color.New(color.FgYellow)
c.Println("Tunnel URL:", url)
err := tunnel.CreateTunnel(p.Config.Port)
if err != nil {
panic("Could not create tunnel. " + err.Error())
}
}()
}
readyToListen = true
if p.Config.Port != 443 && !p.Config.AutoTLS {
log.Println("Line Webhook Server Listin on " + strconv.Itoa(p.Config.Port) + " port")
if err := http.ListenAndServe(":"+strconv.Itoa(p.Config.Port), mux); err != nil {
log.Fatal(err)
}
}
if p.Config.AutoTLS && len(p.Config.Host) != 0 {
log.Println("Line Webhook Server Listin on 443 port, hostname: " + strings.Join(p.Config.Host, ", "))
return http.Serve(autocert.NewListener(p.Config.Host...), mux)
}
return nil
}
// Notify for Line notify service
// https://notify-bot.line.me
func (p Plugin) Notify() error {
if p.Config.ChannelToken == "" || len(p.Config.Message) == 0 {
return errors.New("missing token or message")
}
for _, m := range p.Config.Message {
if err := p.notify(m, p.Config.ChannelToken); err != nil {
return err
}
}
return nil
}
func (p Plugin) notify(message, token string) error {
data := url.Values{}
data.Add("message", message)
u, _ := url.ParseRequestURI("https://notify-api.line.me/api/notify")
urlStr := u.String()
req, err := http.NewRequest(
"POST",
urlStr,
strings.NewReader(data.Encode()),
)
if err != nil {
return errors.New("failed to create request:" + err.Error())
}
req.Header.Add("Authorization", "Bearer "+token)
req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
req.Header.Add("Content-Length", strconv.Itoa(len(data.Encode())))
client := &http.Client{}
res, err := client.Do(req)
if err != nil {
return errors.New("failed to process request:" + err.Error())
}
defer res.Body.Close()
if p.Config.Debug {
log.Println("=================================")
| {
// Support metrics
m := NewMetrics()
prometheus.MustRegister(m)
} | identifier_body |
plugin.go | Location []string
MatchEmail bool
Port int
Tunnel bool
Debug bool
Domain string
AutoTLS bool
Host []string
}
// Plugin values.
Plugin struct {
Repo Repo
Build Build
Config Config
}
// Audio format
Audio struct {
URL string
Duration int
}
// Location format
Location struct {
Title string
Address string
Latitude float64
Longitude float64
}
)
var (
// ReceiveCount is receive notification count
ReceiveCount int64
// SendCount is send notification count
SendCount int64
)
func init() {
// Support metrics
m := NewMetrics()
prometheus.MustRegister(m)
}
func trimElement(keys []string) []string {
var newKeys []string
for _, value := range keys {
value = strings.Trim(value, " ")
if len(value) == 0 {
continue
}
newKeys = append(newKeys, value)
}
return newKeys
}
func convertImage(value, delimiter string) []string {
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
values = append(values, values[0])
}
return values
}
func convertVideo(value, delimiter string) []string {
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
values = append(values, defaultPreviewImageURL)
}
return values
}
func convertAudio(value, delimiter string) (Audio, bool) {
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
return Audio{}, true
}
duration, err := strconv.Atoi(values[1])
if err != nil {
log.Println(err.Error())
return Audio{}, true
}
return Audio{
URL: values[0],
Duration: duration,
}, false
}
func convertSticker(value, delimiter string) ([]string, bool) {
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
return []string{}, true
}
return values, false
}
func convertLocation(value, delimiter string) (Location, bool) {
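// The value is expected as title, address, latitude and longitude joined by
// delimiter; the returned bool is an error flag set when fields are missing
// or fail to parse.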
var latitude, longitude float64
var err error
values := trimElement(strings.Split(value, delimiter))
if len(values) < 4 {
return Location{}, true
}
latitude, err = strconv.ParseFloat(values[2], 64)
if err != nil {
log.Println(err.Error())
return Location{}, true
}
longitude, err = strconv.ParseFloat(values[3], 64)
if err != nil {
log.Println(err.Error())
return Location{}, true
}
return Location{
Title: values[0],
Address: values[1],
Latitude: latitude,
Longitude: longitude,
}, false
}
func parseTo(to []string, authorEmail string, matchEmail bool, delimiter string) []string {
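// Each target is either a bare ID or an ID/email pair split on delimiter;
// pairs are kept only when the email matches the commit author. With
// matchEmail set and at least one matching pair, only those IDs are returned.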
var emails []string
var ids []string
attachEmail := true
for _, value := range trimElement(to) {
idArray := trimElement(strings.Split(value, delimiter))
// check match author email
if len(idArray) > 1 {
if email := idArray[1]; email != authorEmail {
continue
}
emails = append(emails, idArray[0])
attachEmail = false
continue
}
ids = append(ids, idArray[0])
}
if matchEmail && !attachEmail {
return emails
}
for _, value := range emails {
ids = append(ids, value)
}
return ids | // Bot is new Line Bot clien.
func (p Plugin) Bot() (*linebot.Client, error) {
if len(p.Config.ChannelToken) == 0 || len(p.Config.ChannelSecret) == 0 {
log.Println("missing line bot config")
return nil, errors.New("missing line bot config")
}
return linebot.New(p.Config.ChannelSecret, p.Config.ChannelToken)
}
func (p Plugin) getTunnelDomain() (string, error) {
var domain string
if p.Config.Domain != "" {
if len(p.Config.Domain) < 4 || len(p.Config.Domain) > 63 {
return "", errors.New("tunnel host name must be lowercase and between 4 and 63 alphanumeric characters")
}
domain = p.Config.Domain
} else {
domain = strings.ToLower(random.String(10))
}
return domain, nil
}
// Handler is http handler.
func (p Plugin) Handler(bot *linebot.Client) *http.ServeMux {
mux := http.NewServeMux()
// Setup HTTP Server for receiving requests from LINE platform
mux.HandleFunc("/callback", func(w http.ResponseWriter, req *http.Request) {
events, err := bot.ParseRequest(req)
if err != nil {
if err == linebot.ErrInvalidSignature {
w.WriteHeader(400)
} else {
w.WriteHeader(500)
}
return
}
for _, event := range events {
if event.Type == linebot.EventTypeMessage {
switch message := event.Message.(type) {
case *linebot.TextMessage:
log.Printf("User ID is %v\n", event.Source.UserID)
log.Printf("Room ID is %v\n", event.Source.RoomID)
log.Printf("Group ID is %v\n", event.Source.GroupID)
ReceiveCount++
if message.Text == "test" {
SendCount++
if _, err = bot.ReplyMessage(event.ReplyToken, linebot.NewTextMessage("count + 1")).Do(); err != nil {
log.Print(err)
}
}
if _, err = bot.ReplyMessage(event.ReplyToken, linebot.NewTextMessage(message.Text)).Do(); err != nil {
log.Print(err)
}
}
}
}
})
mux.HandleFunc("/metrics", func(w http.ResponseWriter, req *http.Request) {
promhttp.Handler().ServeHTTP(w, req)
})
// Root route: serve a simple welcome page
mux.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) {
fmt.Fprintln(w, "Welcome to Line webhook page.")
})
return mux
}
// Webhook support line callback service.
func (p Plugin) Webhook() error {
readyToListen := false
bot, err := p.Bot()
if err != nil {
return err
}
mux := p.Handler(bot)
if p.Config.Tunnel {
if p.Config.Debug {
gotunnelme.Debug = true
}
domain, err := p.getTunnelDomain()
if err != nil {
panic(err)
}
tunnel := gotunnelme.NewTunnel()
url, err := tunnel.GetUrl(domain)
if err != nil {
panic("Could not get localtunnel.me URL. " + err.Error())
}
go func() {
for !readyToListen {
time.Sleep(1 * time.Second)
}
c := color.New(color.FgYellow)
c.Println("Tunnel URL:", url)
err := tunnel.CreateTunnel(p.Config.Port)
if err != nil {
panic("Could not create tunnel. " + err.Error())
}
}()
}
readyToListen = true
if p.Config.Port != 443 && !p.Config.AutoTLS {
log.Println("Line Webhook Server Listin on " + strconv.Itoa(p.Config.Port) + " port")
if err := http.ListenAndServe(":"+strconv.Itoa(p.Config.Port), mux); err != nil {
log.Fatal(err)
}
}
if p.Config.AutoTLS && len(p.Config.Host) != 0 {
log.Println("Line Webhook Server Listin on 443 port, hostname: " + strings.Join(p.Config.Host, ", "))
return http.Serve(autocert.NewListener(p.Config.Host...), mux)
}
return nil
}
// Notify for Line notify service
// https://notify-bot.line.me
func (p Plugin) Notify() error {
if p.Config.ChannelToken == "" || len(p.Config.Message) == 0 {
return errors.New("missing token or message")
}
for _, m := range p.Config.Message {
if err := p.notify(m, p.Config.ChannelToken); err != nil {
return err
}
}
return nil
}
func (p Plugin) notify(message, token string) error {
data := url.Values{}
data.Add("message", message)
u, _ := url.ParseRequestURI("https://notify-api.line.me/api/notify")
urlStr := u.String()
req, err := http.NewRequest(
"POST",
urlStr,
strings.NewReader(data.Encode()),
)
if err != nil {
return errors.New("failed to create request:" + err.Error())
}
req.Header.Add("Authorization", "Bearer "+token)
req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
req.Header.Add("Content-Length", strconv.Itoa(len(data.Encode())))
client := &http.Client{}
res, err := client.Do(req)
if err != nil {
return errors.New("failed to process request:" + err.Error())
}
defer res.Body.Close()
if p.Config.Debug {
log.Println("=================================")
log | }
| random_line_split |
plugin.go | Audio format
Audio struct {
URL string
Duration int
}
// Location format
Location struct {
Title string
Address string
Latitude float64
Longitude float64
}
)
var (
// ReceiveCount is receive notification count
ReceiveCount int64
// SendCount is send notification count
SendCount int64
)
func init() {
// Support metrics
m := NewMetrics()
prometheus.MustRegister(m)
}
func trimElement(keys []string) []string {
var newKeys []string
for _, value := range keys {
value = strings.Trim(value, " ")
if len(value) == 0 {
continue
}
newKeys = append(newKeys, value)
}
return newKeys
}
func convertImage(value, delimiter string) []string {
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
values = append(values, values[0])
}
return values
}
func convertVideo(value, delimiter string) []string {
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
values = append(values, defaultPreviewImageURL)
}
return values
}
func convertAudio(value, delimiter string) (Audio, bool) {
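// The value is expected as URL and duration (in seconds) joined by delimiter;
// the returned bool is an error flag set when the input is malformed.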
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
return Audio{}, true
}
duration, err := strconv.Atoi(values[1])
if err != nil {
log.Println(err.Error())
return Audio{}, true
}
return Audio{
URL: values[0],
Duration: duration,
}, false
}
func convertSticker(value, delimiter string) ([]string, bool) {
values := trimElement(strings.Split(value, delimiter))
if len(values) < 2 {
return []string{}, true
}
return values, false
}
func convertLocation(value, delimiter string) (Location, bool) {
var latitude, longitude float64
var err error
values := trimElement(strings.Split(value, delimiter))
if len(values) < 4 {
return Location{}, true
}
latitude, err = strconv.ParseFloat(values[2], 64)
if err != nil {
log.Println(err.Error())
return Location{}, true
}
longitude, err = strconv.ParseFloat(values[3], 64)
if err != nil {
log.Println(err.Error())
return Location{}, true
}
return Location{
Title: values[0],
Address: values[1],
Latitude: latitude,
Longitude: longitude,
}, false
}
func parseTo(to []string, authorEmail string, matchEmail bool, delimiter string) []string {
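// Each target is either a bare ID or an ID/email pair split on delimiter;
// pairs are kept only when the email matches the commit author. With
// matchEmail set and at least one matching pair, only those IDs are returned.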
var emails []string
var ids []string
attachEmail := true
for _, value := range trimElement(to) {
idArray := trimElement(strings.Split(value, delimiter))
// check match author email
if len(idArray) > 1 {
if email := idArray[1]; email != authorEmail {
continue
}
emails = append(emails, idArray[0])
attachEmail = false
continue
}
ids = append(ids, idArray[0])
}
if matchEmail && !attachEmail {
return emails
}
for _, value := range emails {
ids = append(ids, value)
}
return ids
}
// Bot creates a new Line Bot client.
func (p Plugin) Bot() (*linebot.Client, error) {
if len(p.Config.ChannelToken) == 0 || len(p.Config.ChannelSecret) == 0 {
log.Println("missing line bot config")
return nil, errors.New("missing line bot config")
}
return linebot.New(p.Config.ChannelSecret, p.Config.ChannelToken)
}
func (p Plugin) getTunnelDomain() (string, error) {
var domain string
if p.Config.Domain != "" {
if len(p.Config.Domain) < 4 || len(p.Config.Domain) > 63 {
return "", errors.New("tunnel host name must be lowercase and between 4 and 63 alphanumeric characters")
}
domain = p.Config.Domain
} else {
domain = strings.ToLower(random.String(10))
}
return domain, nil
}
// Handler is http handler.
func (p Plugin) Handler(bot *linebot.Client) *http.ServeMux {
mux := http.NewServeMux()
// Setup HTTP Server for receiving requests from LINE platform
mux.HandleFunc("/callback", func(w http.ResponseWriter, req *http.Request) {
events, err := bot.ParseRequest(req)
if err != nil {
if err == linebot.ErrInvalidSignature {
w.WriteHeader(400)
} else {
w.WriteHeader(500)
}
return
}
for _, event := range events {
if event.Type == linebot.EventTypeMessage {
switch message := event.Message.(type) {
case *linebot.TextMessage:
log.Printf("User ID is %v\n", event.Source.UserID)
log.Printf("Room ID is %v\n", event.Source.RoomID)
log.Printf("Group ID is %v\n", event.Source.GroupID)
ReceiveCount++
if message.Text == "test" {
SendCount++
if _, err = bot.ReplyMessage(event.ReplyToken, linebot.NewTextMessage("count + 1")).Do(); err != nil {
log.Print(err)
}
}
if _, err = bot.ReplyMessage(event.ReplyToken, linebot.NewTextMessage(message.Text)).Do(); err != nil {
log.Print(err)
}
}
}
}
})
mux.HandleFunc("/metrics", func(w http.ResponseWriter, req *http.Request) {
promhttp.Handler().ServeHTTP(w, req)
})
// Root route: serve a simple welcome page
mux.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) {
fmt.Fprintln(w, "Welcome to Line webhook page.")
})
return mux
}
// Webhook support line callback service.
func (p Plugin) Webhook() error {
readyToListen := false
bot, err := p.Bot()
if err != nil {
return err
}
mux := p.Handler(bot)
if p.Config.Tunnel {
if p.Config.Debug {
gotunnelme.Debug = true
}
domain, err := p.getTunnelDomain()
if err != nil {
panic(err)
}
tunnel := gotunnelme.NewTunnel()
url, err := tunnel.GetUrl(domain)
if err != nil {
panic("Could not get localtunnel.me URL. " + err.Error())
}
go func() {
for !readyToListen {
time.Sleep(1 * time.Second)
}
c := color.New(color.FgYellow)
c.Println("Tunnel URL:", url)
err := tunnel.CreateTunnel(p.Config.Port)
if err != nil {
panic("Could not create tunnel. " + err.Error())
}
}()
}
readyToListen = true
if p.Config.Port != 443 && !p.Config.AutoTLS {
log.Println("Line Webhook Server Listin on " + strconv.Itoa(p.Config.Port) + " port")
if err := http.ListenAndServe(":"+strconv.Itoa(p.Config.Port), mux); err != nil {
log.Fatal(err)
}
}
if p.Config.AutoTLS && len(p.Config.Host) != 0 {
log.Println("Line Webhook Server Listin on 443 port, hostname: " + strings.Join(p.Config.Host, ", "))
return http.Serve(autocert.NewListener(p.Config.Host...), mux)
}
return nil
}
// Notify for Line notify service
// https://notify-bot.line.me
func (p Plugin) Notify() error {
if p.Config.ChannelToken == "" || len(p.Config.Message) == 0 {
return errors.New("missing token or message")
}
for _, m := range p.Config.Message {
if err := p.notify(m, p.Config.ChannelToken); err != nil {
return err
}
}
return nil
}
func (p Plugin) notify(message, token string) error {
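// Send a single message to the LINE Notify endpoint, authenticating with the
// channel token as a Bearer credential.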
data := url.Values{}
data.Add("message", message)
u, _ := url.ParseRequestURI("https://notify-api.line.me/api/notify")
urlStr := u.String()
req, err := http.NewRequest(
"POST",
urlStr,
strings.NewReader(data.Encode()),
)
if err != nil {
return errors.New("failed to create request:" + err.Error())
}
req.Header.Add("Authorization", "Bearer "+token)
req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
req.Header.Add("Content-Length", strconv.Itoa(len(data.Encode())))
client := &http.Client{}
res, err := client.Do(req)
if err != nil {
return errors.New("failed to process request:" + err.Error())
}
defer res.Body.Close()
if p.Config.Debug {
log.Println("=================================")
log.Printf("%#v\n", res)
log.Println("=================================")
}
if res.Status == "200 OK" {
log.Println("successfully send notfiy")
}
if err != nil {
return errors.New("failed to create request:" + err.Error())
}
return nil
}
// Exec executes the plugin.
func (p Plugin) | Exec | identifier_name |
|
render.rs | 0.0f, 1.0f
);
int index = flipped != 0 ? flipped_vertex_id() : gl_VertexID;
if (frame == -1)
texcoord = TEXCOORD_FROM_ID[index];
else
texcoord = frames[frame * 4 + index];
texcoord.y = 1 - texcoord.y;
}
";
pub static STANDARD_FRAGMENT: &'static str = "
#version 330 core
in vec2 texcoord;
out vec4 color;
uniform sampler2D tex;
void main()
{
color = texture(tex, texcoord);
}
";
macro_rules! check_log(
($typ:expr, $get_iv:ident | $get_log:ident $val:ident $status:ident $on_error:ident) => (
unsafe {
let mut status = 0;
gl::$get_iv($val, gl::$status, &mut status);
if status == 0 {
let mut len = 0;
gl::$get_iv($val, gl::INFO_LOG_LENGTH, &mut len);
let mut buf = Vec::with_capacity(len as usize - 1);
for _ in (0..len-1) { buf.push(0); }
gl::$get_log($val, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar);
match String::from_utf8(buf) {
Ok(error_message) => $on_error!("{}", error_message),
Err(e) => $on_error!("Error parsing OpenGL error message: {}", e)
}
false
} else {
println!("I THINK THE {} COMPILED", $typ);
true
}
}
)
);
macro_rules! make_shader(
(($name:expr): $shader_type:ident) => (
unsafe {
let sh = gl::CreateShader(gl::$shader_type);
let shader_src_str = CString::new($name).unwrap();
gl::ShaderSource(sh, 1, &shader_src_str.as_ptr(), ptr::null());
gl::CompileShader(sh);
sh
}
)
);
pub struct Texcoords {
pub top_right: Vec2<GLfloat>,
pub bottom_right: Vec2<GLfloat>,
pub bottom_left: Vec2<GLfloat>,
pub top_left: Vec2<GLfloat>
}
impl Texcoords {
pub unsafe fn copy_to(&self, dest: *mut Texcoords) {
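// Safety: `dest` must point to memory valid for writing one `Texcoords` value.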
ptr::copy(self, dest, 1);
}
}
// Represents an animation frame; a rectangular section of a Texture.
pub struct Frame {
pub position: Vec2<f32>,
pub size: Vec2<f32>,
// Texcoords are generated via #generate_texcoords.
pub texcoords: Texcoords
}
impl Frame {
pub fn generate_texcoords(&mut self, tex_width: f32, tex_height: f32) {
let ref position = self.position;
let ref size = self.size;
// TODO SIMD this son of a bitch
self.texcoords = Texcoords {
top_right: Vec2::new(
(position.x + size.x) / tex_width,
(position.y + size.y) / tex_height
),
bottom_right: Vec2::new(
(position.x + size.x) / tex_width,
(position.y) / tex_height
),
bottom_left: Vec2::new(
(position.x) / tex_width,
(position.y) / tex_height
),
top_left: Vec2::new(
(position.x) / tex_width,
(position.y + size.y) / tex_height
)
};
}
}
// Represents an actual texture that is currently on the GPU.
#[allow(missing_copy_implementations)]
pub struct Texture {
pub id: GLuint,
pub width: i32,
pub height: i32,
pub filename: &'static str,
pub frame_texcoords_size: i64,
pub texcoords_space: *mut [Texcoords]
}
impl Texture {
pub fn set_full(&self, sampler_uniform: GLint, sprite_size_uniform: GLint) {
unsafe {
gl::ActiveTexture(gl::TEXTURE0);
gl::BindTexture(gl::TEXTURE_2D, self.id);
gl::Uniform1i(sampler_uniform, 0);
gl::Uniform2f(sprite_size_uniform, self.width as f32, self.height as f32);
}
}
#[inline]
pub fn texcoords(&self) -> &[Texcoords] {
unsafe { transmute(self.texcoords_space) }
}
#[inline]
pub fn texcoords_mut(&mut self) -> &mut [Texcoords] {
unsafe { transmute(self.texcoords_space) }
}
// NOTE this expects #generate_texcoords_buffer to have been called
// if there are frames.
pub fn set(&self, sampler_uniform: GLint,
sprite_size_uniform: GLint,
frames_uniform: GLint,
width: f32, height: f32) {
unsafe {
assert!(self.frame_texcoords_size / 8 < FRAME_UNIFORM_MAX);
gl::ActiveTexture(gl::TEXTURE0);
gl::BindTexture(gl::TEXTURE_2D, self.id);
gl::Uniform1i(sampler_uniform, 0);
gl::Uniform2f(sprite_size_uniform, width as f32, height as f32);
let frames_len = self.texcoords().len();
if frames_len > 0 {
gl::Uniform2fv(
frames_uniform,
frames_len as GLint * 4,
transmute(&(&*self.texcoords_space)[0])
);
}
}
}
/*
fn put_texcoord(&mut self, index: usize, texcoord: Texcoords) {
self.texcoords_mut()[index] = texcoord;
}
*/
// NOTE this should be properly merged with add_frames.
pub fn generate_texcoords_buffer(
&mut self, frame_width: usize, frame_height: usize, space: *mut [Texcoords]
) {
unsafe {
let frames_len = (*space).len();
let mut frames = Vec::<Frame>::with_capacity(frames_len);
self.add_frames(&mut frames, frame_width, frame_height);
assert_eq!(frames.len(), frames_len); // PLZ
self.texcoords_space = space;
for i in (0..frames_len) {
frames[i].texcoords.copy_to(&mut self.texcoords_mut()[i]);
}
}
}
// Fill the given slice with frames of the given width and height.
// So this is now called only by #generate_texcoords_buffer
pub fn add_frames(&mut self, space: &mut Vec<Frame>, uwidth: usize, uheight: usize) {
let count = space.capacity();
let tex_width = self.width as f32;
let tex_height = self.height as f32;
let width = uwidth as f32;
let height = uheight as f32;
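// Frames are laid out left-to-right starting at the texture's top row
// (in GL the y axis points up, so the top row sits at tex_height - height),
// wrapping down one row whenever a frame would overflow the right edge.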
{
let mut current_pos = Vec2::<f32>::new(0.0, tex_height - height);
for _ in (0..count) {
if current_pos.x + width > tex_width {
current_pos.x = 0.0;
current_pos.y -= height;
}
if current_pos.y < 0.0 {
panic!(
"Too many frames! Asked for {} {}x{} frames on a {}x{} texture.",
count, width, height, tex_width, tex_height
);
}
let mut frame = Frame {
position: current_pos,
size: Vec2::new(width, height),
texcoords: unsafe { uninitialized() }
};
frame.generate_texcoords(tex_width, tex_height);
space.push(frame);
current_pos.x += width;
}
}
self.frame_texcoords_size = size_of::<Texcoords>() as i64 * count as i64;
}
// TODO man, should this be a destructor?
// A: NO
pub fn unload(&mut self) {
unsafe {
gl::DeleteTextures(1, &self.id);
}
}
}
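// Typical call order for Texture (a sketch; the uniform handles and the
// 32x32 frame size are placeholders):
//   texture.generate_texcoords_buffer(32, 32, space);    // carve into frames once
//   texture.set(sampler_u, size_u, frames_u, 32.0, 32.0); // then bind each draw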
// NOTE don't instantiate these willy nilly!
pub struct ImageAsset {
// I don't like wasting space with the pointer here, but
// it's hard to pass gl_data to a method called on this
// because of the borrow checker...
pub gl_data: *const GLData,
pub filename: &'static str,
pub vbo: GLuint,
pub set_attributes: extern "Rust" fn(GLuint),
pub shader: extern "Rust" fn(&GLData) -> &assets::Shader,
pub attributes_size: usize,
pub texture: Texture,
pub frame_width: usize,
pub frame_height: usize,
pub texcoord_count: usize,
// The next texcoord_count * size_of::<Texcoords>() bytes
// should be free for this struct to use.
}
impl ImageAsset {
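// `texcoord_count` is the struct's last field, so stepping one `usize` past
// it lands on the texcoords stored directly after this struct (see the
// field comment above).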
pub unsafe fn texcoords(&mut self) -> &mut [Texcoords] | {
let count_ptr: *mut usize = &mut self.texcoord_count;
slice::from_raw_parts_mut::<Texcoords>(
transmute(count_ptr.offset(1)),
self.texcoord_count
)
} | identifier_body |
|
render.rs | cam_pos; // in pixels
uniform vec2 sprite_size; // in pixels
uniform float scale;
out vec2 texcoord;
const vec2 TEXCOORD_FROM_ID[4] = vec2[4](
vec2(1.0, 1.0), vec2(1.0, 0.0),
vec2(0.0, 0.0), vec2(0.0, 1.0)
);
vec2 from_pixel(vec2 pos)
{
return pos / screen_size;
}
int flipped_vertex_id()
{
return 3 - gl_VertexID;
}
void main()
{
vec2 pixel_screen_pos = (position - cam_pos) * 2;
gl_Position = vec4(
(vertex_pos * from_pixel(sprite_size) + from_pixel(pixel_screen_pos)) * scale,
0.0f, 1.0f
);
int index = flipped != 0 ? flipped_vertex_id() : gl_VertexID;
if (frame == -1)
texcoord = TEXCOORD_FROM_ID[index];
else
texcoord = frames[frame * 4 + index];
texcoord.y = 1 - texcoord.y;
}
";
pub static STANDARD_FRAGMENT: &'static str = "
#version 330 core
in vec2 texcoord;
out vec4 color;
uniform sampler2D tex;
void main()
{
color = texture(tex, texcoord);
}
";
macro_rules! check_log(
($typ:expr, $get_iv:ident | $get_log:ident $val:ident $status:ident $on_error:ident) => (
unsafe {
let mut status = 0;
gl::$get_iv($val, gl::$status, &mut status);
if status == 0 {
let mut len = 0;
gl::$get_iv($val, gl::INFO_LOG_LENGTH, &mut len);
// INFO_LOG_LENGTH includes the trailing NUL, so size the buffer for the full
// log and strip the terminator afterwards; the previous `len - 1` buffer
// could be overrun by one byte when GL writes the NUL.
let mut buf = vec![0u8; len as usize];
gl::$get_log($val, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar);
buf.truncate(len as usize - 1);
match String::from_utf8(buf) {
Ok(error_message) => $on_error!("{}", error_message),
Err(e) => $on_error!("Error parsing OpenGL error message: {}", e)
}
false
} else {
println!("I THINK THE {} COMPILED", $typ);
true
}
}
)
);
macro_rules! make_shader(
(($name:expr): $shader_type:ident) => (
unsafe {
let sh = gl::CreateShader(gl::$shader_type);
let shader_src_str = CString::new($name).unwrap();
gl::ShaderSource(sh, 1, &shader_src_str.as_ptr(), ptr::null());
gl::CompileShader(sh);
sh
}
)
);
pub struct Texcoords {
pub top_right: Vec2<GLfloat>,
pub bottom_right: Vec2<GLfloat>,
pub bottom_left: Vec2<GLfloat>,
pub top_left: Vec2<GLfloat>
}
impl Texcoords {
pub unsafe fn copy_to(&self, dest: *mut Texcoords) {
ptr::copy(self, dest, 1);
}
}
// Represents an animation frame; a rectangular section of a Texture.
pub struct Frame {
pub position: Vec2<f32>,
pub size: Vec2<f32>,
// Texcoords are generated via #generate_texcoords.
pub texcoords: Texcoords
}
impl Frame {
pub fn generate_texcoords(&mut self, tex_width: f32, tex_height: f32) {
let ref position = self.position;
let ref size = self.size;
// TODO SIMD this son of a bitch
self.texcoords = Texcoords {
top_right: Vec2::new(
(position.x + size.x) / tex_width,
(position.y + size.y) / tex_height
),
bottom_right: Vec2::new(
(position.x + size.x) / tex_width,
(position.y) / tex_height
),
bottom_left: Vec2::new(
(position.x) / tex_width,
(position.y) / tex_height
),
top_left: Vec2::new(
(position.x) / tex_width,
(position.y + size.y) / tex_height
)
};
}
}
// Represents an actual texture that is currently on the GPU.
#[allow(missing_copy_implementations)]
pub struct Texture {
pub id: GLuint,
pub width: i32,
pub height: i32,
pub filename: &'static str,
pub frame_texcoords_size: i64,
pub texcoords_space: *mut [Texcoords]
}
impl Texture {
pub fn set_full(&self, sampler_uniform: GLint, sprite_size_uniform: GLint) {
unsafe {
gl::ActiveTexture(gl::TEXTURE0);
gl::BindTexture(gl::TEXTURE_2D, self.id);
gl::Uniform1i(sampler_uniform, 0);
gl::Uniform2f(sprite_size_uniform, self.width as f32, self.height as f32);
}
}
#[inline]
pub fn texcoords(&self) -> &[Texcoords] {
unsafe { transmute(self.texcoords_space) }
}
#[inline]
pub fn texcoords_mut(&mut self) -> &mut [Texcoords] {
unsafe { transmute(self.texcoords_space) }
}
// NOTE this expects #generate_texcoords_buffer to have been called
// if there are frames.
pub fn set(&self, sampler_uniform: GLint,
sprite_size_uniform: GLint,
frames_uniform: GLint,
width: f32, height: f32) {
unsafe {
assert!(self.frame_texcoords_size / 8 < FRAME_UNIFORM_MAX);
gl::ActiveTexture(gl::TEXTURE0);
gl::BindTexture(gl::TEXTURE_2D, self.id);
gl::Uniform1i(sampler_uniform, 0);
gl::Uniform2f(sprite_size_uniform, width as f32, height as f32);
let frames_len = self.texcoords().len();
if frames_len > 0 |
}
}
/*
fn put_texcoord(&mut self, index: usize, texcoord: Texcoords) {
self.texcoords_mut()[index] = texcoord;
}
*/
// NOTE this should be properly merged with add_frames.
pub fn generate_texcoords_buffer(
&mut self, frame_width: usize, frame_height: usize, space: *mut [Texcoords]
) {
unsafe {
let frames_len = (*space).len();
let mut frames = Vec::<Frame>::with_capacity(frames_len);
self.add_frames(&mut frames, frame_width, frame_height);
assert_eq!(frames.len(), frames_len); // PLZ
self.texcoords_space = space;
for i in (0..frames_len) {
frames[i].texcoords.copy_to(&mut self.texcoords_mut()[i]);
}
}
}
// Fill the given slice with frames of the given width and height.
// So this is now called only by #generate_texcoords_buffer
pub fn add_frames(&mut self, space: &mut Vec<Frame>, uwidth: usize, uheight: usize) {
let count = space.capacity();
let tex_width = self.width as f32;
let tex_height = self.height as f32;
let width = uwidth as f32;
let height = uheight as f32;
{
let mut current_pos = Vec2::<f32>::new(0.0, tex_height - height);
for _ in (0..count) {
if current_pos.x + width > tex_width {
current_pos.x = 0.0;
current_pos.y -= height;
}
if current_pos.y < 0.0 {
panic!(
"Too many frames! Asked for {} {}x{} frames on a {}x{} texture.",
count, width, height, tex_width, tex_height
);
}
let mut frame = Frame {
position: current_pos,
size: Vec2::new(width, height),
texcoords: unsafe { uninitialized() }
};
frame.generate_texcoords(tex_width, tex_height);
space.push(frame);
current_pos.x += width;
}
}
self.frame_texcoords_size = size_of::<Texcoords>() as i64 * count as i64;
}
// TODO man, should this be a destructor?
// A: NO
pub fn unload(&mut self) {
unsafe {
gl::DeleteTextures(1, &self.id);
}
}
}
// NOTE don't instantiate these willy nilly!
pub struct ImageAsset {
// I don't like wasting space with the pointer here, but
// it's hard to pass gl_data to a method called on this
// because of the borrow checker...
pub gl_data: *const | {
gl::Uniform2fv(
frames_uniform,
frames_len as GLint * 4,
transmute(&(&*self.texcoords_space)[0])
);
} | conditional_block |
render.rs | cam_pos; // in pixels
uniform vec2 sprite_size; // in pixels
uniform float scale;
out vec2 texcoord;
const vec2 TEXCOORD_FROM_ID[4] = vec2[4](
vec2(1.0, 1.0), vec2(1.0, 0.0),
vec2(0.0, 0.0), vec2(0.0, 1.0)
);
vec2 from_pixel(vec2 pos)
{
return pos / screen_size;
}
int flipped_vertex_id()
{
return 3 - gl_VertexID;
}
void main()
{
vec2 pixel_screen_pos = (position - cam_pos) * 2;
gl_Position = vec4(
(vertex_pos * from_pixel(sprite_size) + from_pixel(pixel_screen_pos)) * scale,
0.0f, 1.0f
);
int index = flipped != 0 ? flipped_vertex_id() : gl_VertexID;
if (frame == -1)
texcoord = TEXCOORD_FROM_ID[index];
else
texcoord = frames[frame * 4 + index];
texcoord.y = 1 - texcoord.y;
}
";
pub static STANDARD_FRAGMENT: &'static str = "
#version 330 core
in vec2 texcoord;
out vec4 color;
uniform sampler2D tex;
void main()
{ |
macro_rules! check_log(
($typ:expr, $get_iv:ident | $get_log:ident $val:ident $status:ident $on_error:ident) => (
unsafe {
let mut status = 0;
gl::$get_iv($val, gl::$status, &mut status);
if status == 0 {
let mut len = 0;
gl::$get_iv($val, gl::INFO_LOG_LENGTH, &mut len);
// INFO_LOG_LENGTH includes the trailing NUL, so size the buffer for the full
// log and strip the terminator afterwards; the previous `len - 1` buffer
// could be overrun by one byte when GL writes the NUL.
let mut buf = vec![0u8; len as usize];
gl::$get_log($val, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar);
buf.truncate(len as usize - 1);
match String::from_utf8(buf) {
Ok(error_message) => $on_error!("{}", error_message),
Err(e) => $on_error!("Error parsing OpenGL error message: {}", e)
}
false
} else {
println!("I THINK THE {} COMPILED", $typ);
true
}
}
)
);
macro_rules! make_shader(
(($name:expr): $shader_type:ident) => (
unsafe {
let sh = gl::CreateShader(gl::$shader_type);
let shader_src_str = CString::new($name).unwrap();
gl::ShaderSource(sh, 1, &shader_src_str.as_ptr(), ptr::null());
gl::CompileShader(sh);
sh
}
)
);
pub struct Texcoords {
pub top_right: Vec2<GLfloat>,
pub bottom_right: Vec2<GLfloat>,
pub bottom_left: Vec2<GLfloat>,
pub top_left: Vec2<GLfloat>
}
impl Texcoords {
pub unsafe fn copy_to(&self, dest: *mut Texcoords) {
ptr::copy(self, dest, 1);
}
}
// Represents an animation frame; a rectangular section of a Texture.
pub struct Frame {
pub position: Vec2<f32>,
pub size: Vec2<f32>,
// Texcoords are generated via #generate_texcoords.
pub texcoords: Texcoords
}
impl Frame {
pub fn generate_texcoords(&mut self, tex_width: f32, tex_height: f32) {
let ref position = self.position;
let ref size = self.size;
// TODO SIMD this son of a bitch
self.texcoords = Texcoords {
top_right: Vec2::new(
(position.x + size.x) / tex_width,
(position.y + size.y) / tex_height
),
bottom_right: Vec2::new(
(position.x + size.x) / tex_width,
(position.y) / tex_height
),
bottom_left: Vec2::new(
(position.x) / tex_width,
(position.y) / tex_height
),
top_left: Vec2::new(
(position.x) / tex_width,
(position.y + size.y) / tex_height
)
};
}
}
// Represents an actual texture that is currently on the GPU.
#[allow(missing_copy_implementations)]
pub struct Texture {
pub id: GLuint,
pub width: i32,
pub height: i32,
pub filename: &'static str,
pub frame_texcoords_size: i64,
pub texcoords_space: *mut [Texcoords]
}
impl Texture {
pub fn set_full(&self, sampler_uniform: GLint, sprite_size_uniform: GLint) {
unsafe {
gl::ActiveTexture(gl::TEXTURE0);
gl::BindTexture(gl::TEXTURE_2D, self.id);
gl::Uniform1i(sampler_uniform, 0);
gl::Uniform2f(sprite_size_uniform, self.width as f32, self.height as f32);
}
}
#[inline]
pub fn texcoords(&self) -> &[Texcoords] {
unsafe { transmute(self.texcoords_space) }
}
#[inline]
pub fn texcoords_mut(&mut self) -> &mut [Texcoords] {
unsafe { transmute(self.texcoords_space) }
}
// NOTE this expects #generate_texcoords_buffer to have been called
// if there are frames.
pub fn set(&self, sampler_uniform: GLint,
sprite_size_uniform: GLint,
frames_uniform: GLint,
width: f32, height: f32) {
unsafe {
assert!(self.frame_texcoords_size / 8 < FRAME_UNIFORM_MAX);
gl::ActiveTexture(gl::TEXTURE0);
gl::BindTexture(gl::TEXTURE_2D, self.id);
gl::Uniform1i(sampler_uniform, 0);
gl::Uniform2f(sprite_size_uniform, width as f32, height as f32);
let frames_len = self.texcoords().len();
if frames_len > 0 {
gl::Uniform2fv(
frames_uniform,
frames_len as GLint * 4,
transmute(&(&*self.texcoords_space)[0])
);
}
}
}
/*
fn put_texcoord(&mut self, index: usize, texcoord: Texcoords) {
self.texcoords_mut()[index] = texcoord;
}
*/
// NOTE this should be properly merged with add_frames.
pub fn generate_texcoords_buffer(
&mut self, frame_width: usize, frame_height: usize, space: *mut [Texcoords]
) {
unsafe {
let frames_len = (*space).len();
let mut frames = Vec::<Frame>::with_capacity(frames_len);
self.add_frames(&mut frames, frame_width, frame_height);
assert_eq!(frames.len(), frames_len); // PLZ
self.texcoords_space = space;
for i in (0..frames_len) {
frames[i].texcoords.copy_to(&mut self.texcoords_mut()[i]);
}
}
}
// Fill the given slice with frames of the given width and height.
// So this is now called only by #generate_texcoords_buffer
pub fn add_frames(&mut self, space: &mut Vec<Frame>, uwidth: usize, uheight: usize) {
let count = space.capacity();
let tex_width = self.width as f32;
let tex_height = self.height as f32;
let width = uwidth as f32;
let height = uheight as f32;
{
let mut current_pos = Vec2::<f32>::new(0.0, tex_height - height);
for _ in (0..count) {
if current_pos.x + width > tex_width {
current_pos.x = 0.0;
current_pos.y -= height;
}
if current_pos.y < 0.0 {
panic!(
"Too many frames! Asked for {} {}x{} frames on a {}x{} texture.",
count, width, height, tex_width, tex_height
);
}
let mut frame = Frame {
position: current_pos,
size: Vec2::new(width, height),
texcoords: unsafe { uninitialized() }
};
frame.generate_texcoords(tex_width, tex_height);
space.push(frame);
current_pos.x += width;
}
}
self.frame_texcoords_size = size_of::<Texcoords>() as i64 * count as i64;
}
// TODO man, should this be a destructor?
// A: NO
pub fn unload(&mut self) {
unsafe {
gl::DeleteTextures(1, &self.id);
}
}
}
// NOTE don't instantiate these willy nilly!
pub struct ImageAsset {
// I don't like wasting space with the pointer here, but
// it's hard to pass gl_data to a method called on this
// because of the borrow checker...
pub gl_data: *const GL | color = texture(tex, texcoord);
}
"; | random_line_split |
render.rs | - gl_VertexID;
}
void main()
{
vec2 pixel_screen_pos = (position - cam_pos) * 2;
gl_Position = vec4(
(vertex_pos * from_pixel(sprite_size) + from_pixel(pixel_screen_pos)) * scale,
0.0f, 1.0f
);
int index = flipped != 0 ? flipped_vertex_id() : gl_VertexID;
if (frame == -1)
texcoord = TEXCOORD_FROM_ID[index];
else
texcoord = frames[frame * 4 + index];
texcoord.y = 1 - texcoord.y;
}
";
pub static STANDARD_FRAGMENT: &'static str = "
#version 330 core
in vec2 texcoord;
out vec4 color;
uniform sampler2D tex;
void main()
{
color = texture(tex, texcoord);
}
";
macro_rules! check_log(
($typ:expr, $get_iv:ident | $get_log:ident $val:ident $status:ident $on_error:ident) => (
unsafe {
let mut status = 0;
gl::$get_iv($val, gl::$status, &mut status);
if status == 0 {
let mut len = 0;
gl::$get_iv($val, gl::INFO_LOG_LENGTH, &mut len);
// INFO_LOG_LENGTH includes the trailing NUL, so size the buffer for the full
// log and strip the terminator afterwards; the previous `len - 1` buffer
// could be overrun by one byte when GL writes the NUL.
let mut buf = vec![0u8; len as usize];
gl::$get_log($val, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar);
buf.truncate(len as usize - 1);
match String::from_utf8(buf) {
Ok(error_message) => $on_error!("{}", error_message),
Err(e) => $on_error!("Error parsing OpenGL error message: {}", e)
}
false
} else {
println!("I THINK THE {} COMPILED", $typ);
true
}
}
)
);
macro_rules! make_shader(
(($name:expr): $shader_type:ident) => (
unsafe {
let sh = gl::CreateShader(gl::$shader_type);
let shader_src_str = CString::new($name).unwrap();
gl::ShaderSource(sh, 1, &shader_src_str.as_ptr(), ptr::null());
gl::CompileShader(sh);
sh
}
)
);
pub struct Texcoords {
pub top_right: Vec2<GLfloat>,
pub bottom_right: Vec2<GLfloat>,
pub bottom_left: Vec2<GLfloat>,
pub top_left: Vec2<GLfloat>
}
impl Texcoords {
pub unsafe fn copy_to(&self, dest: *mut Texcoords) {
ptr::copy(self, dest, 1);
}
}
// Represents an animation frame; a rectangular section of a Texture.
pub struct Frame {
pub position: Vec2<f32>,
pub size: Vec2<f32>,
// Texcoords are generated via #generate_texcoords.
pub texcoords: Texcoords
}
impl Frame {
pub fn generate_texcoords(&mut self, tex_width: f32, tex_height: f32) {
let ref position = self.position;
let ref size = self.size;
// TODO SIMD this son of a bitch
self.texcoords = Texcoords {
top_right: Vec2::new(
(position.x + size.x) / tex_width,
(position.y + size.y) / tex_height
),
bottom_right: Vec2::new(
(position.x + size.x) / tex_width,
(position.y) / tex_height
),
bottom_left: Vec2::new(
(position.x) / tex_width,
(position.y) / tex_height
),
top_left: Vec2::new(
(position.x) / tex_width,
(position.y + size.y) / tex_height
)
};
}
}
// Represents an actual texture that is currently on the GPU.
#[allow(missing_copy_implementations)]
pub struct Texture {
pub id: GLuint,
pub width: i32,
pub height: i32,
pub filename: &'static str,
pub frame_texcoords_size: i64,
pub texcoords_space: *mut [Texcoords]
}
impl Texture {
pub fn set_full(&self, sampler_uniform: GLint, sprite_size_uniform: GLint) {
unsafe {
gl::ActiveTexture(gl::TEXTURE0);
gl::BindTexture(gl::TEXTURE_2D, self.id);
gl::Uniform1i(sampler_uniform, 0);
gl::Uniform2f(sprite_size_uniform, self.width as f32, self.height as f32);
}
}
#[inline]
pub fn texcoords(&self) -> &[Texcoords] {
unsafe { transmute(self.texcoords_space) }
}
#[inline]
pub fn texcoords_mut(&mut self) -> &mut [Texcoords] {
unsafe { transmute(self.texcoords_space) }
}
// NOTE this expects #generate_texcoords_buffer to have been called
// if there are frames.
pub fn set(&self, sampler_uniform: GLint,
sprite_size_uniform: GLint,
frames_uniform: GLint,
width: f32, height: f32) {
unsafe {
assert!(self.frame_texcoords_size / 8 < FRAME_UNIFORM_MAX);
gl::ActiveTexture(gl::TEXTURE0);
gl::BindTexture(gl::TEXTURE_2D, self.id);
gl::Uniform1i(sampler_uniform, 0);
gl::Uniform2f(sprite_size_uniform, width as f32, height as f32);
let frames_len = self.texcoords().len();
if frames_len > 0 {
gl::Uniform2fv(
frames_uniform,
frames_len as GLint * 4,
transmute(&(&*self.texcoords_space)[0])
);
}
}
}
/*
fn put_texcoord(&mut self, index: usize, texcoord: Texcoords) {
self.texcoords_mut()[index] = texcoord;
}
*/
// NOTE this should be properly merged with add_frames.
pub fn generate_texcoords_buffer(
&mut self, frame_width: usize, frame_height: usize, space: *mut [Texcoords]
) {
unsafe {
let frames_len = (*space).len();
let mut frames = Vec::<Frame>::with_capacity(frames_len);
self.add_frames(&mut frames, frame_width, frame_height);
assert_eq!(frames.len(), frames_len); // PLZ
self.texcoords_space = space;
for i in (0..frames_len) {
frames[i].texcoords.copy_to(&mut self.texcoords_mut()[i]);
}
}
}
// Fill the given slice with frames of the given width and height.
// So this is now called only by #generate_texcoords_buffer
pub fn add_frames(&mut self, space: &mut Vec<Frame>, uwidth: usize, uheight: usize) {
let count = space.capacity();
let tex_width = self.width as f32;
let tex_height = self.height as f32;
let width = uwidth as f32;
let height = uheight as f32;
{
let mut current_pos = Vec2::<f32>::new(0.0, tex_height - height);
for _ in (0..count) {
if current_pos.x + width > tex_width {
current_pos.x = 0.0;
current_pos.y -= height;
}
if current_pos.y < 0.0 {
panic!(
"Too many frames! Asked for {} {}x{} frames on a {}x{} texture.",
count, width, height, tex_width, tex_height
);
}
let mut frame = Frame {
position: current_pos,
size: Vec2::new(width, height),
texcoords: unsafe { uninitialized() }
};
frame.generate_texcoords(tex_width, tex_height);
space.push(frame);
current_pos.x += width;
}
}
self.frame_texcoords_size = size_of::<Texcoords>() as i64 * count as i64;
}
// TODO man, should this be a destructor?
// A: NO
pub fn unload(&mut self) {
unsafe {
gl::DeleteTextures(1, &self.id);
}
}
}
// NOTE don't instantiate these willy nilly!
pub struct ImageAsset {
// I don't like wasting space with the pointer here, but
// it's hard to pass gl_data to a method called on this
// because of the borrow checker...
pub gl_data: *const GLData,
pub filename: &'static str,
pub vbo: GLuint,
pub set_attributes: extern "Rust" fn(GLuint),
pub shader: extern "Rust" fn(&GLData) -> &assets::Shader,
pub attributes_size: usize,
pub texture: Texture,
pub frame_width: usize,
pub frame_height: usize,
pub texcoord_count: usize,
// The next texcoord_count * size_of::<Texcoords>() bytes
// should be free for this struct to use.
}
impl ImageAsset {
pub unsafe fn | texcoords | identifier_name |
|
Assignment+2.py | ]:
import nltk
import pandas as pd
import numpy as np
# If you would like to work with the raw text you can use 'moby_raw'
with open('moby.txt', 'r') as f:
moby_raw = f.read()
# If you would like to work with the novel in nltk.Text format you can use 'text1'
moby_tokens = nltk.word_tokenize(moby_raw)
text1 = nltk.Text(moby_tokens)
# ### Example 1
#
# How many tokens (words and punctuation symbols) are in text1?
#
# *This function should return an integer.*
# In[83]:
def example_one():
return len(nltk.word_tokenize(moby_raw)) # or alternatively len(text1)
example_one()
# ### Example 2
#
# How many unique tokens (unique words and punctuation) does text1 have?
#
# *This function should return an integer.*
# In[84]:
def example_two():
return len(set(nltk.word_tokenize(moby_raw))) # or alternatively len(set(text1))
example_two()
# ### Example 3
#
# After lemmatizing the verbs, how many unique tokens does text1 have?
#
# *This function should return an integer.*
# In[85]:
from nltk.stem import WordNetLemmatizer
def example_three():
lemmatizer = WordNetLemmatizer()
lemmatized = [lemmatizer.lemmatize(w,'v') for w in text1]
return len(set(lemmatized))
example_three()
# ### Question 1
#
# What is the lexical diversity of the given text input? (i.e. ratio of unique tokens to the total number of tokens)
#
# *This function should return a float.*
# In[86]:
tokens = nltk.word_tokenize(moby_raw)
unique_tokens = set(tokens)
lemmatizer = WordNetLemmatizer()
stems = [lemmatizer.lemmatize(w,'v') for w in text1]
unique_stems = set(stems)
tags = nltk.pos_tag(tokens)
moby_sents = nltk.sent_tokenize(moby_raw)
def answer_one():
return len(unique_tokens) / len(tokens)
answer_one()
# ### Question 2
#
# What percentage of tokens is 'whale' or 'Whale'?
#
# *This function should return a float.*
# In[100]:
def answer_two():
return len([tok for tok in tokens if tok == 'whale' or tok == 'Whale']) / len(tokens) * 100
answer_two()
# ### Question 3
#
# What are the 20 most frequently occurring (unique) tokens in the text? What is their frequency?
#
# *This function should return a list of 20 tuples where each tuple is of the form `(token, frequency)`. The list should be sorted in descending order of frequency.*
# In[88]:
token_freq = nltk.FreqDist(tokens)
token_freq_sorted = sorted(token_freq.items(), key=lambda x: x[1], reverse=True)
def answer_three():
return token_freq_sorted[:20]
answer_three()
# ### Question 4
#
# What tokens have a length of greater than 5 and frequency of more than 150?
#
# *This function should return an alphabetically sorted list of the tokens that match the above constraints. To sort your list, use `sorted()`*
# In[89]:
def answer_four():
return sorted([tok for tok in unique_tokens if len(tok) > 5 and token_freq[tok] > 150])
answer_four()
# ### Question 5
#
# Find the longest word in text1 and that word's length.
#
# *This function should return a tuple `(longest_word, length)`.*
# In[90]:
def answer_five():
return max([(tok, len(tok)) for tok in unique_tokens], key=lambda k: k[1])
answer_five()
# ### Question 6
#
# What unique words have a frequency of more than 2000? What is their frequency?
#
# "Hint: you may want to use `isalpha()` to check if the token is a word and not punctuation."
#
# *This function should return a list of tuples of the form `(frequency, word)` sorted in descending order of frequency.*
# In[91]:
def answer_six():
return [(freq, tok) for tok, freq in token_freq_sorted if freq > 2000 and tok.isalpha()]
answer_six()
# ### Question 7
#
# What is the average number of tokens per sentence?
#
# *This function should return a float.*
# In[92]:
def answer_seven():
tokens = 0
sents = 0
for sent in moby_sents:
tokens += len(nltk.word_tokenize(sent))
sents += 1
return tokens / sents
answer_seven()
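# An equivalent one-liner (illustrative): np.mean([len(nltk.word_tokenize(s)) for s in moby_sents])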
# ### Question 8
#
# What are the 5 most frequent parts of speech in this text? What is their frequency?
#
# *This function should return a list of tuples of the form `(part_of_speech, frequency)` sorted in descending order of frequency.*
# In[93]:
def answer_eight():
import collections
d = collections.defaultdict(int)
for tok, pos in tags:
d[pos] += 1
return sorted(d.items(), key=lambda x: x[1], reverse=True)[:5]
answer_eight()
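# Equivalent using FreqDist (illustrative): nltk.FreqDist(pos for _, pos in tags).most_common(5)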
# ## Part 2 - Spelling Recommender
#
# For this part of the assignment you will create three different spelling recommenders, that each take a list of misspelled words and recommends a correctly spelled word for every word in the list.
#
# For every misspelled word, the recommender should find the word in `correct_spellings` that has the shortest distance* and starts with the same letter as the misspelled word, and return that word as a recommendation.
# Each of the recommenders should provide recommendations for the three default words provided: `['cormulent', 'incendenece', 'validrate']`.
# In[94]:
from nltk.corpus import words
correct_spellings = words.words()
# ### Question 9
#
# For this recommender, your function should provide recommendations for the three default words provided above using the following distance metric:
#
# **[Jaccard distance](https://en.wikipedia.org/wiki/Jaccard_index) on the trigrams of the two words.**
#
# *This function should return a list of length three:
# `['cormulent_reccomendation', 'incendenece_reccomendation', 'validrate_reccomendation']`.*
# In[101]:
def create_trigram(w):
trigrams = []
for i in range(0, len(w)-2):
trigrams.append(w[i:i+3])
return set(trigrams)
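# For example, create_trigram('whale') -> {'wha', 'hal', 'ale'}. nltk.jaccard_distance(A, B)
# then scores two such sets as (len(A | B) - len(A & B)) / len(A | B).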
def answer_nine(entries=['cormulent', 'incendenece', 'validrate']):
max_dists, max_words, trigrams = {}, {}, {}
for entry in entries:
max_dists[entry] = 1.0
max_words[entry] = None
trigrams[entry] = create_trigram(entry)
def try_correct(correct_spelling, incorrect_spelling):
c_trigram = create_trigram(correct_spelling)
distance = nltk.jaccard_distance(trigrams[incorrect_spelling], c_trigram)
if distance < max_dists[incorrect_spelling]:
max_dists[incorrect_spelling] = distance
max_words[incorrect_spelling] = correct_spelling
for correct_spelling in correct_spellings:
for entry in entries:
if correct_spelling[0] == entry[0]:
try_correct(correct_spelling, entry)
return [max_words[entry] for entry in entries]
answer_nine()
# ### Question 10
#
# For this recommender, your function should provide recommendations for the three default words provided above using the following distance metric:
#
# **[Jaccard distance](https://en.wikipedia.org/wiki/Jaccard_index) on the 4-grams of the two words.**
#
# *This function should return a list of length three:
# `['cormulent_reccomendation', 'incendenece_reccomendation', 'validrate_reccomendation']`.*
# In[103]:
def create_quadgram(w):
quadgrams = []
for i in range(0, len(w)-3):
quadgrams.append(w[i:i+4])
return set(quadgrams)
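# For example, create_quadgram('whale') -> {'whal', 'hale'}.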
def answer_ten(entries=['cormulent', 'incendenece', 'validrate']):
max_dists, max_words, quadgrams = {}, {}, {}
for entry in entries:
max_dists[entry] = 1.0
max_words[entry] = None
quadgrams[entry] = create_quadgram(entry)
def try_correct(correct_spelling, incorrect_spelling):
c_quadgram = create_quadgram(correct_spelling)
distance = nltk.jaccard_distance(quadgrams[incorrect_spelling], c_quadgram)
if distance < max_dists[incorrect_spelling]:
max | #
# *Each of the three different recommenders will use a different distance measure (outlined below). | random_line_split |
Assignment+2.py | ]:
import nltk
import pandas as pd
import numpy as np
# If you would like to work with the raw text you can use 'moby_raw'
with open('moby.txt', 'r') as f:
moby_raw = f.read()
# If you would like to work with the novel in nltk.Text format you can use 'text1'
moby_tokens = nltk.word_tokenize(moby_raw)
text1 = nltk.Text(moby_tokens)
# ### Example 1
#
# How many tokens (words and punctuation symbols) are in text1?
#
# *This function should return an integer.*
# In[83]:
def example_one():
return len(nltk.word_tokenize(moby_raw)) # or alternatively len(text1)
example_one()
# ### Example 2
#
# How many unique tokens (unique words and punctuation) does text1 have?
#
# *This function should return an integer.*
# In[84]:
def | ():
return len(set(nltk.word_tokenize(moby_raw))) # or alternatively len(set(text1))
example_two()
# ### Example 3
#
# After lemmatizing the verbs, how many unique tokens does text1 have?
#
# *This function should return an integer.*
# In[85]:
from nltk.stem import WordNetLemmatizer
def example_three():
lemmatizer = WordNetLemmatizer()
lemmatized = [lemmatizer.lemmatize(w,'v') for w in text1]
return len(set(lemmatized))
example_three()
# ### Question 1
#
# What is the lexical diversity of the given text input? (i.e. ratio of unique tokens to the total number of tokens)
#
# *This function should return a float.*
# In[86]:
tokens = nltk.word_tokenize(moby_raw)
unique_tokens = set(tokens)
lemmatizer = WordNetLemmatizer()
stems = [lemmatizer.lemmatize(w,'v') for w in text1]
unique_stems = set(stems)
tags = nltk.pos_tag(tokens)
moby_sents = nltk.sent_tokenize(moby_raw)
def answer_one():
return len(unique_tokens) / len(tokens)
answer_one()
# ### Question 2
#
# What percentage of tokens is 'whale' or 'Whale'?
#
# *This function should return a float.*
# In[100]:
def answer_two():
return len([tok for tok in tokens if tok == 'whale' or tok == 'Whale']) / len(tokens) * 100
answer_two()
# ### Question 3
#
# What are the 20 most frequently occurring (unique) tokens in the text? What is their frequency?
#
# *This function should return a list of 20 tuples where each tuple is of the form `(token, frequency)`. The list should be sorted in descending order of frequency.*
# In[88]:
token_freq = nltk.FreqDist(tokens)
token_freq_sorted = sorted(token_freq.items(), key=lambda x: x[1], reverse=True)
def answer_three():
return token_freq_sorted[:20]
answer_three()
# ### Question 4
#
# What tokens have a length of greater than 5 and frequency of more than 150?
#
# *This function should return an alphabetically sorted list of the tokens that match the above constraints. To sort your list, use `sorted()`*
# In[89]:
def answer_four():
return sorted([tok for tok in unique_tokens if len(tok) > 5 and token_freq[tok] > 150])
answer_four()
# ### Question 5
#
# Find the longest word in text1 and that word's length.
#
# *This function should return a tuple `(longest_word, length)`.*
# In[90]:
def answer_five():
return max([(tok, len(tok)) for tok in unique_tokens], key=lambda k: k[1])
answer_five()
# ### Question 6
#
# What unique words have a frequency of more than 2000? What is their frequency?
#
# "Hint: you may want to use `isalpha()` to check if the token is a word and not punctuation."
#
# *This function should return a list of tuples of the form `(frequency, word)` sorted in descending order of frequency.*
# In[91]:
def answer_six():
return [(freq, tok) for tok, freq in token_freq_sorted if freq > 2000 and tok.isalpha()]
answer_six()
# ### Question 7
#
# What is the average number of tokens per sentence?
#
# *This function should return a float.*
# In[92]:
def answer_seven():
tokens = 0
sents = 0
for sent in moby_sents:
tokens += len(nltk.word_tokenize(sent))
sents += 1
return tokens / sents
answer_seven()
# ### Question 8
#
# What are the 5 most frequent parts of speech in this text? What is their frequency?
#
# *This function should return a list of tuples of the form `(part_of_speech, frequency)` sorted in descending order of frequency.*
# In[93]:
def answer_eight():
import collections
d = collections.defaultdict(int)
for tok, pos in tags:
d[pos] += 1
return sorted(d.items(), key=lambda x: x[1], reverse=True)[:5]
answer_eight()
# ## Part 2 - Spelling Recommender
#
# For this part of the assignment you will create three different spelling recommenders, that each take a list of misspelled words and recommends a correctly spelled word for every word in the list.
#
# For every misspelled word, the recommender should find the word in `correct_spellings` that has the shortest distance* and starts with the same letter as the misspelled word, and return that word as a recommendation.
#
# *Each of the three different recommenders will use a different distance measure (outlined below).
#
# Each of the recommenders should provide recommendations for the three default words provided: `['cormulent', 'incendenece', 'validrate']`.
# In[94]:
from nltk.corpus import words
correct_spellings = words.words()
# ### Question 9
#
# For this recommender, your function should provide recommendations for the three default words provided above using the following distance metric:
#
# **[Jaccard distance](https://en.wikipedia.org/wiki/Jaccard_index) on the trigrams of the two words.**
#
# *This function should return a list of length three:
# `['cormulent_reccomendation', 'incendenece_reccomendation', 'validrate_reccomendation']`.*
# In[101]:
def create_trigram(w):
trigrams = []
for i in range(0, len(w)-2):
trigrams.append(w[i:i+3])
return set(trigrams)
def answer_nine(entries=['cormulent', 'incendenece', 'validrate']):
max_dists, max_words, trigrams = {}, {}, {}
for entry in entries:
max_dists[entry] = 1.0
max_words[entry] = None
trigrams[entry] = create_trigram(entry)
def try_correct(correct_spelling, incorrect_spelling):
c_trigram = create_trigram(correct_spelling)
distance = nltk.jaccard_distance(trigrams[incorrect_spelling], c_trigram)
if distance < max_dists[incorrect_spelling]:
max_dists[incorrect_spelling] = distance
max_words[incorrect_spelling] = correct_spelling
for correct_spelling in correct_spellings:
for entry in entries:
if correct_spelling[0] == entry[0]:
try_correct(correct_spelling, entry)
return [max_words[entry] for entry in entries]
answer_nine()
# ### Question 10
#
# For this recommender, your function should provide recommendations for the three default words provided above using the following distance metric:
#
# **[Jaccard distance](https://en.wikipedia.org/wiki/Jaccard_index) on the 4-grams of the two words.**
#
# *This function should return a list of length three:
# `['cormulent_reccomendation', 'incendenece_reccomendation', 'validrate_reccomendation']`.*
# In[103]:
def create_quadgram(w):
quadgrams = []
for i in range(0, len(w)-3):
quadgrams.append(w[i:i+4])
return set(quadgrams)
def answer_ten(entries=['cormulent', 'incendenece', 'validrate']):
max_dists, max_words, quadgrams = {}, {}, {}
for entry in entries:
max_dists[entry] = 1.0
max_words[entry] = None
quadgrams[entry] = create_quadgram(entry)
def try_correct(correct_spelling, incorrect_spelling):
c_quadgram = create_quadgram(correct_spelling)
distance = nltk.jaccard_distance(quadgrams[incorrect_spelling], c_quadgram)
if distance < max_dists[incorrect_spelling]:
| example_two | identifier_name |
Assignment+2.py | ]:
import nltk
import pandas as pd
import numpy as np
# If you would like to work with the raw text you can use 'moby_raw'
with open('moby.txt', 'r') as f:
moby_raw = f.read()
# If you would like to work with the novel in nltk.Text format you can use 'text1'
moby_tokens = nltk.word_tokenize(moby_raw)
text1 = nltk.Text(moby_tokens)
# ### Example 1
#
# How many tokens (words and punctuation symbols) are in text1?
#
# *This function should return an integer.*
# In[83]:
def example_one():
return len(nltk.word_tokenize(moby_raw)) # or alternatively len(text1)
example_one()
# ### Example 2
#
# How many unique tokens (unique words and punctuation) does text1 have?
#
# *This function should return an integer.*
# In[84]:
def example_two():
return len(set(nltk.word_tokenize(moby_raw))) # or alternatively len(set(text1))
example_two()
# ### Example 3
#
# After lemmatizing the verbs, how many unique tokens does text1 have?
#
# *This function should return an integer.*
# In[85]:
from nltk.stem import WordNetLemmatizer
def example_three():
lemmatizer = WordNetLemmatizer()
lemmatized = [lemmatizer.lemmatize(w,'v') for w in text1]
return len(set(lemmatized))
example_three()
# ### Question 1
#
# What is the lexical diversity of the given text input? (i.e. ratio of unique tokens to the total number of tokens)
#
# *This function should return a float.*
# In[86]:
tokens = nltk.word_tokenize(moby_raw)
unique_tokens = set(tokens)
lemmatizer = WordNetLemmatizer()
stems = [lemmatizer.lemmatize(w,'v') for w in text1]
unique_stems = set(stems)
tags = nltk.pos_tag(tokens)
moby_sents = nltk.sent_tokenize(moby_raw)
def answer_one():
return len(unique_tokens) / len(tokens)
answer_one()
# ### Question 2
#
# What percentage of tokens is 'whale' or 'Whale'?
#
# *This function should return a float.*
# In[100]:
def answer_two():
return len([tok for tok in tokens if tok == 'whale' or tok == 'Whale']) / len(tokens) * 100
answer_two()
# ### Question 3
#
# What are the 20 most frequently occurring (unique) tokens in the text? What is their frequency?
#
# *This function should return a list of 20 tuples where each tuple is of the form `(token, frequency)`. The list should be sorted in descending order of frequency.*
# In[88]:
token_freq = nltk.FreqDist(tokens)
token_freq_sorted = sorted(token_freq.items(), key=lambda x: x[1], reverse=True)
def answer_three():
return token_freq_sorted[:20]
answer_three()
# ### Question 4
#
# What tokens have a length of greater than 5 and frequency of more than 150?
#
# *This function should return an alphabetically sorted list of the tokens that match the above constraints. To sort your list, use `sorted()`*
# In[89]:
def answer_four():
return sorted([tok for tok in unique_tokens if len(tok) > 5 and token_freq[tok] > 150])
answer_four()
# ### Question 5
#
# Find the longest word in text1 and that word's length.
#
# *This function should return a tuple `(longest_word, length)`.*
# In[90]:
def answer_five():
return max([(tok, len(tok)) for tok in unique_tokens], key=lambda k: k[1])
answer_five()
# ### Question 6
#
# What unique words have a frequency of more than 2000? What is their frequency?
#
# "Hint: you may want to use `isalpha()` to check if the token is a word and not punctuation."
#
# *This function should return a list of tuples of the form `(frequency, word)` sorted in descending order of frequency.*
# In[91]:
def answer_six():
return [(freq, tok) for tok, freq in token_freq_sorted if freq > 2000 and tok.isalpha()]
answer_six()
# ### Question 7
#
# What is the average number of tokens per sentence?
#
# *This function should return a float.*
# In[92]:
def answer_seven():
tokens = 0
sents = 0
for sent in moby_sents:
tokens += len(nltk.word_tokenize(sent))
sents += 1
return tokens / sents
answer_seven()
# ### Question 8
#
# What are the 5 most frequent parts of speech in this text? What is their frequency?
#
# *This function should return a list of tuples of the form `(part_of_speech, frequency)` sorted in descending order of frequency.*
# In[93]:
def answer_eight():
import collections
d = collections.defaultdict(int)
for tok, pos in tags:
d[pos] += 1
return sorted(d.items(), key=lambda x: x[1], reverse=True)[:5]
answer_eight()
# ## Part 2 - Spelling Recommender
#
# For this part of the assignment you will create three different spelling recommenders, that each take a list of misspelled words and recommends a correctly spelled word for every word in the list.
#
# For every misspelled word, the recommender should find the word in `correct_spellings` that has the shortest distance* and starts with the same letter as the misspelled word, and return that word as a recommendation.
#
# *Each of the three different recommenders will use a different distance measure (outlined below).
#
# Each of the recommenders should provide recommendations for the three default words provided: `['cormulent', 'incendenece', 'validrate']`.
# In[94]:
from nltk.corpus import words
correct_spellings = words.words()
# ### Question 9
#
# For this recommender, your function should provide recommendations for the three default words provided above using the following distance metric:
#
# **[Jaccard distance](https://en.wikipedia.org/wiki/Jaccard_index) on the trigrams of the two words.**
#
# *This function should return a list of length three:
# `['cormulent_reccomendation', 'incendenece_reccomendation', 'validrate_reccomendation']`.*
# In[101]:
def create_trigram(w):
trigrams = []
for i in range(0, len(w)-2):
trigrams.append(w[i:i+3])
return set(trigrams)
def answer_nine(entries=['cormulent', 'incendenece', 'validrate']):
max_dists, max_words, trigrams = {}, {}, {}
for entry in entries:
max_dists[entry] = 1.0
max_words[entry] = None
trigrams[entry] = create_trigram(entry)
def try_correct(correct_spelling, incorrect_spelling):
c_trigram = create_trigram(correct_spelling)
distance = nltk.jaccard_distance(trigrams[incorrect_spelling], c_trigram)
if distance < max_dists[incorrect_spelling]:
max_dists[incorrect_spelling] = distance
max_words[incorrect_spelling] = correct_spelling
for correct_spelling in correct_spellings:
for entry in entries:
if correct_spelling[0] == entry[0]:
try_correct(correct_spelling, entry)
return [max_words[entry] for entry in entries]
answer_nine()
# ### Question 10
#
# For this recommender, your function should provide recommendations for the three default words provided above using the following distance metric:
#
# **[Jaccard distance](https://en.wikipedia.org/wiki/Jaccard_index) on the 4-grams of the two words.**
#
# *This function should return a list of length three:
# `['cormulent_reccomendation', 'incendenece_reccomendation', 'validrate_reccomendation']`.*
# In[103]:
def create_quadgram(w):
|
def answer_ten(entries=['cormulent', 'incendenece', 'validrate']):
max_dists, max_words, quadgrams = {}, {}, {}
for entry in entries:
max_dists[entry] = 1.0
max_words[entry] = None
quadgrams[entry] = create_quadgram(entry)
def try_correct(correct_spelling, incorrect_spelling):
c_quadgram = create_quadgram(correct_spelling)
distance = nltk.jaccard_distance(quadgrams[incorrect_spelling], c_quadgram)
if distance < max_dists[incorrect_spelling]:
| quadgrams = []
for i in range(0, len(w)-3):
quadgrams.append(w[i:i+4])
return set(quadgrams) | identifier_body |
Assignment+2.py | work with the novel in nltk.Text format you can use 'text1'
moby_tokens = nltk.word_tokenize(moby_raw)
text1 = nltk.Text(moby_tokens)
# ### Example 1
#
# How many tokens (words and punctuation symbols) are in text1?
#
# *This function should return an integer.*
# In[83]:
def example_one():
return len(nltk.word_tokenize(moby_raw)) # or alternatively len(text1)
example_one()
# ### Example 2
#
# How many unique tokens (unique words and punctuation) does text1 have?
#
# *This function should return an integer.*
# In[84]:
def example_two():
return len(set(nltk.word_tokenize(moby_raw))) # or alternatively len(set(text1))
example_two()
# ### Example 3
#
# After lemmatizing the verbs, how many unique tokens does text1 have?
#
# *This function should return an integer.*
# In[85]:
from nltk.stem import WordNetLemmatizer
def example_three():
lemmatizer = WordNetLemmatizer()
lemmatized = [lemmatizer.lemmatize(w,'v') for w in text1]
return len(set(lemmatized))
example_three()
# ### Question 1
#
# What is the lexical diversity of the given text input? (i.e. ratio of unique tokens to the total number of tokens)
#
# *This function should return a float.*
# In[86]:
tokens = nltk.word_tokenize(moby_raw)
unique_tokens = set(tokens)
lemmatizer = WordNetLemmatizer()
stems = [lemmatizer.lemmatize(w,'v') for w in text1]
unique_stems = set(stems)
tags = nltk.pos_tag(tokens)
moby_sents = nltk.sent_tokenize(moby_raw)
def answer_one():
return len(unique_tokens) / len(tokens)
answer_one()
# ### Question 2
#
# What percentage of tokens is 'whale' or 'Whale'?
#
# *This function should return a float.*
# In[100]:
def answer_two():
return len([tok for tok in tokens if tok == 'whale' or tok == 'Whale']) / len(tokens) * 100
answer_two()
# ### Question 3
#
# What are the 20 most frequently occurring (unique) tokens in the text? What is their frequency?
#
# *This function should return a list of 20 tuples where each tuple is of the form `(token, frequency)`. The list should be sorted in descending order of frequency.*
# In[88]:
token_freq = nltk.FreqDist(tokens)
token_freq_sorted = sorted(token_freq.items(), key=lambda x: x[1], reverse=True)
def answer_three():
return token_freq_sorted[:20]
answer_three()
# ### Question 4
#
# What tokens have a length of greater than 5 and frequency of more than 150?
#
# *This function should return an alphabetically sorted list of the tokens that match the above constraints. To sort your list, use `sorted()`*
# In[89]:
def answer_four():
return sorted([tok for tok in unique_tokens if len(tok) > 5 and token_freq[tok] > 150])
answer_four()
# ### Question 5
#
# Find the longest word in text1 and that word's length.
#
# *This function should return a tuple `(longest_word, length)`.*
# In[90]:
def answer_five():
return max([(tok, len(tok)) for tok in unique_tokens], key=lambda k: k[1])
answer_five()
# ### Question 6
#
# What unique words have a frequency of more than 2000? What is their frequency?
#
# "Hint: you may want to use `isalpha()` to check if the token is a word and not punctuation."
#
# *This function should return a list of tuples of the form `(frequency, word)` sorted in descending order of frequency.*
# In[91]:
def answer_six():
return [(freq, tok) for tok, freq in token_freq_sorted if freq > 2000 and tok.isalpha()]
answer_six()
# ### Question 7
#
# What is the average number of tokens per sentence?
#
# *This function should return a float.*
# In[92]:
def answer_seven():
tokens = 0
sents = 0
for sent in moby_sents:
tokens += len(nltk.word_tokenize(sent))
sents += 1
return tokens / sents
answer_seven()
# ### Question 8
#
# What are the 5 most frequent parts of speech in this text? What is their frequency?
#
# *This function should return a list of tuples of the form `(part_of_speech, frequency)` sorted in descending order of frequency.*
# In[93]:
def answer_eight():
import collections
d = collections.defaultdict(int)
for tok, pos in tags:
d[pos] += 1
return sorted(d.items(), key=lambda x: x[1], reverse=True)[:5]
answer_eight()
# ## Part 2 - Spelling Recommender
#
# For this part of the assignment you will create three different spelling recommenders, that each take a list of misspelled words and recommends a correctly spelled word for every word in the list.
#
# For every misspelled word, the recommender should find the word in `correct_spellings` that has the shortest distance* and starts with the same letter as the misspelled word, and return that word as a recommendation.
#
# *Each of the three different recommenders will use a different distance measure (outlined below).
#
# Each of the recommenders should provide recommendations for the three default words provided: `['cormulent', 'incendenece', 'validrate']`.
# In[94]:
from nltk.corpus import words
correct_spellings = words.words()
# ### Question 9
#
# For this recommender, your function should provide recommendations for the three default words provided above using the following distance metric:
#
# **[Jaccard distance](https://en.wikipedia.org/wiki/Jaccard_index) on the trigrams of the two words.**
#
# *This function should return a list of length three:
# `['cormulent_reccomendation', 'incendenece_reccomendation', 'validrate_reccomendation']`.*
# In[101]:
def create_trigram(w):
trigrams = []
for i in range(0, len(w)-2):
trigrams.append(w[i:i+3])
return set(trigrams)
def answer_nine(entries=['cormulent', 'incendenece', 'validrate']):
max_dists, max_words, trigrams = {}, {}, {}
for entry in entries:
max_dists[entry] = 1.0
max_words[entry] = None
trigrams[entry] = create_trigram(entry)
def try_correct(correct_spelling, incorrect_spelling):
c_trigram = create_trigram(correct_spelling)
distance = nltk.jaccard_distance(trigrams[incorrect_spelling], c_trigram)
if distance < max_dists[incorrect_spelling]:
max_dists[incorrect_spelling] = distance
max_words[incorrect_spelling] = correct_spelling
for correct_spelling in correct_spellings:
for entry in entries:
if correct_spelling[0] == entry[0]:
try_correct(correct_spelling, entry)
return [max_words[entry] for entry in entries]
answer_nine()
# ### Question 10
#
# For this recommender, your function should provide recommendations for the three default words provided above using the following distance metric:
#
# **[Jaccard distance](https://en.wikipedia.org/wiki/Jaccard_index) on the 4-grams of the two words.**
#
# *This function should return a list of length three:
# `['cormulent_reccomendation', 'incendenece_reccomendation', 'validrate_reccomendation']`.*
# In[103]:
def create_quadgram(w):
quadgrams = []
for i in range(0, len(w)-3):
quadgrams.append(w[i:i+4])
return set(quadgrams)
def answer_ten(entries=['cormulent', 'incendenece', 'validrate']):
max_dists, max_words, quadgrams = {}, {}, {}
for entry in entries:
max_dists[entry] = 1.0
max_words[entry] = None
quadgrams[entry] = create_quadgram(entry)
def try_correct(correct_spelling, incorrect_spelling):
c_quadgram = create_quadgram(correct_spelling)
distance = nltk.jaccard_distance(quadgrams[incorrect_spelling], c_quadgram)
if distance < max_dists[incorrect_spelling]:
max_dists[incorrect_spelling] = distance
max_words[incorrect_spelling] = correct_spelling
for correct_spelling in correct_spellings:
for entry in entries:
if correct_spelling[0] == entry[0]:
| try_correct(correct_spelling, entry) | conditional_block |
|
vault.rs | pub fn token(&self) -> &str {
&self.token
}
/// Returns the Vault address
pub fn address(&self) -> &str {
&self.address
}
/// Returns the HTTP Client
pub fn http_client(&self) -> &HttpClient {
&self.client
}
fn execute_request<T>(client: &HttpClient, request: reqwest::Request) -> Result<T, crate::Error>
where
T: serde::de::DeserializeOwned + Debug,
{
debug!("Executing request: {:#?}", request);
let mut response = client.execute(request)?;
debug!("Response received: {:#?}", response);
let body = response.text()?;
debug!("Response body: {}", body);
let result = serde_json::from_str(&body)?;
debug!("Deserialized body: {:#?}", result);
Ok(result)
}
fn execute_request_no_body(
client: &HttpClient,
request: reqwest::Request,
) -> Result<(), crate::Error> {
debug!("Executing request: {:#?}", request);
let response = client.execute(request)?;
debug!("Response received: {:#?}", response);
Ok(())
}
/// Login with AWS IAM authentication method. Returns a Vault token on success
///
/// - `address`: Address of Vault Server. Include the scheme (e.g. `https`) and the host with an
/// optional port
/// - `path`: Path to the AWS authentication engine. Usually just `aws`.
/// - `role`: Name of the AWS authentication role
/// - `payload`: Authentication payload from calling `aws::VaultAwsAuthIamPayload::new`
///
/// You can optionally provide a `reqwest::Client` if you have specific needs like custom root
/// CA certificate or require client authentication
pub fn login_aws_iam(
vault_address: &str,
aws_auth_path: &str,
aws_auth_role: &str,
aws_payload: &crate::aws::VaultAwsAuthIamPayload,
client: Option<HttpClient>,
) -> Result<Self, crate::Error> {
info!(
"Logging in to Vault with AWS Credentials at path `{}` and role `{}",
aws_auth_path, aws_auth_role
);
let client = match client {
Some(client) => client,
None => ClientBuilder::new().build()?,
};
let request = Self::build_login_aws_iam_request(
vault_address,
aws_auth_path,
aws_auth_role,
aws_payload,
&client,
)?;
let response: Response = Self::execute_request(&client, request)?;
let token = match response {
Response::Error { errors } => {
Err(crate::Error::InvalidVaultResponse(errors.join("; ")))?
}
Response::Response(ResponseData {
auth: Some(auth), ..
}) => Ok(auth.client_token),
_ => Err(crate::Error::InvalidVaultResponse(
"Missing authentication data".to_string(),
)),
}?;
info!("Vault authentication successful. Received Vault Token");
Ok(Self {
address: vault_address.to_string(),
token,
revoke_self_on_drop: true,
client,
})
}
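// Usage sketch (address and role are placeholders; the payload constructor's
// arguments are elided because they live in the `aws` module):
//   let payload = crate::aws::VaultAwsAuthIamPayload::new(/* ... */);
//   let client = Client::login_aws_iam("https://vault.example.com", "aws", "default", &payload, None)?;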
fn build_login_aws_iam_request(
vault_address: &str,
aws_auth_path: &str,
aws_auth_role: &str,
aws_payload: &crate::aws::VaultAwsAuthIamPayload,
client: &HttpClient,
) -> Result<reqwest::Request, crate::Error> {
let vault_address = url::Url::parse(vault_address)?;
let vault_address = vault_address.join(&format!("/v1/auth/{}/login", aws_auth_path))?;
let payload = AwsIamLoginPayload {
role: aws_auth_role,
aws_payload: Cow::Borrowed(aws_payload),
};
Ok(client.post(vault_address).json(&payload).build()?)
}
/// Get a token from Nomad Secrets Engine
///
/// You can optionally provide a `reqwest::Client` if you have specific needs like custom root
/// CA certificate or require client authentication
pub fn get_nomad_token(
&self,
nomad_path: &str,
nomad_role: &str,
) -> Result<crate::Secret, crate::Error> {
info!(
"Retrieving Nomad Token from Secrets engine mounted at `{}` with role `{}`",
nomad_path, nomad_role
);
let request = self.build_nomad_token_request(nomad_path, nomad_role)?;
let response: Response = Self::execute_request(&self.client, request)?;
Ok(From::from(match response {
Response::Error { errors } => {
Err(crate::Error::InvalidVaultResponse(errors.join("; ")))?
}
Response::Response(ResponseData {
data: Some(mut data),
..
}) => data.remove("secret_id").ok_or_else(|| {
crate::Error::InvalidVaultResponse("Missing Nomad token from response".to_string())
})?,
_ => Err(crate::Error::InvalidVaultResponse(
"Missing secrets data".to_string(),
))?,
}))
}
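// e.g. `client.get_nomad_token("nomad", "default")?` mirrors the CLI call
// `vault read nomad/creds/default` quoted in the tests below.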
/// Revoke the Vault token itself
///
/// If successful, the Vault Token can no longer be used
pub fn revoke_self(&self) -> Result<(), crate::Error> {
info!("Revoking self Vault Token");
let request = self.build_revoke_self_request()?;
// HTTP 204 is returned
Self::execute_request_no_body(&self.client, request)?;
Ok(())
}
fn build_revoke_self_request(&self) -> Result<reqwest::Request, crate::Error> {
let vault_address = url::Url::parse(self.address())?;
let vault_address = vault_address.join("/v1/auth/token/revoke-self")?;
Ok(self
.client
.post(vault_address)
.header("X-Vault-Token", self.token.as_str())
.build()?)
}
fn build_nomad_token_request(
&self,
nomad_path: &str,
nomad_role: &str,
) -> Result<reqwest::Request, crate::Error> {
let vault_address = url::Url::parse(self.address())?;
let vault_address =
vault_address.join(&format!("/v1/{}/creds/{}", nomad_path, nomad_role))?;
Ok(self
.client
.get(vault_address)
.header("X-Vault-Token", self.token.as_str())
.build()?)
}
}
impl Drop for Client {
fn drop(&mut self) {
if self.revoke_self_on_drop {
info!("Vault Client is being dropped. Revoking its own Token");
match self.revoke_self() {
Ok(()) => {}
Err(e) => warn!("Error revoking self: {}", e),
}
}
}
}
#[cfg(test)]
pub(crate) mod tests {
use super::*;
use std::env;
pub(crate) fn vault_address() -> String {
env::var("VAULT_ADDR").unwrap_or_else(|_| "http://127.0.0.1:8200".to_string())
}
#[test]
fn login_aws_iam_request_is_built_properly() -> Result<(), crate::Error> {
let address = vault_address();
let aws_payload = crate::aws::tests::vault_aws_iam_payload(None, None)?;
let request = Client::build_login_aws_iam_request(
&address,
"aws",
"default",
&aws_payload,
&ClientBuilder::new().build()?,
)?;
assert_eq!(
format!("{}/v1/auth/aws/login", address),
request.url().to_string()
);
assert_eq!(&reqwest::Method::POST, request.method());
// Can't test payload
Ok(())
}
/// Requires Mock AWS API and Vault server
/// This test does not verify if the signature from rusoto is correct.
#[test]
fn login_aws_with_vault_is_successful() -> Result<(), crate::Error> {
let address = vault_address();
let aws_payload =
crate::aws::tests::vault_aws_iam_payload(Some("vault.example.com"), None)?;
let client = Client::login_aws_iam(&address, "aws", "default", &aws_payload, None)?;
assert!(!client.token().is_empty());
Ok(())
}
#[test]
fn nomad_token_secrets_engine_payload_can_be_deserialized() {
// Example payload from Nomad Secrets Engine
// e.g. `vault read nomad/creds/default`
let json = r#"
{
"request_id": "xxx4",
"lease_id": "nomad/creds/default/xxx",
"lease_duration": 2764800,
"renewable": true,
"data": {
"accessor_id": "accessor",
"secret_id": "secret"
},
"warnings": null
}
"#;
let data = match serde_json::from_str::<Response>(json).unwrap() {
Response::Response(ResponseData { data, .. }) => data,
_ => panic!("Invalid deserialization"),
};
let nomad = data.unwrap();
assert_eq!(nomad["secret_id"], "secret");
}
#[test]
fn | nomad_token_request_is_built_properly | identifier_name |
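
Taken together, the pieces above suggest a typical end-to-end flow: log in with AWS IAM, read a Nomad token, and let `Drop` revoke the Vault token. The sketch below is hedged: the `crate::vault` module path, the server address, and taking the AWS payload as a parameter are all assumptions, not part of the original source.

fn fetch_nomad_token(
    payload: &crate::aws::VaultAwsAuthIamPayload,
) -> Result<crate::Secret, crate::Error> {
    // Login via AWS IAM; passing `None` lets login_aws_iam build a default
    // reqwest client internally.
    let client = crate::vault::Client::login_aws_iam(
        "https://vault.example.com:8200", // assumed address
        "aws",                            // mount path of the AWS auth engine
        "default",                        // AWS auth role
        payload,
        None,
    )?;
    // Read a token from a Nomad secrets engine mounted at `nomad`.
    let token = client.get_nomad_token("nomad", "default")?;
    Ok(token)
    // `client` is dropped here; login_aws_iam sets revoke_self_on_drop = true,
    // so the Vault token is revoked automatically (failures only log a warning).
}
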
|
vault.rs | ,
/// Data for secrets requests
#[serde(default)]
data: Option<HashMap<String, String>>,
// Missing and ignored fields:
// - wrap_info
}
/// Authentication data from Vault
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct Authentication {
/// The actual token
pub client_token: crate::Secret,
/// The accessor for the Token
pub accessor: String,
/// List of policies for token, including from Identity
pub policies: Vec<String>,
/// List of tokens directly assigned to token
pub token_policies: Vec<String>,
/// Arbitrary metadata
pub metadata: HashMap<String, String>,
/// Lease Duration for the token
pub lease_duration: u64,
/// Whether the token is renewable
pub renewable: bool,
/// UUID for the entity
pub entity_id: String,
/// Type of token
pub token_type: TokenType,
}
/// Type of token from Vault
/// See [Vault Documentation](https://www.vaultproject.io/docs/concepts/tokens.html#token-types-in-detail)
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum TokenType {
/// Long lived service tokens
Service,
/// Short lived batch tokens
Batch,
}
/// Payload to send to Vault for logging in via AWS IAM
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct AwsIamLoginPayload<'a, 'b> {
pub role: &'a str,
#[serde(borrow, flatten)]
pub aws_payload: Cow<'b, crate::aws::VaultAwsAuthIamPayload>,
}
impl Client {
/// Create a new API client from an existing Token
///
/// You can optionally provide a `reqwest::Client` if you have specific needs like custom root
/// CA certificate or require client authentication
#[allow(clippy::new_ret_no_self)]
pub fn new<S1, S2>(
vault_address: S1,
vault_token: S2,
revoke_self_on_drop: bool,
client: Option<HttpClient>,
) -> Result<Self, crate::Error>
where
S1: AsRef<str>,
S2: AsRef<str>,
{
let client = match client {
Some(client) => client,
None => ClientBuilder::new().build()?,
};
Ok(Self {
address: vault_address.as_ref().to_string(),
token: crate::Secret(vault_token.as_ref().to_string()),
revoke_self_on_drop,
client,
})
}
/// Returns the Vault Token
pub fn token(&self) -> &str {
&self.token
}
/// Returns the Vault address
pub fn address(&self) -> &str {
&self.address
}
/// Returns the HTTP Client
pub fn http_client(&self) -> &HttpClient {
&self.client
}
fn execute_request<T>(client: &HttpClient, request: reqwest::Request) -> Result<T, crate::Error>
where
T: serde::de::DeserializeOwned + Debug,
{
debug!("Executing request: {:#?}", request);
let mut response = client.execute(request)?;
debug!("Response received: {:#?}", response);
let body = response.text()?;
debug!("Response body: {}", body);
let result = serde_json::from_str(&body)?;
debug!("Deserialized body: {:#?}", result);
Ok(result)
}
fn execute_request_no_body(
client: &HttpClient,
request: reqwest::Request,
) -> Result<(), crate::Error> {
debug!("Executing request: {:#?}", request);
let response = client.execute(request)?;
debug!("Response received: {:#?}", response);
Ok(())
}
/// Login with AWS IAM authentication method. Returns a Vault token on success
///
/// - `address`: Address of Vault Server. Include the scheme (e.g. `https`) and the host with an
/// optional port
/// - `path`: Path to the AWS authentication engine. Usually just `aws`.
/// - `role`: Name of the AWS authentication role
/// - `payload`: Authentication payload from calling `aws::VaultAwsAuthIamPayload::new`
///
/// You can optionally provide a `reqwest::Client` if you have specific needs like custom root
/// CA certificate or require client authentication
pub fn login_aws_iam(
vault_address: &str,
aws_auth_path: &str,
aws_auth_role: &str,
aws_payload: &crate::aws::VaultAwsAuthIamPayload,
client: Option<HttpClient>,
) -> Result<Self, crate::Error> {
info!(
"Logging in to Vault with AWS Credentials at path `{}` and role `{}",
aws_auth_path, aws_auth_role
);
let client = match client {
Some(client) => client,
None => ClientBuilder::new().build()?,
};
let request = Self::build_login_aws_iam_request(
vault_address,
aws_auth_path,
aws_auth_role,
aws_payload,
&client,
)?;
let response: Response = Self::execute_request(&client, request)?;
let token = match response {
Response::Error { errors } => {
Err(crate::Error::InvalidVaultResponse(errors.join("; ")))?
}
Response::Response(ResponseData {
auth: Some(auth), ..
}) => Ok(auth.client_token),
_ => Err(crate::Error::InvalidVaultResponse(
"Missing authentication data".to_string(),
)),
}?;
info!("Vault authentication successful. Received Vault Token");
Ok(Self {
address: vault_address.to_string(),
token,
revoke_self_on_drop: true,
client,
})
}
fn build_login_aws_iam_request(
vault_address: &str,
aws_auth_path: &str,
aws_auth_role: &str,
aws_payload: &crate::aws::VaultAwsAuthIamPayload,
client: &HttpClient,
) -> Result<reqwest::Request, crate::Error> {
let vault_address = url::Url::parse(vault_address)?;
let vault_address = vault_address.join(&format!("/v1/auth/{}/login", aws_auth_path))?;
let payload = AwsIamLoginPayload {
role: aws_auth_role,
aws_payload: Cow::Borrowed(aws_payload),
};
Ok(client.post(vault_address).json(&payload).build()?)
}
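
One detail worth noting in `build_login_aws_iam_request`: `url::Url::join` treats a leading `/` as an absolute path and replaces whatever path the base URL had, which is why the mount path is always interpolated into a full `/v1/...` string. A small illustrative check (the host is a placeholder, not from the original source):

fn join_semantics() -> Result<(), url::ParseError> {
    // Parsing normalizes the base to "https://vault.example.com:8200/".
    let base = url::Url::parse("https://vault.example.com:8200")?;
    // A leading '/' makes join replace the entire path component.
    assert_eq!(
        "https://vault.example.com:8200/v1/auth/aws/login",
        base.join("/v1/auth/aws/login")?.as_str()
    );
    Ok(())
}
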
/// Get a token from Nomad Secrets Engine
///
/// The request is sent with this client's HTTP client and authenticated with
/// its Vault token.
pub fn get_nomad_token(
&self,
nomad_path: &str,
nomad_role: &str,
) -> Result<crate::Secret, crate::Error> {
info!(
"Retrieving Nomad Token from Secrets engine mounted at `{}` with role `{}`",
nomad_path, nomad_role
);
let request = self.build_nomad_token_request(nomad_path, nomad_role)?;
let response: Response = Self::execute_request(&self.client, request)?;
Ok(From::from(match response {
Response::Error { errors } => {
Err(crate::Error::InvalidVaultResponse(errors.join("; ")))?
}
Response::Response(ResponseData {
data: Some(mut data),
..
}) => data.remove("secret_id").ok_or_else(|| {
crate::Error::InvalidVaultResponse("Missing Nomad token from response".to_string())
})?,
_ => Err(crate::Error::InvalidVaultResponse(
"Missing secrets data".to_string(),
))?,
}))
}
/// Revoke the Vault token itself
///
/// If successful, the Vault Token can no longer be used
pub fn revoke_self(&self) -> Result<(), crate::Error> {
info!("Revoking self Vault Token");
let request = self.build_revoke_self_request()?;
// HTTP 204 is returned
Self::execute_request_no_body(&self.client, request)?;
Ok(())
}
fn build_revoke_self_request(&self) -> Result<reqwest::Request, crate::Error> |
fn build_nomad_token_request(
&self,
nomad_path: &str,
nomad_role: &str,
) -> Result<reqwest::Request, crate::Error> {
let vault_address = url::Url::parse(self.address())?;
let vault_address =
vault_address.join(&format!("/v1/{}/creds/{}", nomad_path, nomad_role))?;
Ok(self
.client
.get(vault_address)
.header("X-Vault-Token", self.token.as_str())
.build()?)
}
}
impl Drop for Client {
fn drop(&mut self) {
if self.revoke_self_on_drop {
info!("Vault Client is being dropped. Revoking its own Token");
match self.revoke_self() {
Ok(()) => {}
Err(e) => warn!("Error revoking self: | {
let vault_address = url::Url::parse(self.address())?;
let vault_address = vault_address.join("/v1/auth/token/revoke-self")?;
Ok(self
.client
.post(vault_address)
.header("X-Vault-Token", self.token.as_str())
.build()?)
} | identifier_body |
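
`login_aws_iam` above succeeds only when the response deserializes into `ResponseData` with `auth: Some(..)`. As a hedged companion to the Nomad deserialization test earlier — the JSON below is hand-written from the `Authentication` struct's field names, not captured from a real Vault server — the same pattern can be checked for the auth branch:

#[test]
fn auth_response_can_be_deserialized() {
    // Scaffold keys mirror the Nomad test; the `auth` object follows the
    // field names of the `Authentication` struct in this file.
    let json = r#"
    {
        "request_id": "xxx",
        "lease_id": "",
        "lease_duration": 0,
        "renewable": false,
        "auth": {
            "client_token": "s.xxxx",
            "accessor": "accessor",
            "policies": ["default"],
            "token_policies": ["default"],
            "metadata": {},
            "lease_duration": 2764800,
            "renewable": true,
            "entity_id": "uuid",
            "token_type": "service"
        },
        "warnings": null
    }
    "#;
    let auth = match serde_json::from_str::<Response>(json).unwrap() {
        Response::Response(ResponseData {
            auth: Some(auth), ..
        }) => auth,
        _ => panic!("Invalid deserialization"),
    };
    assert_eq!(TokenType::Service, auth.token_type);
}
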
vault.rs | >,
/// Data for secrets requests
#[serde(default)]
data: Option<HashMap<String, String>>,
// Missing and ignored fields:
// - wrap_info
}
/// Authentication data from Vault
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct Authentication {
/// The actual token | /// List of tokens directly assigned to token
pub token_policies: Vec<String>,
/// Arbitrary metadata
pub metadata: HashMap<String, String>,
/// Lease Duration for the token
pub lease_duration: u64,
/// Whether the token is renewable
pub renewable: bool,
/// UUID for the entity
pub entity_id: String,
/// Type of token
pub token_type: TokenType,
}
/// Type of token from Vault
/// See [Vault Documentation](https://www.vaultproject.io/docs/concepts/tokens.html#token-types-in-detail)
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum TokenType {
/// Long lived service tokens
Service,
/// Short lived batch tokens
Batch,
}
/// Payload to send to Vault for logging in via AWS IAM
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct AwsIamLoginPayload<'a, 'b> {
pub role: &'a str,
#[serde(borrow, flatten)]
pub aws_payload: Cow<'b, crate::aws::VaultAwsAuthIamPayload>,
}
impl Client {
/// Create a new API client from an existing Token
///
/// You can optionally provide a `reqwest::Client` if you have specific needs like custom root
/// CA certificate or require client authentication
#[allow(clippy::new_ret_no_self)]
pub fn new<S1, S2>(
vault_address: S1,
vault_token: S2,
revoke_self_on_drop: bool,
client: Option<HttpClient>,
) -> Result<Self, crate::Error>
where
S1: AsRef<str>,
S2: AsRef<str>,
{
let client = match client {
Some(client) => client,
None => ClientBuilder::new().build()?,
};
Ok(Self {
address: vault_address.as_ref().to_string(),
token: crate::Secret(vault_token.as_ref().to_string()),
revoke_self_on_drop,
client,
})
}
/// Returns the Vault Token
pub fn token(&self) -> &str {
&self.token
}
/// Returns the Vault address
pub fn address(&self) -> &str {
&self.address
}
/// Returns the HTTP Client
pub fn http_client(&self) -> &HttpClient {
&self.client
}
fn execute_request<T>(client: &HttpClient, request: reqwest::Request) -> Result<T, crate::Error>
where
T: serde::de::DeserializeOwned + Debug,
{
debug!("Executing request: {:#?}", request);
let mut response = client.execute(request)?;
debug!("Response received: {:#?}", response);
let body = response.text()?;
debug!("Response body: {}", body);
let result = serde_json::from_str(&body)?;
debug!("Deserialized body: {:#?}", result);
Ok(result)
}
fn execute_request_no_body(
client: &HttpClient,
request: reqwest::Request,
) -> Result<(), crate::Error> {
debug!("Executing request: {:#?}", request);
let response = client.execute(request)?;
debug!("Response received: {:#?}", response);
Ok(())
}
/// Login with AWS IAM authentication method. Returns a Vault token on success
///
/// - `address`: Address of Vault Server. Include the scheme (e.g. `https`) and the host with an
/// optional port
/// - `path`: Path to the AWS authentication engine. Usually just `aws`.
/// - `role`: Name of the AWS authentication role
/// - `payload`: Authentication payload from calling `aws::VaultAwsAuthIamPayload::new`
///
/// You can optionally provide a `reqwest::Client` if you have specific needs like custom root
/// CA certificate or require client authentication
pub fn login_aws_iam(
vault_address: &str,
aws_auth_path: &str,
aws_auth_role: &str,
aws_payload: &crate::aws::VaultAwsAuthIamPayload,
client: Option<HttpClient>,
) -> Result<Self, crate::Error> {
info!(
"Logging in to Vault with AWS Credentials at path `{}` and role `{}",
aws_auth_path, aws_auth_role
);
let client = match client {
Some(client) => client,
None => ClientBuilder::new().build()?,
};
let request = Self::build_login_aws_iam_request(
vault_address,
aws_auth_path,
aws_auth_role,
aws_payload,
&client,
)?;
let response: Response = Self::execute_request(&client, request)?;
let token = match response {
Response::Error { errors } => {
Err(crate::Error::InvalidVaultResponse(errors.join("; ")))?
}
Response::Response(ResponseData {
auth: Some(auth), ..
}) => Ok(auth.client_token),
_ => Err(crate::Error::InvalidVaultResponse(
"Missing authentication data".to_string(),
)),
}?;
info!("Vault authentication successful. Received Vault Token");
Ok(Self {
address: vault_address.to_string(),
token,
revoke_self_on_drop: true,
client,
})
}
fn build_login_aws_iam_request(
vault_address: &str,
aws_auth_path: &str,
aws_auth_role: &str,
aws_payload: &crate::aws::VaultAwsAuthIamPayload,
client: &HttpClient,
) -> Result<reqwest::Request, crate::Error> {
let vault_address = url::Url::parse(vault_address)?;
let vault_address = vault_address.join(&format!("/v1/auth/{}/login", aws_auth_path))?;
let payload = AwsIamLoginPayload {
role: aws_auth_role,
aws_payload: Cow::Borrowed(aws_payload),
};
Ok(client.post(vault_address).json(&payload).build()?)
}
/// Get a token from Nomad Secrets Engine
///
/// The request is sent with this client's HTTP client and authenticated with
/// its Vault token.
pub fn get_nomad_token(
&self,
nomad_path: &str,
nomad_role: &str,
) -> Result<crate::Secret, crate::Error> {
info!(
"Retrieving Nomad Token from Secrets engine mounted at `{}` with role `{}`",
nomad_path, nomad_role
);
let request = self.build_nomad_token_request(nomad_path, nomad_role)?;
let response: Response = Self::execute_request(&self.client, request)?;
Ok(From::from(match response {
Response::Error { errors } => {
Err(crate::Error::InvalidVaultResponse(errors.join("; ")))?
}
Response::Response(ResponseData {
data: Some(mut data),
..
}) => data.remove("secret_id").ok_or_else(|| {
crate::Error::InvalidVaultResponse("Missing Nomad token from response".to_string())
})?,
_ => Err(crate::Error::InvalidVaultResponse(
"Missing secrets data".to_string(),
))?,
}))
}
/// Revoke the Vault token itself
///
/// If successful, the Vault Token can no longer be used
pub fn revoke_self(&self) -> Result<(), crate::Error> {
info!("Revoking self Vault Token");
let request = self.build_revoke_self_request()?;
// HTTP 204 is returned
Self::execute_request_no_body(&self.client, request)?;
Ok(())
}
fn build_revoke_self_request(&self) -> Result<reqwest::Request, crate::Error> {
let vault_address = url::Url::parse(self.address())?;
let vault_address = vault_address.join("/v1/auth/token/revoke-self")?;
Ok(self
.client
.post(vault_address)
.header("X-Vault-Token", self.token.as_str())
.build()?)
}
fn build_nomad_token_request(
&self,
nomad_path: &str,
nomad_role: &str,
) -> Result<reqwest::Request, crate::Error> {
let vault_address = url::Url::parse(self.address())?;
let vault_address =
vault_address.join(&format!("/v1/{}/creds/{}", nomad_path, nomad_role))?;
Ok(self
.client
.get(vault_address)
.header("X-Vault-Token", self.token.as_str())
.build()?)
}
}
impl Drop for Client {
fn drop(&mut self) {
if self.revoke_self_on_drop {
info!("Vault Client is being dropped. Revoking its own Token");
match self.revoke_self() {
Ok(()) => {}
Err(e) => warn!("Error revoking self: {}", e | pub client_token: crate::Secret,
/// The accessor for the Token
pub accessor: String,
/// List of policies for token, including from Identity
pub policies: Vec<String>, | random_line_split |
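
`Client::new` accepts an optional pre-built HTTP client, which is where the custom-root-CA setup mentioned in the doc comments would happen. A minimal sketch, assuming the `crate::vault` module path and placeholder address/token; any real TLS customization (root CA, client identity) would be applied on the builder before `build()`:

fn client_with_custom_http() -> Result<crate::vault::Client, crate::Error> {
    // Configure the builder here (e.g. add a root CA certificate) before build().
    let http = reqwest::ClientBuilder::new().build()?;
    crate::vault::Client::new(
        "https://vault.example.com:8200", // assumed address
        "s.existing-token",               // placeholder token
        false,                            // keep the token valid after drop
        Some(http),
    )
}
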
controlPanel.go | rapper = w
flow := widget.NewFlow(widget.AxisHorizontal)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
flow.Insert(widget.NewLabel(fmt.Sprintf("%-30s", text)), nil)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
w.label = widget.NewLabel(fmt.Sprintf("%30s", ""))
flow.Insert(w.label, nil)
uniform := widget.NewUniform(theme.StaticColor(colornames.Aqua), widget.NewPadder(widget.AxisBoth, unit.Ems(0.5), flow))
padding := widget.NewPadder(widget.AxisBoth, unit.Ems(0.5), uniform)
w.Insert(padding, nil)
go func() {
for {
newString := w.tick()
w.label.Text = fmt.Sprintf("%-30s", newString)
w.label.Mark(node.MarkNeedsPaintBase)
p.w.Send(update{})
}
}()
return w
}
type Button struct {
node.ShellEmbed
icon []byte
onClick func()
z iconvg.Rasterizer
uniform *widget.Uniform
label *widget.Label
pressed bool
}
func (p *ControlPanel) NewButton(text string, icon []byte, toggle bool, onClick func() string) *Button {
w := &Button{
icon: icon,
}
fn := func() {
w.pressed = !w.pressed
w.label.Text = fmt.Sprintf("%-30s", onClick())
w.label.Mark(node.MarkNeedsPaintBase)
if w.pressed || !toggle {
w.uniform.ThemeColor = theme.StaticColor(colornames.Lightgreen)
} else {
w.uniform.ThemeColor = theme.StaticColor(colornames.Lightsalmon)
}
w.uniform.Mark(node.MarkNeedsPaintBase)
p.w.Send(panelUpdate{})
}
w.onClick = fn
w.Wrapper = w
flow := widget.NewFlow(widget.AxisHorizontal)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
w.label = widget.NewLabel(fmt.Sprintf("%-30s", text))
flow.Insert(w.label, nil)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
flow.Insert(NewIcon(icon), nil)
w.uniform = widget.NewUniform(theme.StaticColor(colornames.Lightsalmon), flow)
padding := widget.NewPadder(widget.AxisBoth, unit.Ems(0.5), w.uniform)
w.Insert(padding, nil)
return w
}
func (w *Button) OnInputEvent(e interface{}, origin image.Point) node.EventHandled {
switch e := e.(type) {
case gesture.Event:
if e.Type != gesture.TypeTap {
break
}
if w.onClick != nil {
w.uniform.ThemeColor = theme.StaticColor(colornames.Orange)
w.uniform.Mark(node.MarkNeedsPaintBase)
go w.onClick()
}
return node.Handled
}
return node.NotHandled
}
func (p *ControlPanel) start(r *RenderState) {
p.world = r.world
p.s = r.s
p.r = r
controls := widget.NewFlow(widget.AxisVertical)
tickers := widget.NewFlow(widget.AxisVertical)
p.root = widget.NewSheet(
widget.NewUniform(theme.StaticColor(colornames.White),
widget.NewPadder(widget.AxisBoth, unit.Ems(1), widget.NewFlow(widget.AxisHorizontal, controls, widget.NewSizer(unit.Ems(1), unit.Value{}, nil), tickers))))
controls.Insert(p.NewGenerateFlowFieldsButton(), nil)
controls.Insert(p.NewStartSimulationButton(), nil)
controls.Insert(p.NewHighlightActiveButton(), nil)
controls.Insert(p.NewExitButton(), nil)
controls.Insert(p.NewSaveFlowFieldsButton(), nil)
controls.Insert(p.NewLoadFlowFieldsButton(), nil)
controls.Insert(p.NewCloseAllButton(), nil)
tickers.Insert(p.NewTicker("Total People:", func() string { return fmt.Sprintf("%d", <-p.world.peopleCurrentChan) }), nil)
tickers.Insert(p.NewTicker("Total People Added:", func() string { return fmt.Sprintf("%d", <-p.world.peopleAddedChan) }), nil)
tickers.Insert(p.NewTicker("Simulation Time:", func() string { return (<-p.world.simulationTimeChan).String() }), nil)
tickers.Insert(p.NewTicker("Current Active People:", func() string { return fmt.Sprintf("%d", <-p.world.currentSendersChan) }), nil)
tickers.Insert(p.NewNetworkTickers(), nil)
for i := range p.world.scenario.Destinations {
dest := &p.world.scenario.Destinations[i]
button := p.NewButton(fmt.Sprintf("Close %s", dest.Name), icons.NavigationClose, true, func() string {
if dest.isClosed() {
dest.Open()
return fmt.Sprintf("Close %s", dest.Name)
} else {
dest.Close()
return fmt.Sprintf("Reopen %s", dest.Name)
}
})
controls.Insert(button, nil)
}
newtheme := theme.Theme{}
p.root.Measure(&newtheme, -1, -1)
go func() {
//widget.RunWindow(p.s, p.root, nil)
err := p.RunWindow(&widget.RunWindowOptions{
NewWindowOptions: screen.NewWindowOptions{
Title: "Simulation control",
Width: p.root.MeasuredSize.X,
Height: p.root.MeasuredSize.Y,
},
Theme: newtheme})
if err != nil {
log.Fatalln("error: ", err)
}
}()
}
func (p *ControlPanel) NewGenerateFlowFieldsButton() *Button {
pressed := false
return p.NewButton("Generate Flow Fields", icons.MapsMap, false, func() string {
if pressed {
return "Flow Fields Generated"
}
pressed = true
log.Println("Generate Flow Fields")
InitFlowFields()
for _, dest := range p.world.scenario.Destinations {
log.Println("Flow field for", dest.Name, "starting")
err := p.world.GenerateFlowField(dest.ID)
log.Println("Flow field for", dest.Name, "done")
if err != nil {
log.Fatal("cannot make flow field for", dest)
}
}
log.Println("Flow fields done")
return "Flow Fields Generated"
})
}
func (p *ControlPanel) NewLoadFlowFieldsButton() *Button {
pressed := false
return p.NewButton("Load Flow Fields From File", icons.FileFileDownload, false, func() string {
if pressed {
return "Flow Fields Loaded"
}
pressed = true
log.Println("Load Flow Fields")
for _, dest := range p.world.scenario.Destinations {
log.Println("Flow field for", dest.Name, "saving")
err := p.world.LoadFlowField(dest.ID)
if err != nil {
log.Println("error loading flow field", err)
}
}
log.Println("Loading Flow Fields done")
return "Flow Fields Loaded"
})
}
func (p *ControlPanel) NewSaveFlowFieldsButton() *Button {
return p.NewButton("Save Flow Fields", icons.ContentSave, false, func() string {
log.Println("Save Flow Fields")
for _, dest := range p.world.scenario.Destinations {
log.Println("Flow field for", dest.Name, "saving")
err := p.world.SaveFlowField(dest.ID)
if err != nil {
log.Println("error saving flow field", err)
return "Retry Save Flow Fields"
}
}
log.Println("Saving Flow Fields done")
return "Flow Fields Saved"
})
}
func (p *ControlPanel) NewStartSimulationButton() *Button {
pressed := false
return p.NewButton("Run Simulation", icons.ActionBuild, true, func() string {
if pressed {
pressed = false
log.Println("Pausing Simulation")
p.world.playPauseChan <- true
return "Play Simulation"
}
pressed = true
log.Println("Starting Simulation")
p.world.playPauseChan <- true
return "Pause Simulation"
})
}
func (p *ControlPanel) NewHighlightActiveButton() *Button {
return p.NewButton("Highlight Active AI", icons.ActionFavorite, true, func() string {
if p.world.highlightActive {
p.world.highlightActive = false
} else {
p.world.highlightActive = true
}
p.r.w.Send(UpdateEvent{p.world})
return "Highlight Active AI"
})
}
func (p *ControlPanel) NewExitButton() *Button {
clicks := 3
return p.NewButton(fmt.Sprintf("Exit - click %d time(s)", clicks), icons.ActionExitToApp, false, func() string {
clicks--
if clicks < 1 {
os.Exit(0)
}
return fmt.Sprintf("Exit - click %d time(s)", clicks)
})
}
func (p *ControlPanel) NewCloseAllButton() *Button {
open := true
button := p.NewButton("Close all", icons.NavigationClose, true, func() string {
for i := 0; i < len(p.world.scenario.Destinations); i++ | {
dest := &p.world.scenario.Destinations[i]
if dest.ID == p.world.scenario.Exit.ID {
continue
}
if open {
dest.Close()
} else {
dest.Open()
}
} | conditional_block |
|
controlPanel.go | ()
w.z.SetDstImage(ctx.Dst, w.Rect.Add(origin), draw.Over)
return iconvg.Decode(&w.z, w.icon, nil)
}
type Ticker struct {
node.ShellEmbed
tick func() string
label *widget.Label
}
func (p *ControlPanel) NewTicker(text string, tick func() string) *Ticker {
w := &Ticker{
tick: tick,
}
w.Wrapper = w
flow := widget.NewFlow(widget.AxisHorizontal)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
flow.Insert(widget.NewLabel(fmt.Sprintf("%-30s", text)), nil)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
w.label = widget.NewLabel(fmt.Sprintf("%30s", ""))
flow.Insert(w.label, nil)
uniform := widget.NewUniform(theme.StaticColor(colornames.Aqua), widget.NewPadder(widget.AxisBoth, unit.Ems(0.5), flow))
padding := widget.NewPadder(widget.AxisBoth, unit.Ems(0.5), uniform)
w.Insert(padding, nil)
go func() {
for {
newString := w.tick()
w.label.Text = fmt.Sprintf("%-30s", newString)
w.label.Mark(node.MarkNeedsPaintBase)
p.w.Send(update{})
}
}()
return w
}
type Button struct {
node.ShellEmbed
icon []byte
onClick func()
z iconvg.Rasterizer
uniform *widget.Uniform
label *widget.Label
pressed bool
}
func (p *ControlPanel) NewButton(text string, icon []byte, toggle bool, onClick func() string) *Button | flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
w.label = widget.NewLabel(fmt.Sprintf("%-30s", text))
flow.Insert(w.label, nil)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
flow.Insert(NewIcon(icon), nil)
w.uniform = widget.NewUniform(theme.StaticColor(colornames.Lightsalmon), flow)
padding := widget.NewPadder(widget.AxisBoth, unit.Ems(0.5), w.uniform)
w.Insert(padding, nil)
return w
}
func (w *Button) OnInputEvent(e interface{}, origin image.Point) node.EventHandled {
switch e := e.(type) {
case gesture.Event:
if e.Type != gesture.TypeTap {
break
}
if w.onClick != nil {
w.uniform.ThemeColor = theme.StaticColor(colornames.Orange)
w.uniform.Mark(node.MarkNeedsPaintBase)
go w.onClick()
}
return node.Handled
}
return node.NotHandled
}
func (p *ControlPanel) start(r *RenderState) {
p.world = r.world
p.s = r.s
p.r = r
controls := widget.NewFlow(widget.AxisVertical)
tickers := widget.NewFlow(widget.AxisVertical)
p.root = widget.NewSheet(
widget.NewUniform(theme.StaticColor(colornames.White),
widget.NewPadder(widget.AxisBoth, unit.Ems(1), widget.NewFlow(widget.AxisHorizontal, controls, widget.NewSizer(unit.Ems(1), unit.Value{}, nil), tickers))))
controls.Insert(p.NewGenerateFlowFieldsButton(), nil)
controls.Insert(p.NewStartSimulationButton(), nil)
controls.Insert(p.NewHighlightActiveButton(), nil)
controls.Insert(p.NewExitButton(), nil)
controls.Insert(p.NewSaveFlowFieldsButton(), nil)
controls.Insert(p.NewLoadFlowFieldsButton(), nil)
controls.Insert(p.NewCloseAllButton(), nil)
tickers.Insert(p.NewTicker("Total People:", func() string { return fmt.Sprintf("%d", <-p.world.peopleCurrentChan) }), nil)
tickers.Insert(p.NewTicker("Total People Added:", func() string { return fmt.Sprintf("%d", <-p.world.peopleAddedChan) }), nil)
tickers.Insert(p.NewTicker("Simulation Time:", func() string { return (<-p.world.simulationTimeChan).String() }), nil)
tickers.Insert(p.NewTicker("Current Active People:", func() string { return fmt.Sprintf("%d", <-p.world.currentSendersChan) }), nil)
tickers.Insert(p.NewNetworkTickers(), nil)
for i := range p.world.scenario.Destinations {
dest := &p.world.scenario.Destinations[i]
button := p.NewButton(fmt.Sprintf("Close %s", dest.Name), icons.NavigationClose, true, func() string {
if dest.isClosed() {
dest.Open()
return fmt.Sprintf("Close %s", dest.Name)
} else {
dest.Close()
return fmt.Sprintf("Reopen %s", dest.Name)
}
})
controls.Insert(button, nil)
}
newtheme := theme.Theme{}
p.root.Measure(&newtheme, -1, -1)
go func() {
//widget.RunWindow(p.s, p.root, nil)
err := p.RunWindow(&widget.RunWindowOptions{
NewWindowOptions: screen.NewWindowOptions{
Title: "Simulation control",
Width: p.root.MeasuredSize.X,
Height: p.root.MeasuredSize.Y,
},
Theme: newtheme})
if err != nil {
log.Fatalln("error: ", err)
}
}()
}
func (p *ControlPanel) NewGenerateFlowFieldsButton() *Button {
pressed := false
return p.NewButton("Generate Flow Fields", icons.MapsMap, false, func() string {
if pressed {
return "Flow Fields Generated"
}
pressed = true
log.Println("Generate Flow Fields")
InitFlowFields()
for _, dest := range p.world.scenario.Destinations {
log.Println("Flow field for", dest.Name, "starting")
err := p.world.GenerateFlowField(dest.ID)
log.Println("Flow field for", dest.Name, "done")
if err != nil {
log.Fatal("cannot make flow field for", dest)
}
}
log.Println("Flow fields done")
return "Flow Fields Generated"
})
}
func (p *ControlPanel) NewLoadFlowFieldsButton() *Button {
pressed := false
return p.NewButton("Load Flow Fields From File", icons.FileFileDownload, false, func() string {
if pressed {
return "Flow Fields Loaded"
}
pressed = true
log.Println("Load Flow Fields")
for _, dest := range p.world.scenario.Destinations {
log.Println("Flow field for", dest.Name, "saving")
err := p.world.LoadFlowField(dest.ID)
if err != nil {
log.Println("error loading flow field", err)
}
}
log.Println("Loading Flow Fields done")
return "Flow Fields Loaded"
})
}
func (p *ControlPanel) NewSaveFlowFieldsButton() *Button {
return p.NewButton("Save Flow Fields", icons.ContentSave, false, func() string {
log.Println("Save Flow Fields")
for _, dest := range p.world.scenario.Destinations {
log.Println("Flow field for", dest.Name, "saving")
err := p.world.SaveFlowField(dest.ID)
if err != nil {
log.Println("error saving flow field", err)
return "Retry Save Flow Fields"
}
}
log.Println("Saving Flow Fields done")
return "Flow Fields Saved"
})
}
func (p *ControlPanel) NewStartSimulationButton() *Button {
pressed := false
return p.NewButton("Run Simulation", icons.ActionBuild, true, func() string {
if pressed {
pressed = false
log.Println("Pausing Simulation")
p.world.playPauseChan <- true
return "Play Simulation"
}
pressed = true
log.Println("Starting Simulation")
p.world.playPauseChan <- true
return "Pause Simulation"
})
}
func (p *ControlPanel) NewHighlightActiveButton() *Button {
return p.NewButton("Highlight Active AI", icons.ActionFavorite, true, func() string {
if p.world.highlightActive {
p.world.highlightActive = false
} else {
p.world.highlightActive = true
}
p.r.w.Send(UpdateEvent{p.world})
return "Highlight Active AI"
})
}
func (p *ControlPanel) NewExitButton() *Button {
clicks := 3
return p.NewButton(fmt.Sprintf("Exit - click %d time(s)", clicks), icons.ActionExitToApp, false, func() string {
clicks--
if clicks < 1 {
os.Exit(0)
}
return fmt.Sprintf("Exit - click %d time(s)", clicks)
})
}
func (p *ControlPanel) NewCloseAllButton() *Button {
open := true
button := p | {
w := &Button{
icon: icon,
}
fn := func() {
w.pressed = !w.pressed
w.label.Text = fmt.Sprintf("%-30s", onClick())
w.label.Mark(node.MarkNeedsPaintBase)
if w.pressed || !toggle {
w.uniform.ThemeColor = theme.StaticColor(colornames.Lightgreen)
} else {
w.uniform.ThemeColor = theme.StaticColor(colornames.Lightsalmon)
}
w.uniform.Mark(node.MarkNeedsPaintBase)
p.w.Send(panelUpdate{})
}
w.onClick = fn
w.Wrapper = w
flow := widget.NewFlow(widget.AxisHorizontal) | identifier_body |
controlPanel.go | node.LeafEmbed
icon []byte
z iconvg.Rasterizer
}
func NewIcon(icon []byte) *Icon {
w := &Icon{
icon: icon,
}
w.Wrapper = w
return w
}
func (w *Icon) Measure(t *theme.Theme, widthHint, heightHint int) {
px := t.Pixels(unit.Ems(2)).Ceil()
w.MeasuredSize = image.Point{X: px, Y: px}
}
func (w *Icon) PaintBase(ctx *node.PaintBaseContext, origin image.Point) error {
w.Marks.UnmarkNeedsPaintBase()
w.z.SetDstImage(ctx.Dst, w.Rect.Add(origin), draw.Over)
return iconvg.Decode(&w.z, w.icon, nil)
}
type Ticker struct {
node.ShellEmbed
tick func() string
label *widget.Label
}
func (p *ControlPanel) NewTicker(text string, tick func() string) *Ticker {
w := &Ticker{
tick: tick,
}
w.Wrapper = w
flow := widget.NewFlow(widget.AxisHorizontal)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
flow.Insert(widget.NewLabel(fmt.Sprintf("%-30s", text)), nil)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
w.label = widget.NewLabel(fmt.Sprintf("%30s", ""))
flow.Insert(w.label, nil)
uniform := widget.NewUniform(theme.StaticColor(colornames.Aqua), widget.NewPadder(widget.AxisBoth, unit.Ems(0.5), flow))
padding := widget.NewPadder(widget.AxisBoth, unit.Ems(0.5), uniform)
w.Insert(padding, nil)
go func() {
for {
newString := w.tick()
w.label.Text = fmt.Sprintf("%-30s", newString)
w.label.Mark(node.MarkNeedsPaintBase)
p.w.Send(update{})
}
}()
return w
}
type Button struct {
node.ShellEmbed
icon []byte
onClick func()
z iconvg.Rasterizer
uniform *widget.Uniform
label *widget.Label
pressed bool
}
func (p *ControlPanel) NewButton(text string, icon []byte, toggle bool, onClick func() string) *Button {
w := &Button{
icon: icon,
}
fn := func() {
w.pressed = !w.pressed
w.label.Text = fmt.Sprintf("%-30s", onClick())
w.label.Mark(node.MarkNeedsPaintBase)
if w.pressed || !toggle {
w.uniform.ThemeColor = theme.StaticColor(colornames.Lightgreen)
} else {
w.uniform.ThemeColor = theme.StaticColor(colornames.Lightsalmon)
}
w.uniform.Mark(node.MarkNeedsPaintBase)
p.w.Send(panelUpdate{})
}
w.onClick = fn
w.Wrapper = w
flow := widget.NewFlow(widget.AxisHorizontal)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
w.label = widget.NewLabel(fmt.Sprintf("%-30s", text))
flow.Insert(w.label, nil)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
flow.Insert(NewIcon(icon), nil)
w.uniform = widget.NewUniform(theme.StaticColor(colornames.Lightsalmon), flow)
padding := widget.NewPadder(widget.AxisBoth, unit.Ems(0.5), w.uniform)
w.Insert(padding, nil)
return w
}
func (w *Button) OnInputEvent(e interface{}, origin image.Point) node.EventHandled {
switch e := e.(type) {
case gesture.Event:
if e.Type != gesture.TypeTap {
break
}
if w.onClick != nil {
w.uniform.ThemeColor = theme.StaticColor(colornames.Orange)
w.uniform.Mark(node.MarkNeedsPaintBase)
go w.onClick()
}
return node.Handled
}
return node.NotHandled
}
func (p *ControlPanel) start(r *RenderState) {
p.world = r.world
p.s = r.s
p.r = r
controls := widget.NewFlow(widget.AxisVertical)
tickers := widget.NewFlow(widget.AxisVertical)
p.root = widget.NewSheet(
widget.NewUniform(theme.StaticColor(colornames.White),
widget.NewPadder(widget.AxisBoth, unit.Ems(1), widget.NewFlow(widget.AxisHorizontal, controls, widget.NewSizer(unit.Ems(1), unit.Value{}, nil), tickers))))
controls.Insert(p.NewGenerateFlowFieldsButton(), nil)
controls.Insert(p.NewStartSimulationButton(), nil)
controls.Insert(p.NewHighlightActiveButton(), nil)
controls.Insert(p.NewExitButton(), nil)
controls.Insert(p.NewSaveFlowFieldsButton(), nil)
controls.Insert(p.NewLoadFlowFieldsButton(), nil)
controls.Insert(p.NewCloseAllButton(), nil)
tickers.Insert(p.NewTicker("Total People:", func() string { return fmt.Sprintf("%d", <-p.world.peopleCurrentChan) }), nil)
tickers.Insert(p.NewTicker("Total People Added:", func() string { return fmt.Sprintf("%d", <-p.world.peopleAddedChan) }), nil)
tickers.Insert(p.NewTicker("Simulation Time:", func() string { return (<-p.world.simulationTimeChan).String() }), nil)
tickers.Insert(p.NewTicker("Current Active People:", func() string { return fmt.Sprintf("%d", <-p.world.currentSendersChan) }), nil)
tickers.Insert(p.NewNetworkTickers(), nil)
for i := range p.world.scenario.Destinations {
dest := &p.world.scenario.Destinations[i]
button := p.NewButton(fmt.Sprintf("Close %s", dest.Name), icons.NavigationClose, true, func() string {
if dest.isClosed() {
dest.Open()
return fmt.Sprintf("Close %s", dest.Name)
} else {
dest.Close()
return fmt.Sprintf("Reopen %s", dest.Name)
}
})
controls.Insert(button, nil)
}
newtheme := theme.Theme{}
p.root.Measure(&newtheme, -1, -1)
go func() {
//widget.RunWindow(p.s, p.root, nil)
err := p.RunWindow(&widget.RunWindowOptions{
NewWindowOptions: screen.NewWindowOptions{
Title: "Simulation control",
Width: p.root.MeasuredSize.X,
Height: p.root.MeasuredSize.Y,
},
Theme: newtheme})
if err != nil {
log.Fatalln("error: ", err)
}
}()
}
func (p *ControlPanel) NewGenerateFlowFieldsButton() *Button {
pressed := false
return p.NewButton("Generate Flow Fields", icons.MapsMap, false, func() string {
if pressed {
return "Flow Fields Generated"
}
pressed = true
log.Println("Generate Flow Fields")
InitFlowFields()
for _, dest := range p.world.scenario.Destinations {
log.Println("Flow field for", dest.Name, "starting")
err := p.world.GenerateFlowField(dest.ID)
log.Println("Flow field for", dest.Name, "done")
if err != nil {
log.Fatal("cannot make flow field for", dest)
}
}
log.Println("Flow fields done")
return "Flow Fields Generated"
})
}
func (p *ControlPanel) NewLoadFlowFieldsButton() *Button {
pressed := false
return p.NewButton("Load Flow Fields From File", icons.FileFileDownload, false, func() string {
if pressed {
return "Flow Fields Loaded"
}
pressed = true
log.Println("Load Flow Fields")
for _, dest := range p.world.scenario.Destinations {
log.Println("Flow field for", dest.Name, "saving")
err := p.world.LoadFlowField(dest.ID)
if err != nil {
log.Println("error loading flow field", err)
}
}
log.Println("Loading Flow Fields done")
return "Flow Fields Loaded"
})
}
func (p *ControlPanel) NewSaveFlowFieldsButton() *Button {
return p.NewButton("Save Flow Fields", icons.ContentSave, false, func() string {
log.Println("Save Flow Fields")
for _, dest := range p.world.scenario.Destinations {
log.Println("Flow field for", dest.Name, "saving")
err := p.world.SaveFlowField(dest.ID)
if err != nil {
log.Println("error saving flow field", err)
return "Retry Save Flow Fields"
}
}
log.Println("Saving Flow Fields done")
return "Flow Fields Saved"
})
}
func (p *ControlPanel) NewStartSimulationButton() *Button {
pressed := false
return p.NewButton("Run Simulation", icons.ActionBuild, true, func() string {
if pressed {
pressed = false
log.Println("Pausing Simulation")
p.world.playPauseChan <- true
return "Play Simulation"
}
pressed = true
log.Println("Starting Simulation")
p.world.playPauseChan <- true
return "Pause Simulation"
})
}
func (p *ControlPanel) NewHighlightActiveButton() *Button {
return p.NewButton("Highlight Active AI", icons.ActionFavorite, true, func() string {
if p.world.highlightActive {
p.world.highlightActive = false
} else {
p |
type panelUpdate struct {
}
type Icon struct { | random_line_split |
|
controlPanel.go | ()
w.z.SetDstImage(ctx.Dst, w.Rect.Add(origin), draw.Over)
return iconvg.Decode(&w.z, w.icon, nil)
}
type Ticker struct {
node.ShellEmbed
tick func() string
label *widget.Label
}
func (p *ControlPanel) NewTicker(text string, tick func() string) *Ticker {
w := &Ticker{
tick: tick,
}
w.Wrapper = w
flow := widget.NewFlow(widget.AxisHorizontal)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
flow.Insert(widget.NewLabel(fmt.Sprintf("%-30s", text)), nil)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
w.label = widget.NewLabel(fmt.Sprintf("%30s", ""))
flow.Insert(w.label, nil)
uniform := widget.NewUniform(theme.StaticColor(colornames.Aqua), widget.NewPadder(widget.AxisBoth, unit.Ems(0.5), flow))
padding := widget.NewPadder(widget.AxisBoth, unit.Ems(0.5), uniform)
w.Insert(padding, nil)
go func() {
for {
newString := w.tick()
w.label.Text = fmt.Sprintf("%-30s", newString)
w.label.Mark(node.MarkNeedsPaintBase)
p.w.Send(update{})
}
}()
return w
}
type Button struct {
node.ShellEmbed
icon []byte
onClick func()
z iconvg.Rasterizer
uniform *widget.Uniform
label *widget.Label
pressed bool
}
func (p *ControlPanel) NewButton(text string, icon []byte, toggle bool, onClick func() string) *Button {
w := &Button{
icon: icon,
}
fn := func() {
w.pressed = !w.pressed
w.label.Text = fmt.Sprintf("%-30s", onClick())
w.label.Mark(node.MarkNeedsPaintBase)
if w.pressed || !toggle {
w.uniform.ThemeColor = theme.StaticColor(colornames.Lightgreen)
} else {
w.uniform.ThemeColor = theme.StaticColor(colornames.Lightsalmon)
}
w.uniform.Mark(node.MarkNeedsPaintBase)
p.w.Send(panelUpdate{})
}
w.onClick = fn
w.Wrapper = w
flow := widget.NewFlow(widget.AxisHorizontal)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
w.label = widget.NewLabel(fmt.Sprintf("%-30s", text))
flow.Insert(w.label, nil)
flow.Insert(widget.NewSizer(unit.Ems(0.5), unit.Value{}, nil), nil)
flow.Insert(NewIcon(icon), nil)
w.uniform = widget.NewUniform(theme.StaticColor(colornames.Lightsalmon), flow)
padding := widget.NewPadder(widget.AxisBoth, unit.Ems(0.5), w.uniform)
w.Insert(padding, nil)
return w
}
func (w *Button) OnInputEvent(e interface{}, origin image.Point) node.EventHandled {
switch e := e.(type) {
case gesture.Event:
if e.Type != gesture.TypeTap {
break
}
if w.onClick != nil {
w.uniform.ThemeColor = theme.StaticColor(colornames.Orange)
w.uniform.Mark(node.MarkNeedsPaintBase)
go w.onClick()
}
return node.Handled
}
return node.NotHandled
}
func (p *ControlPanel) start(r *RenderState) {
p.world = r.world
p.s = r.s
p.r = r
controls := widget.NewFlow(widget.AxisVertical)
tickers := widget.NewFlow(widget.AxisVertical)
p.root = widget.NewSheet(
widget.NewUniform(theme.StaticColor(colornames.White),
widget.NewPadder(widget.AxisBoth, unit.Ems(1), widget.NewFlow(widget.AxisHorizontal, controls, widget.NewSizer(unit.Ems(1), unit.Value{}, nil), tickers))))
controls.Insert(p.NewGenerateFlowFieldsButton(), nil)
controls.Insert(p.NewStartSimulationButton(), nil)
controls.Insert(p.NewHighlightActiveButton(), nil)
controls.Insert(p.NewExitButton(), nil)
controls.Insert(p.NewSaveFlowFieldsButton(), nil)
controls.Insert(p.NewLoadFlowFieldsButton(), nil)
controls.Insert(p.NewCloseAllButton(), nil)
tickers.Insert(p.NewTicker("Total People:", func() string { return fmt.Sprintf("%d", <-p.world.peopleCurrentChan) }), nil)
tickers.Insert(p.NewTicker("Total People Added:", func() string { return fmt.Sprintf("%d", <-p.world.peopleAddedChan) }), nil)
tickers.Insert(p.NewTicker("Simulation Time:", func() string { return (<-p.world.simulationTimeChan).String() }), nil)
tickers.Insert(p.NewTicker("Current Active People:", func() string { return fmt.Sprintf("%d", <-p.world.currentSendersChan) }), nil)
tickers.Insert(p.NewNetworkTickers(), nil)
for i := range p.world.scenario.Destinations {
dest := &p.world.scenario.Destinations[i]
button := p.NewButton(fmt.Sprintf("Close %s", dest.Name), icons.NavigationClose, true, func() string {
if dest.isClosed() {
dest.Open()
return fmt.Sprintf("Close %s", dest.Name)
} else {
dest.Close()
return fmt.Sprintf("Reopen %s", dest.Name)
}
})
controls.Insert(button, nil)
}
newtheme := theme.Theme{}
p.root.Measure(&newtheme, -1, -1)
go func() {
//widget.RunWindow(p.s, p.root, nil)
err := p.RunWindow(&widget.RunWindowOptions{
NewWindowOptions: screen.NewWindowOptions{
Title: "Simulation control",
Width: p.root.MeasuredSize.X,
Height: p.root.MeasuredSize.Y,
},
Theme: newtheme})
if err != nil {
log.Fatalln("error: ", err)
}
}()
}
func (p *ControlPanel) NewGenerateFlowFieldsButton() *Button {
pressed := false
return p.NewButton("Generate Flow Fields", icons.MapsMap, false, func() string {
if pressed {
return "Flow Fields Generated"
}
pressed = true
log.Println("Generate Flow Fields")
InitFlowFields()
for _, dest := range p.world.scenario.Destinations {
log.Println("Flow field for", dest.Name, "starting")
err := p.world.GenerateFlowField(dest.ID)
log.Println("Flow field for", dest.Name, "done")
if err != nil {
log.Fatal("cannot make flow field for", dest)
}
}
log.Println("Flow fields done")
return "Flow Fields Generated"
})
}
func (p *ControlPanel) NewLoadFlowFieldsButton() *Button {
pressed := false
return p.NewButton("Load Flow Fields From File", icons.FileFileDownload, false, func() string {
if pressed {
return "Flow Fields Loaded"
}
pressed = true
log.Println("Load Flow Fields")
for _, dest := range p.world.scenario.Destinations {
log.Println("Flow field for", dest.Name, "saving")
err := p.world.LoadFlowField(dest.ID)
if err != nil {
log.Println("error loading flow field", err)
}
}
log.Println("Loading Flow Fields done")
return "Flow Fields Loaded"
})
}
func (p *ControlPanel) NewSaveFlowFieldsButton() *Button {
return p.NewButton("Save Flow Fields", icons.ContentSave, false, func() string {
log.Println("Save Flow Fields")
for _, dest := range p.world.scenario.Destinations {
log.Println("Flow field for", dest.Name, "saving")
err := p.world.SaveFlowField(dest.ID)
if err != nil {
log.Println("error saving flow field", err)
return "Retry Save Flow Fields"
}
}
log.Println("Saving Flow Fields done")
return "Flow Fields Saved"
})
}
func (p *ControlPanel) NewStartSimulationButton() *Button {
pressed := false
return p.NewButton("Run Simulation", icons.ActionBuild, true, func() string {
if pressed {
pressed = false
log.Println("Pausing Simulation")
p.world.playPauseChan <- true
return "Play Simulation"
}
pressed = true
log.Println("Starting Simulation")
p.world.playPauseChan <- true
return "Pause Simulation"
})
}
func (p *ControlPanel) | () *Button {
return p.NewButton("Highlight Active AI", icons.ActionFavorite, true, func() string {
if p.world.highlightActive {
p.world.highlightActive = false
} else {
p.world.highlightActive = true
}
p.r.w.Send(UpdateEvent{p.world})
return "Highlight Active AI"
})
}
func (p *ControlPanel) NewExitButton() *Button {
clicks := 3
return p.NewButton(fmt.Sprintf("Exit - click %d time(s)", clicks), icons.ActionExitToApp, false, func() string {
clicks--
if clicks < 1 {
os.Exit(0)
}
return fmt.Sprintf("Exit - click %d time(s)", clicks)
})
}
func (p *ControlPanel) NewCloseAllButton() *Button {
open := true
button := p | NewHighlightActiveButton | identifier_name |