file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
sample_file_parser.py | import json
import logging
from os import listdir, path
from typing import Dict, List
from tqdm import tqdm
TELEM_DIR_PATH = "../envs/monkey_zoo/blackbox/tests/performance/telemetry_sample"
MAX_SAME_TYPE_TELEM_FILES = 10000
LOGGER = logging.getLogger(__name__)
class SampleFileParser:
@staticmethod
def save_teletries_to_files(telems: List[Dict]):
for telem in tqdm(telems, desc="Telemetries saved to files", position=3):
SampleFileParser.save_telemetry_to_file(telem)
@staticmethod
def save_telemetry_to_file(telem: Dict):
|
@staticmethod
def read_telem_files() -> List[str]:
telems = []
try:
file_paths = [
path.join(TELEM_DIR_PATH, f)
for f in listdir(TELEM_DIR_PATH)
if path.isfile(path.join(TELEM_DIR_PATH, f))
]
except FileNotFoundError:
raise FileNotFoundError(
"Telemetries to send not found. "
"Refer to readme to figure out how to generate telemetries and where to put them."
)
for file_path in file_paths:
with open(file_path, "r") as telem_file:
telem_string = "".join(telem_file.readlines()).replace("\n", "")
telems.append(telem_string)
return telems
@staticmethod
def get_all_telemetries() -> List[Dict]:
return [json.loads(t) for t in SampleFileParser.read_telem_files()]
| telem_filename = telem["name"] + telem["method"]
for i in range(MAX_SAME_TYPE_TELEM_FILES):
if not path.exists(path.join(TELEM_DIR_PATH, (str(i) + telem_filename))):
telem_filename = str(i) + telem_filename
break
with open(path.join(TELEM_DIR_PATH, telem_filename), "w") as file:
file.write(json.dumps(telem)) | identifier_body |
sample_file_parser.py | import json
import logging
from os import listdir, path
from typing import Dict, List
from tqdm import tqdm
TELEM_DIR_PATH = "../envs/monkey_zoo/blackbox/tests/performance/telemetry_sample"
MAX_SAME_TYPE_TELEM_FILES = 10000
LOGGER = logging.getLogger(__name__)
class SampleFileParser:
@staticmethod
def save_teletries_to_files(telems: List[Dict]):
for telem in tqdm(telems, desc="Telemetries saved to files", position=3):
SampleFileParser.save_telemetry_to_file(telem)
@staticmethod
def | (telem: Dict):
telem_filename = telem["name"] + telem["method"]
for i in range(MAX_SAME_TYPE_TELEM_FILES):
if not path.exists(path.join(TELEM_DIR_PATH, (str(i) + telem_filename))):
telem_filename = str(i) + telem_filename
break
with open(path.join(TELEM_DIR_PATH, telem_filename), "w") as file:
file.write(json.dumps(telem))
@staticmethod
def read_telem_files() -> List[str]:
telems = []
try:
file_paths = [
path.join(TELEM_DIR_PATH, f)
for f in listdir(TELEM_DIR_PATH)
if path.isfile(path.join(TELEM_DIR_PATH, f))
]
except FileNotFoundError:
raise FileNotFoundError(
"Telemetries to send not found. "
"Refer to readme to figure out how to generate telemetries and where to put them."
)
for file_path in file_paths:
with open(file_path, "r") as telem_file:
telem_string = "".join(telem_file.readlines()).replace("\n", "")
telems.append(telem_string)
return telems
@staticmethod
def get_all_telemetries() -> List[Dict]:
return [json.loads(t) for t in SampleFileParser.read_telem_files()]
| save_telemetry_to_file | identifier_name |
ejemplo-funciona.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
|
appname = "example"
class App(object):
def __init__(self, root=None):
if not root:
root = Tk()
self.root = root
self.initUI()
def initUI(self):
self.root.title(appname)
menubar = Menu(self.root)
self.root.config(menu=menubar)
fileMenu = Menu(menubar, tearoff=0)
menubar.add_command(label="Tomar Foto", command=self.tomarFoto)
# Rafa
for i in range(3):
self.root.columnconfigure(i, weight=1)
for i in range(20):
self.root.rowconfigure(i, weight=1)
self.etiqueta = Label(self.root, text="Hola")
# fin Rafa
self.canvas = Canvas(self.root)
# self.canvas.pack(side=LEFT, fill=BOTH)
self.canvas.pack(side=BOTTOM, fill=X)
self.scrollbar_vert = Scrollbar(self.root)
self.scrollbar_vert.pack(side=RIGHT, fill=Y)
self.scrollbar_hor = Scrollbar(self.root)
self.scrollbar_hor.config(orient=HORIZONTAL)
self.scrollbar_hor.pack(side=BOTTOM, fill=X)
def onExit(self):
self.root.quit()
def tomarFoto(self):
# Bloque : Tomamos la foto desde la web cam y la grabamos en formato PGM
video_capture = cv2.VideoCapture(0)
ret, frame = video_capture.read()
cv2.imshow('Video', frame)
params = list()
params.append(cv2.cv.CV_IMWRITE_PXM_BINARY)
params.append(1)
print "hola"
frame2 = cv2.cvtColor(frame, cv2.cv.CV_BGR2GRAY) # convert to grayscale
cv2.imwrite('cara2.pgm', frame2, params)
cv2.imwrite('cara2.PGM', frame2, params)
video_capture.release()
cv2.destroyAllWindows()
# Fin de Tomamos la foto desde la web cam y la grabamos en formato PGM
filename = 'cara2.pgm'
self.img = Image.open(filename)
self.photo_image = ImageTk.PhotoImage(self.img)
self.canvas.pack_forget()
self.canvas = Canvas(self.root, width=self.img.size[0], height=self.img.size[1])
self.canvas.create_image(10, 10, anchor=NW, image=self.photo_image)
self.canvas.pack(side=LEFT, fill=BOTH)
self.canvas.config(yscrollcommand=self.scrollbar_vert.set)
self.canvas.config(xscrollcommand=self.scrollbar_hor.set)
self.canvas.config(scrollregion=self.canvas.bbox(ALL))
self.scrollbar_vert.config(command=self.canvas.yview)
self.scrollbar_hor.config(command=self.canvas.xview)
def run(self):
self.root.mainloop()
def main():
root = Tk()
root.geometry("250x150+300+300")
app = App(root)
app.run()
if __name__ == '__main__':
main() | import cv2
from Tkinter import *
from PIL import Image, ImageTk
import tkFileDialog | random_line_split |
ejemplo-funciona.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import cv2
from Tkinter import *
from PIL import Image, ImageTk
import tkFileDialog
appname = "example"
class App(object):
def __init__(self, root=None):
if not root:
root = Tk()
self.root = root
self.initUI()
def initUI(self):
self.root.title(appname)
menubar = Menu(self.root)
self.root.config(menu=menubar)
fileMenu = Menu(menubar, tearoff=0)
menubar.add_command(label="Tomar Foto", command=self.tomarFoto)
# Rafa
for i in range(3):
self.root.columnconfigure(i, weight=1)
for i in range(20):
self.root.rowconfigure(i, weight=1)
self.etiqueta = Label(self.root, text="Hola")
# fin Rafa
self.canvas = Canvas(self.root)
# self.canvas.pack(side=LEFT, fill=BOTH)
self.canvas.pack(side=BOTTOM, fill=X)
self.scrollbar_vert = Scrollbar(self.root)
self.scrollbar_vert.pack(side=RIGHT, fill=Y)
self.scrollbar_hor = Scrollbar(self.root)
self.scrollbar_hor.config(orient=HORIZONTAL)
self.scrollbar_hor.pack(side=BOTTOM, fill=X)
def onExit(self):
self.root.quit()
def tomarFoto(self):
# Bloque : Tomamos la foto desde la web cam y la grabamos en formato PGM
|
def run(self):
self.root.mainloop()
def main():
root = Tk()
root.geometry("250x150+300+300")
app = App(root)
app.run()
if __name__ == '__main__':
main()
| video_capture = cv2.VideoCapture(0)
ret, frame = video_capture.read()
cv2.imshow('Video', frame)
params = list()
params.append(cv2.cv.CV_IMWRITE_PXM_BINARY)
params.append(1)
print "hola"
frame2 = cv2.cvtColor(frame, cv2.cv.CV_BGR2GRAY) # convert to grayscale
cv2.imwrite('cara2.pgm', frame2, params)
cv2.imwrite('cara2.PGM', frame2, params)
video_capture.release()
cv2.destroyAllWindows()
# Fin de Tomamos la foto desde la web cam y la grabamos en formato PGM
filename = 'cara2.pgm'
self.img = Image.open(filename)
self.photo_image = ImageTk.PhotoImage(self.img)
self.canvas.pack_forget()
self.canvas = Canvas(self.root, width=self.img.size[0], height=self.img.size[1])
self.canvas.create_image(10, 10, anchor=NW, image=self.photo_image)
self.canvas.pack(side=LEFT, fill=BOTH)
self.canvas.config(yscrollcommand=self.scrollbar_vert.set)
self.canvas.config(xscrollcommand=self.scrollbar_hor.set)
self.canvas.config(scrollregion=self.canvas.bbox(ALL))
self.scrollbar_vert.config(command=self.canvas.yview)
self.scrollbar_hor.config(command=self.canvas.xview) | identifier_body |
ejemplo-funciona.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import cv2
from Tkinter import *
from PIL import Image, ImageTk
import tkFileDialog
appname = "example"
class App(object):
def __init__(self, root=None):
if not root:
root = Tk()
self.root = root
self.initUI()
def initUI(self):
self.root.title(appname)
menubar = Menu(self.root)
self.root.config(menu=menubar)
fileMenu = Menu(menubar, tearoff=0)
menubar.add_command(label="Tomar Foto", command=self.tomarFoto)
# Rafa
for i in range(3):
self.root.columnconfigure(i, weight=1)
for i in range(20):
self.root.rowconfigure(i, weight=1)
self.etiqueta = Label(self.root, text="Hola")
# fin Rafa
self.canvas = Canvas(self.root)
# self.canvas.pack(side=LEFT, fill=BOTH)
self.canvas.pack(side=BOTTOM, fill=X)
self.scrollbar_vert = Scrollbar(self.root)
self.scrollbar_vert.pack(side=RIGHT, fill=Y)
self.scrollbar_hor = Scrollbar(self.root)
self.scrollbar_hor.config(orient=HORIZONTAL)
self.scrollbar_hor.pack(side=BOTTOM, fill=X)
def onExit(self):
self.root.quit()
def | (self):
# Bloque : Tomamos la foto desde la web cam y la grabamos en formato PGM
video_capture = cv2.VideoCapture(0)
ret, frame = video_capture.read()
cv2.imshow('Video', frame)
params = list()
params.append(cv2.cv.CV_IMWRITE_PXM_BINARY)
params.append(1)
print "hola"
frame2 = cv2.cvtColor(frame, cv2.cv.CV_BGR2GRAY) # convert to grayscale
cv2.imwrite('cara2.pgm', frame2, params)
cv2.imwrite('cara2.PGM', frame2, params)
video_capture.release()
cv2.destroyAllWindows()
# Fin de Tomamos la foto desde la web cam y la grabamos en formato PGM
filename = 'cara2.pgm'
self.img = Image.open(filename)
self.photo_image = ImageTk.PhotoImage(self.img)
self.canvas.pack_forget()
self.canvas = Canvas(self.root, width=self.img.size[0], height=self.img.size[1])
self.canvas.create_image(10, 10, anchor=NW, image=self.photo_image)
self.canvas.pack(side=LEFT, fill=BOTH)
self.canvas.config(yscrollcommand=self.scrollbar_vert.set)
self.canvas.config(xscrollcommand=self.scrollbar_hor.set)
self.canvas.config(scrollregion=self.canvas.bbox(ALL))
self.scrollbar_vert.config(command=self.canvas.yview)
self.scrollbar_hor.config(command=self.canvas.xview)
def run(self):
self.root.mainloop()
def main():
root = Tk()
root.geometry("250x150+300+300")
app = App(root)
app.run()
if __name__ == '__main__':
main()
| tomarFoto | identifier_name |
ejemplo-funciona.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import cv2
from Tkinter import *
from PIL import Image, ImageTk
import tkFileDialog
appname = "example"
class App(object):
def __init__(self, root=None):
if not root:
root = Tk()
self.root = root
self.initUI()
def initUI(self):
self.root.title(appname)
menubar = Menu(self.root)
self.root.config(menu=menubar)
fileMenu = Menu(menubar, tearoff=0)
menubar.add_command(label="Tomar Foto", command=self.tomarFoto)
# Rafa
for i in range(3):
self.root.columnconfigure(i, weight=1)
for i in range(20):
self.root.rowconfigure(i, weight=1)
self.etiqueta = Label(self.root, text="Hola")
# fin Rafa
self.canvas = Canvas(self.root)
# self.canvas.pack(side=LEFT, fill=BOTH)
self.canvas.pack(side=BOTTOM, fill=X)
self.scrollbar_vert = Scrollbar(self.root)
self.scrollbar_vert.pack(side=RIGHT, fill=Y)
self.scrollbar_hor = Scrollbar(self.root)
self.scrollbar_hor.config(orient=HORIZONTAL)
self.scrollbar_hor.pack(side=BOTTOM, fill=X)
def onExit(self):
self.root.quit()
def tomarFoto(self):
# Bloque : Tomamos la foto desde la web cam y la grabamos en formato PGM
video_capture = cv2.VideoCapture(0)
ret, frame = video_capture.read()
cv2.imshow('Video', frame)
params = list()
params.append(cv2.cv.CV_IMWRITE_PXM_BINARY)
params.append(1)
print "hola"
frame2 = cv2.cvtColor(frame, cv2.cv.CV_BGR2GRAY) # convert to grayscale
cv2.imwrite('cara2.pgm', frame2, params)
cv2.imwrite('cara2.PGM', frame2, params)
video_capture.release()
cv2.destroyAllWindows()
# Fin de Tomamos la foto desde la web cam y la grabamos en formato PGM
filename = 'cara2.pgm'
self.img = Image.open(filename)
self.photo_image = ImageTk.PhotoImage(self.img)
self.canvas.pack_forget()
self.canvas = Canvas(self.root, width=self.img.size[0], height=self.img.size[1])
self.canvas.create_image(10, 10, anchor=NW, image=self.photo_image)
self.canvas.pack(side=LEFT, fill=BOTH)
self.canvas.config(yscrollcommand=self.scrollbar_vert.set)
self.canvas.config(xscrollcommand=self.scrollbar_hor.set)
self.canvas.config(scrollregion=self.canvas.bbox(ALL))
self.scrollbar_vert.config(command=self.canvas.yview)
self.scrollbar_hor.config(command=self.canvas.xview)
def run(self):
self.root.mainloop()
def main():
root = Tk()
root.geometry("250x150+300+300")
app = App(root)
app.run()
if __name__ == '__main__':
| main() | conditional_block |
|
benefit-owner.js | const UrlPathValidator = require('../../../services/validators/url-path-validator')
const referenceIdHelper = require('../../helpers/reference-id-helper')
const BenefitOwner = require('../../../services/domain/benefit-owner')
const ValidationError = require('../../../services/errors/validation-error')
const insertBenefitOwner = require('../../../services/data/insert-benefit-owner')
const SessionHandler = require('../../../services/validators/session-handler')
module.exports = function (router) {
router.get('/apply/:claimType/new-eligibility/benefit-owner', function (req, res) {
UrlPathValidator(req.params)
const isValidSession = SessionHandler.validateSession(req.session, req.url)
if (!isValidSession) { | dob: req.session.dobEncoded,
relationship: req.session.relationship,
benefit: req.session.benefit,
referenceId: req.session.referenceId
})
})
router.post('/apply/:claimType/new-eligibility/benefit-owner', function (req, res, next) {
UrlPathValidator(req.params)
const isValidSession = SessionHandler.validateSession(req.session, req.url)
if (!isValidSession) {
return res.redirect(SessionHandler.getErrorPath(req.session, req.url))
}
const benefitOwnerBody = req.body
try {
const benefitOwner = new BenefitOwner(
req.body.FirstName,
req.body.LastName,
req.body['dob-day'],
req.body['dob-month'],
req.body['dob-year'],
req.body.NationalInsuranceNumber)
const referenceAndEligibilityId = referenceIdHelper.extractReferenceId(req.session.referenceId)
return insertBenefitOwner(referenceAndEligibilityId.reference, referenceAndEligibilityId.id, benefitOwner)
.then(function () {
return res.redirect(`/apply/${req.params.claimType}/new-eligibility/about-you`)
})
.catch(function (error) {
next(error)
})
} catch (error) {
if (error instanceof ValidationError) {
return renderValidationError(req, res, benefitOwnerBody, error.validationErrors, false)
} else {
throw error
}
}
})
}
function renderValidationError (req, res, benefitOwnerBody, validationErrors, isDuplicateClaim) {
return res.status(400).render('apply/new-eligibility/benefit-owner', {
errors: validationErrors,
isDuplicateClaim: isDuplicateClaim,
claimType: req.session.claimType,
dob: req.session.dobEncoded,
relationship: req.session.relationship,
benefit: req.session.benefit,
referenceId: req.session.referenceId,
benefitOwner: benefitOwnerBody
})
} | return res.redirect(SessionHandler.getErrorPath(req.session, req.url))
}
return res.render('apply/new-eligibility/benefit-owner', {
claimType: req.session.claimType, | random_line_split |
benefit-owner.js | const UrlPathValidator = require('../../../services/validators/url-path-validator')
const referenceIdHelper = require('../../helpers/reference-id-helper')
const BenefitOwner = require('../../../services/domain/benefit-owner')
const ValidationError = require('../../../services/errors/validation-error')
const insertBenefitOwner = require('../../../services/data/insert-benefit-owner')
const SessionHandler = require('../../../services/validators/session-handler')
module.exports = function (router) {
router.get('/apply/:claimType/new-eligibility/benefit-owner', function (req, res) {
UrlPathValidator(req.params)
const isValidSession = SessionHandler.validateSession(req.session, req.url)
if (!isValidSession) {
return res.redirect(SessionHandler.getErrorPath(req.session, req.url))
}
return res.render('apply/new-eligibility/benefit-owner', {
claimType: req.session.claimType,
dob: req.session.dobEncoded,
relationship: req.session.relationship,
benefit: req.session.benefit,
referenceId: req.session.referenceId
})
})
router.post('/apply/:claimType/new-eligibility/benefit-owner', function (req, res, next) {
UrlPathValidator(req.params)
const isValidSession = SessionHandler.validateSession(req.session, req.url)
if (!isValidSession) {
return res.redirect(SessionHandler.getErrorPath(req.session, req.url))
}
const benefitOwnerBody = req.body
try {
const benefitOwner = new BenefitOwner(
req.body.FirstName,
req.body.LastName,
req.body['dob-day'],
req.body['dob-month'],
req.body['dob-year'],
req.body.NationalInsuranceNumber)
const referenceAndEligibilityId = referenceIdHelper.extractReferenceId(req.session.referenceId)
return insertBenefitOwner(referenceAndEligibilityId.reference, referenceAndEligibilityId.id, benefitOwner)
.then(function () {
return res.redirect(`/apply/${req.params.claimType}/new-eligibility/about-you`)
})
.catch(function (error) {
next(error)
})
} catch (error) {
if (error instanceof ValidationError) | else {
throw error
}
}
})
}
function renderValidationError (req, res, benefitOwnerBody, validationErrors, isDuplicateClaim) {
return res.status(400).render('apply/new-eligibility/benefit-owner', {
errors: validationErrors,
isDuplicateClaim: isDuplicateClaim,
claimType: req.session.claimType,
dob: req.session.dobEncoded,
relationship: req.session.relationship,
benefit: req.session.benefit,
referenceId: req.session.referenceId,
benefitOwner: benefitOwnerBody
})
}
| {
return renderValidationError(req, res, benefitOwnerBody, error.validationErrors, false)
} | conditional_block |
benefit-owner.js | const UrlPathValidator = require('../../../services/validators/url-path-validator')
const referenceIdHelper = require('../../helpers/reference-id-helper')
const BenefitOwner = require('../../../services/domain/benefit-owner')
const ValidationError = require('../../../services/errors/validation-error')
const insertBenefitOwner = require('../../../services/data/insert-benefit-owner')
const SessionHandler = require('../../../services/validators/session-handler')
module.exports = function (router) {
router.get('/apply/:claimType/new-eligibility/benefit-owner', function (req, res) {
UrlPathValidator(req.params)
const isValidSession = SessionHandler.validateSession(req.session, req.url)
if (!isValidSession) {
return res.redirect(SessionHandler.getErrorPath(req.session, req.url))
}
return res.render('apply/new-eligibility/benefit-owner', {
claimType: req.session.claimType,
dob: req.session.dobEncoded,
relationship: req.session.relationship,
benefit: req.session.benefit,
referenceId: req.session.referenceId
})
})
router.post('/apply/:claimType/new-eligibility/benefit-owner', function (req, res, next) {
UrlPathValidator(req.params)
const isValidSession = SessionHandler.validateSession(req.session, req.url)
if (!isValidSession) {
return res.redirect(SessionHandler.getErrorPath(req.session, req.url))
}
const benefitOwnerBody = req.body
try {
const benefitOwner = new BenefitOwner(
req.body.FirstName,
req.body.LastName,
req.body['dob-day'],
req.body['dob-month'],
req.body['dob-year'],
req.body.NationalInsuranceNumber)
const referenceAndEligibilityId = referenceIdHelper.extractReferenceId(req.session.referenceId)
return insertBenefitOwner(referenceAndEligibilityId.reference, referenceAndEligibilityId.id, benefitOwner)
.then(function () {
return res.redirect(`/apply/${req.params.claimType}/new-eligibility/about-you`)
})
.catch(function (error) {
next(error)
})
} catch (error) {
if (error instanceof ValidationError) {
return renderValidationError(req, res, benefitOwnerBody, error.validationErrors, false)
} else {
throw error
}
}
})
}
function renderValidationError (req, res, benefitOwnerBody, validationErrors, isDuplicateClaim) | {
return res.status(400).render('apply/new-eligibility/benefit-owner', {
errors: validationErrors,
isDuplicateClaim: isDuplicateClaim,
claimType: req.session.claimType,
dob: req.session.dobEncoded,
relationship: req.session.relationship,
benefit: req.session.benefit,
referenceId: req.session.referenceId,
benefitOwner: benefitOwnerBody
})
} | identifier_body |
|
benefit-owner.js | const UrlPathValidator = require('../../../services/validators/url-path-validator')
const referenceIdHelper = require('../../helpers/reference-id-helper')
const BenefitOwner = require('../../../services/domain/benefit-owner')
const ValidationError = require('../../../services/errors/validation-error')
const insertBenefitOwner = require('../../../services/data/insert-benefit-owner')
const SessionHandler = require('../../../services/validators/session-handler')
module.exports = function (router) {
router.get('/apply/:claimType/new-eligibility/benefit-owner', function (req, res) {
UrlPathValidator(req.params)
const isValidSession = SessionHandler.validateSession(req.session, req.url)
if (!isValidSession) {
return res.redirect(SessionHandler.getErrorPath(req.session, req.url))
}
return res.render('apply/new-eligibility/benefit-owner', {
claimType: req.session.claimType,
dob: req.session.dobEncoded,
relationship: req.session.relationship,
benefit: req.session.benefit,
referenceId: req.session.referenceId
})
})
router.post('/apply/:claimType/new-eligibility/benefit-owner', function (req, res, next) {
UrlPathValidator(req.params)
const isValidSession = SessionHandler.validateSession(req.session, req.url)
if (!isValidSession) {
return res.redirect(SessionHandler.getErrorPath(req.session, req.url))
}
const benefitOwnerBody = req.body
try {
const benefitOwner = new BenefitOwner(
req.body.FirstName,
req.body.LastName,
req.body['dob-day'],
req.body['dob-month'],
req.body['dob-year'],
req.body.NationalInsuranceNumber)
const referenceAndEligibilityId = referenceIdHelper.extractReferenceId(req.session.referenceId)
return insertBenefitOwner(referenceAndEligibilityId.reference, referenceAndEligibilityId.id, benefitOwner)
.then(function () {
return res.redirect(`/apply/${req.params.claimType}/new-eligibility/about-you`)
})
.catch(function (error) {
next(error)
})
} catch (error) {
if (error instanceof ValidationError) {
return renderValidationError(req, res, benefitOwnerBody, error.validationErrors, false)
} else {
throw error
}
}
})
}
function | (req, res, benefitOwnerBody, validationErrors, isDuplicateClaim) {
return res.status(400).render('apply/new-eligibility/benefit-owner', {
errors: validationErrors,
isDuplicateClaim: isDuplicateClaim,
claimType: req.session.claimType,
dob: req.session.dobEncoded,
relationship: req.session.relationship,
benefit: req.session.benefit,
referenceId: req.session.referenceId,
benefitOwner: benefitOwnerBody
})
}
| renderValidationError | identifier_name |
init.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::RegisterBindings;
use crate::dom::bindings::proxyhandler;
use crate::script_runtime::JSEngineSetup;
use crate::serviceworker_manager::ServiceWorkerManager;
use script_traits::SWManagerSenders;
#[cfg(target_os = "linux")]
#[allow(unsafe_code)]
fn perform_platform_specific_initialization() {
// 4096 is default max on many linux systems
const MAX_FILE_LIMIT: libc::rlim_t = 4096;
| let mut rlim = libc::rlimit {
rlim_cur: 0,
rlim_max: 0,
};
match libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => {
if rlim.rlim_cur >= MAX_FILE_LIMIT {
// we have more than enough
return;
}
rlim.rlim_cur = match rlim.rlim_max {
libc::RLIM_INFINITY => MAX_FILE_LIMIT,
_ => {
if rlim.rlim_max < MAX_FILE_LIMIT {
rlim.rlim_max
} else {
MAX_FILE_LIMIT
}
},
};
match libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) {
0 => (),
_ => warn!("Failed to set file count limit"),
};
},
_ => warn!("Failed to get file count limit"),
};
}
}
#[cfg(not(target_os = "linux"))]
fn perform_platform_specific_initialization() {}
pub fn init_service_workers(sw_senders: SWManagerSenders) {
// Spawn the service worker manager passing the constellation sender
ServiceWorkerManager::spawn_manager(sw_senders);
}
#[allow(unsafe_code)]
pub fn init() -> JSEngineSetup {
unsafe {
proxyhandler::init();
// Create the global vtables used by the (generated) DOM
// bindings to implement JS proxies.
RegisterBindings::RegisterProxyHandlers();
}
perform_platform_specific_initialization();
JSEngineSetup::new()
} | // Bump up our number of file descriptors to save us from impending doom caused by an onslaught
// of iframes.
unsafe { | random_line_split |
init.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::RegisterBindings;
use crate::dom::bindings::proxyhandler;
use crate::script_runtime::JSEngineSetup;
use crate::serviceworker_manager::ServiceWorkerManager;
use script_traits::SWManagerSenders;
#[cfg(target_os = "linux")]
#[allow(unsafe_code)]
fn perform_platform_specific_initialization() {
// 4096 is default max on many linux systems
const MAX_FILE_LIMIT: libc::rlim_t = 4096;
// Bump up our number of file descriptors to save us from impending doom caused by an onslaught
// of iframes.
unsafe {
let mut rlim = libc::rlimit {
rlim_cur: 0,
rlim_max: 0,
};
match libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => {
if rlim.rlim_cur >= MAX_FILE_LIMIT {
// we have more than enough
return;
}
rlim.rlim_cur = match rlim.rlim_max {
libc::RLIM_INFINITY => MAX_FILE_LIMIT,
_ => {
if rlim.rlim_max < MAX_FILE_LIMIT {
rlim.rlim_max
} else {
MAX_FILE_LIMIT
}
},
};
match libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) {
0 => (),
_ => warn!("Failed to set file count limit"),
};
},
_ => warn!("Failed to get file count limit"),
};
}
}
#[cfg(not(target_os = "linux"))]
fn perform_platform_specific_initialization() |
pub fn init_service_workers(sw_senders: SWManagerSenders) {
// Spawn the service worker manager passing the constellation sender
ServiceWorkerManager::spawn_manager(sw_senders);
}
#[allow(unsafe_code)]
pub fn init() -> JSEngineSetup {
unsafe {
proxyhandler::init();
// Create the global vtables used by the (generated) DOM
// bindings to implement JS proxies.
RegisterBindings::RegisterProxyHandlers();
}
perform_platform_specific_initialization();
JSEngineSetup::new()
}
| {} | identifier_body |
init.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::RegisterBindings;
use crate::dom::bindings::proxyhandler;
use crate::script_runtime::JSEngineSetup;
use crate::serviceworker_manager::ServiceWorkerManager;
use script_traits::SWManagerSenders;
#[cfg(target_os = "linux")]
#[allow(unsafe_code)]
fn | () {
// 4096 is default max on many linux systems
const MAX_FILE_LIMIT: libc::rlim_t = 4096;
// Bump up our number of file descriptors to save us from impending doom caused by an onslaught
// of iframes.
unsafe {
let mut rlim = libc::rlimit {
rlim_cur: 0,
rlim_max: 0,
};
match libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => {
if rlim.rlim_cur >= MAX_FILE_LIMIT {
// we have more than enough
return;
}
rlim.rlim_cur = match rlim.rlim_max {
libc::RLIM_INFINITY => MAX_FILE_LIMIT,
_ => {
if rlim.rlim_max < MAX_FILE_LIMIT {
rlim.rlim_max
} else {
MAX_FILE_LIMIT
}
},
};
match libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) {
0 => (),
_ => warn!("Failed to set file count limit"),
};
},
_ => warn!("Failed to get file count limit"),
};
}
}
#[cfg(not(target_os = "linux"))]
fn perform_platform_specific_initialization() {}
pub fn init_service_workers(sw_senders: SWManagerSenders) {
// Spawn the service worker manager passing the constellation sender
ServiceWorkerManager::spawn_manager(sw_senders);
}
#[allow(unsafe_code)]
pub fn init() -> JSEngineSetup {
unsafe {
proxyhandler::init();
// Create the global vtables used by the (generated) DOM
// bindings to implement JS proxies.
RegisterBindings::RegisterProxyHandlers();
}
perform_platform_specific_initialization();
JSEngineSetup::new()
}
| perform_platform_specific_initialization | identifier_name |
params.py | """
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import default
from resource_management.libraries.functions import format_jvm_option
from resource_management.libraries.functions import format
from resource_management.libraries.functions.version import format_stack_version, compare_versions
from ambari_commons.os_check import OSCheck
from resource_management.libraries.script.script import Script
config = Script.get_config()
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
iop_stack_version = format_stack_version(stack_version_unformatted)
# hadoop default params
mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
hadoop_lib_home = stack_select.get_hadoop_dir("lib")
hadoop_bin = stack_select.get_hadoop_dir("sbin")
hadoop_home = '/usr'
create_lib_snappy_symlinks = True
# IOP 4.0+ params
if Script.is_stack_greater_or_equal("4.0"):
mapreduce_libs_path = "/usr/iop/current/hadoop-mapreduce-client/*"
hadoop_home = stack_select.get_hadoop_dir("home")
create_lib_snappy_symlinks = False
current_service = config['serviceName']
#security params
security_enabled = config['configurations']['cluster-env']['security_enabled']
#users and groups
has_hadoop_env = 'hadoop-env' in config['configurations']
mapred_user = config['configurations']['mapred-env']['mapred_user']
hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
yarn_user = config['configurations']['yarn-env']['yarn_user']
user_group = config['configurations']['cluster-env']['user_group']
#hosts
hostname = config["hostname"]
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
rm_host = default("/clusterHostInfo/rm_host", [])
slave_hosts = default("/clusterHostInfo/slave_hosts", [])
oozie_servers = default("/clusterHostInfo/oozie_server", [])
hcat_server_hosts = default("/clusterHostInfo/webhcat_server_host", [])
hive_server_host = default("/clusterHostInfo/hive_server_host", [])
hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
hs_host = default("/clusterHostInfo/hs_host", [])
jtnode_host = default("/clusterHostInfo/jtnode_host", [])
namenode_host = default("/clusterHostInfo/namenode_host", [])
zk_hosts = default("/clusterHostInfo/zookeeper_hosts", [])
ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", [])
has_namenode = not len(namenode_host) == 0
has_resourcemanager = not len(rm_host) == 0
has_slaves = not len(slave_hosts) == 0
has_oozie_server = not len(oozie_servers) == 0
has_hcat_server_host = not len(hcat_server_hosts) == 0
has_hive_server_host = not len(hive_server_host) == 0
has_hbase_masters = not len(hbase_master_hosts) == 0
has_zk_host = not len(zk_hosts) == 0
has_ganglia_server = not len(ganglia_server_hosts) == 0
has_metric_collector = not len(ams_collector_hosts) == 0
is_namenode_master = hostname in namenode_host
is_jtnode_master = hostname in jtnode_host
is_rmnode_master = hostname in rm_host
is_hsnode_master = hostname in hs_host
is_hbase_master = hostname in hbase_master_hosts
is_slave = hostname in slave_hosts
if has_ganglia_server:
ganglia_server_host = ganglia_server_hosts[0]
if has_metric_collector:
if 'cluster-env' in config['configurations'] and \
'metrics_collector_vip_host' in config['configurations']['cluster-env']:
metric_collector_host = config['configurations']['cluster-env']['metrics_collector_vip_host']
else:
metric_collector_host = ams_collector_hosts[0]
if 'cluster-env' in config['configurations'] and \
'metrics_collector_vip_port' in config['configurations']['cluster-env']:
metric_collector_port = config['configurations']['cluster-env']['metrics_collector_vip_port']
else:
metric_collector_web_address = default("/configurations/ams-site/timeline.metrics.service.webapp.address", "0.0.0.0:6188")
if metric_collector_web_address.find(':') != -1:
metric_collector_port = metric_collector_web_address.split(':')[1]
else:
metric_collector_port = '6188'
pass
metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 60)
#hadoop params
if has_namenode:
hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
hbase_tmp_dir = "/tmp/hbase-hbase"
| oracle_driver_url = config['hostLevelParams']['oracle_jdbc_url']
mysql_driver_url = config['hostLevelParams']['mysql_jdbc_url']
ambari_server_resources = config['hostLevelParams']['jdk_location']
oracle_driver_symlink_url = format("{ambari_server_resources}oracle-jdbc-driver.jar")
mysql_driver_symlink_url = format("{ambari_server_resources}mysql-jdbc-driver.jar")
ambari_db_rca_url = config['hostLevelParams']['ambari_db_rca_url'][0]
ambari_db_rca_driver = config['hostLevelParams']['ambari_db_rca_driver'][0]
ambari_db_rca_username = config['hostLevelParams']['ambari_db_rca_username'][0]
ambari_db_rca_password = config['hostLevelParams']['ambari_db_rca_password'][0]
if has_namenode and 'rca_enabled' in config['configurations']['hadoop-env']:
rca_enabled = config['configurations']['hadoop-env']['rca_enabled']
else:
rca_enabled = False
rca_disabled_prefix = "###"
if rca_enabled == True:
rca_prefix = ""
else:
rca_prefix = rca_disabled_prefix
#hadoop-env.sh
java_home = config['hostLevelParams']['java_home']
jsvc_path = "/usr/lib/bigtop-utils"
hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
jtnode_opt_newsize = "200m"
jtnode_opt_maxnewsize = "200m"
jtnode_heapsize = "1024m"
ttnode_heapsize = "1024m"
dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
#log4j.properties
yarn_log_dir_prefix = default("/configurations/yarn-env/yarn_log_dir_prefix","/var/log/hadoop-yarn")
dfs_hosts = default('/configurations/hdfs-site/dfs.hosts', None)
#log4j.properties
if (('hdfs-log4j' in config['configurations']) and ('content' in config['configurations']['hdfs-log4j'])):
log4j_props = config['configurations']['hdfs-log4j']['content']
if (('yarn-log4j' in config['configurations']) and ('content' in config['configurations']['yarn-log4j'])):
log4j_props += config['configurations']['yarn-log4j']['content']
else:
log4j_props = None
refresh_topology = False
command_params = config["commandParams"] if "commandParams" in config else None
if command_params is not None:
refresh_topology = bool(command_params["refresh_topology"]) if "refresh_topology" in command_params else False
ambari_libs_dir = "/var/lib/ambari-agent/lib"
is_webhdfs_enabled = config['configurations']['hdfs-site']['dfs.webhdfs.enabled']
default_fs = config['configurations']['core-site']['fs.defaultFS']
#host info
all_hosts = default("/clusterHostInfo/all_hosts", [])
all_racks = default("/clusterHostInfo/all_racks", [])
all_ipv4_ips = default("/clusterHostInfo/all_ipv4_ips", [])
slave_hosts = default("/clusterHostInfo/slave_hosts", [])
#topology files
net_topology_script_file_path = "/etc/hadoop/conf/topology_script.py"
net_topology_script_dir = os.path.dirname(net_topology_script_file_path)
net_topology_mapping_data_file_name = 'topology_mappings.data'
net_topology_mapping_data_file_path = os.path.join(net_topology_script_dir, net_topology_mapping_data_file_name) | #db params
server_db_name = config['hostLevelParams']['db_name']
db_driver_filename = config['hostLevelParams']['db_driver_filename'] | random_line_split |
params.py | """
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import default
from resource_management.libraries.functions import format_jvm_option
from resource_management.libraries.functions import format
from resource_management.libraries.functions.version import format_stack_version, compare_versions
from ambari_commons.os_check import OSCheck
from resource_management.libraries.script.script import Script
config = Script.get_config()
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
iop_stack_version = format_stack_version(stack_version_unformatted)
# hadoop default params
mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
hadoop_lib_home = stack_select.get_hadoop_dir("lib")
hadoop_bin = stack_select.get_hadoop_dir("sbin")
hadoop_home = '/usr'
create_lib_snappy_symlinks = True
# IOP 4.0+ params
if Script.is_stack_greater_or_equal("4.0"):
|
current_service = config['serviceName']
#security params
security_enabled = config['configurations']['cluster-env']['security_enabled']
#users and groups
has_hadoop_env = 'hadoop-env' in config['configurations']
mapred_user = config['configurations']['mapred-env']['mapred_user']
hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
yarn_user = config['configurations']['yarn-env']['yarn_user']
user_group = config['configurations']['cluster-env']['user_group']
#hosts
hostname = config["hostname"]
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
rm_host = default("/clusterHostInfo/rm_host", [])
slave_hosts = default("/clusterHostInfo/slave_hosts", [])
oozie_servers = default("/clusterHostInfo/oozie_server", [])
hcat_server_hosts = default("/clusterHostInfo/webhcat_server_host", [])
hive_server_host = default("/clusterHostInfo/hive_server_host", [])
hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
hs_host = default("/clusterHostInfo/hs_host", [])
jtnode_host = default("/clusterHostInfo/jtnode_host", [])
namenode_host = default("/clusterHostInfo/namenode_host", [])
zk_hosts = default("/clusterHostInfo/zookeeper_hosts", [])
ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", [])
has_namenode = not len(namenode_host) == 0
has_resourcemanager = not len(rm_host) == 0
has_slaves = not len(slave_hosts) == 0
has_oozie_server = not len(oozie_servers) == 0
has_hcat_server_host = not len(hcat_server_hosts) == 0
has_hive_server_host = not len(hive_server_host) == 0
has_hbase_masters = not len(hbase_master_hosts) == 0
has_zk_host = not len(zk_hosts) == 0
has_ganglia_server = not len(ganglia_server_hosts) == 0
has_metric_collector = not len(ams_collector_hosts) == 0
is_namenode_master = hostname in namenode_host
is_jtnode_master = hostname in jtnode_host
is_rmnode_master = hostname in rm_host
is_hsnode_master = hostname in hs_host
is_hbase_master = hostname in hbase_master_hosts
is_slave = hostname in slave_hosts
if has_ganglia_server:
ganglia_server_host = ganglia_server_hosts[0]
if has_metric_collector:
if 'cluster-env' in config['configurations'] and \
'metrics_collector_vip_host' in config['configurations']['cluster-env']:
metric_collector_host = config['configurations']['cluster-env']['metrics_collector_vip_host']
else:
metric_collector_host = ams_collector_hosts[0]
if 'cluster-env' in config['configurations'] and \
'metrics_collector_vip_port' in config['configurations']['cluster-env']:
metric_collector_port = config['configurations']['cluster-env']['metrics_collector_vip_port']
else:
metric_collector_web_address = default("/configurations/ams-site/timeline.metrics.service.webapp.address", "0.0.0.0:6188")
if metric_collector_web_address.find(':') != -1:
metric_collector_port = metric_collector_web_address.split(':')[1]
else:
metric_collector_port = '6188'
pass
metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 60)
#hadoop params
if has_namenode:
hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
hbase_tmp_dir = "/tmp/hbase-hbase"
#db params
server_db_name = config['hostLevelParams']['db_name']
db_driver_filename = config['hostLevelParams']['db_driver_filename']
oracle_driver_url = config['hostLevelParams']['oracle_jdbc_url']
mysql_driver_url = config['hostLevelParams']['mysql_jdbc_url']
ambari_server_resources = config['hostLevelParams']['jdk_location']
oracle_driver_symlink_url = format("{ambari_server_resources}oracle-jdbc-driver.jar")
mysql_driver_symlink_url = format("{ambari_server_resources}mysql-jdbc-driver.jar")
ambari_db_rca_url = config['hostLevelParams']['ambari_db_rca_url'][0]
ambari_db_rca_driver = config['hostLevelParams']['ambari_db_rca_driver'][0]
ambari_db_rca_username = config['hostLevelParams']['ambari_db_rca_username'][0]
ambari_db_rca_password = config['hostLevelParams']['ambari_db_rca_password'][0]
if has_namenode and 'rca_enabled' in config['configurations']['hadoop-env']:
rca_enabled = config['configurations']['hadoop-env']['rca_enabled']
else:
rca_enabled = False
rca_disabled_prefix = "###"
if rca_enabled == True:
rca_prefix = ""
else:
rca_prefix = rca_disabled_prefix
#hadoop-env.sh
java_home = config['hostLevelParams']['java_home']
jsvc_path = "/usr/lib/bigtop-utils"
hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
jtnode_opt_newsize = "200m"
jtnode_opt_maxnewsize = "200m"
jtnode_heapsize = "1024m"
ttnode_heapsize = "1024m"
dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
#log4j.properties
yarn_log_dir_prefix = default("/configurations/yarn-env/yarn_log_dir_prefix","/var/log/hadoop-yarn")
dfs_hosts = default('/configurations/hdfs-site/dfs.hosts', None)
#log4j.properties
if (('hdfs-log4j' in config['configurations']) and ('content' in config['configurations']['hdfs-log4j'])):
log4j_props = config['configurations']['hdfs-log4j']['content']
if (('yarn-log4j' in config['configurations']) and ('content' in config['configurations']['yarn-log4j'])):
log4j_props += config['configurations']['yarn-log4j']['content']
else:
log4j_props = None
refresh_topology = False
command_params = config["commandParams"] if "commandParams" in config else None
if command_params is not None:
refresh_topology = bool(command_params["refresh_topology"]) if "refresh_topology" in command_params else False
ambari_libs_dir = "/var/lib/ambari-agent/lib"
is_webhdfs_enabled = config['configurations']['hdfs-site']['dfs.webhdfs.enabled']
default_fs = config['configurations']['core-site']['fs.defaultFS']
#host info
all_hosts = default("/clusterHostInfo/all_hosts", [])
all_racks = default("/clusterHostInfo/all_racks", [])
all_ipv4_ips = default("/clusterHostInfo/all_ipv4_ips", [])
slave_hosts = default("/clusterHostInfo/slave_hosts", [])
#topology files
net_topology_script_file_path = "/etc/hadoop/conf/topology_script.py"
net_topology_script_dir = os.path.dirname(net_topology_script_file_path)
net_topology_mapping_data_file_name = 'topology_mappings.data'
net_topology_mapping_data_file_path = os.path.join(net_topology_script_dir, net_topology_mapping_data_file_name)
| mapreduce_libs_path = "/usr/iop/current/hadoop-mapreduce-client/*"
hadoop_home = stack_select.get_hadoop_dir("home")
create_lib_snappy_symlinks = False | conditional_block |
moderatorPurge.js | "use strict";
const { EffectDependency } = require("../models/effectModels");
const { EffectCategory } = require('../../../shared/effect-constants');
const logger = require('../../logwrapper');
const twitchChat = require("../../chat/twitch-chat");
const model = {
definition: {
id: "firebot:modpurge",
name: "Purge",
description: "Purge a users chat messages from chat.",
icon: "fad fa-comment-slash",
categories: [EffectCategory.COMMON, EffectCategory.MODERATION],
dependencies: [EffectDependency.CHAT]
},
optionsTemplate: `
<eos-container header="Target" pad-top="true">
<div class="input-group">
<span class="input-group-addon" id="username-type">Username</span>
<input ng-model="effect.username" type="text" class="form-control" id="list-username-setting" aria-describedby="list-username-type" replace-variables menu-position="below">
</div>
</eos-container>
`,
optionsController: () => {},
optionsValidator: effect => {
let errors = [];
if (effect.username == null && effect.username !== "") |
return errors;
},
onTriggerEvent: async event => {
twitchChat.purgeUserMessages(event.effect.username);
logger.debug(event.effect.username + " was purged via the purge effect.");
return true;
}
};
module.exports = model;
| {
errors.push("Please put in a username.");
} | conditional_block |
moderatorPurge.js | "use strict";
const { EffectDependency } = require("../models/effectModels");
const { EffectCategory } = require('../../../shared/effect-constants');
const logger = require('../../logwrapper');
const twitchChat = require("../../chat/twitch-chat");
| description: "Purge a users chat messages from chat.",
icon: "fad fa-comment-slash",
categories: [EffectCategory.COMMON, EffectCategory.MODERATION],
dependencies: [EffectDependency.CHAT]
},
optionsTemplate: `
<eos-container header="Target" pad-top="true">
<div class="input-group">
<span class="input-group-addon" id="username-type">Username</span>
<input ng-model="effect.username" type="text" class="form-control" id="list-username-setting" aria-describedby="list-username-type" replace-variables menu-position="below">
</div>
</eos-container>
`,
optionsController: () => {},
optionsValidator: effect => {
let errors = [];
if (effect.username == null && effect.username !== "") {
errors.push("Please put in a username.");
}
return errors;
},
onTriggerEvent: async event => {
twitchChat.purgeUserMessages(event.effect.username);
logger.debug(event.effect.username + " was purged via the purge effect.");
return true;
}
};
module.exports = model; | const model = {
definition: {
id: "firebot:modpurge",
name: "Purge", | random_line_split |
clarity_cli.rs | /*
copyright: (c) 2013-2019 by Blockstack PBC, a public benefit corporation.
This file is part of Blockstack.
Blockstack is free software. You may redistribute or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License or
(at your option) any later version.
Blockstack is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY, including without the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Blockstack. If not, see <http://www.gnu.org/licenses/>.
*/
#![allow(unused_imports)]
#![allow(dead_code)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
extern crate blockstack_lib;
use std::env;
use blockstack_lib::{ util::log, clarity };
fn | () {
log::set_loglevel(log::LOG_DEBUG).unwrap();
let argv : Vec<String> = env::args().collect();
clarity::invoke_command(&argv[0], &argv[1..]);
}
| main | identifier_name |
clarity_cli.rs | /*
copyright: (c) 2013-2019 by Blockstack PBC, a public benefit corporation.
This file is part of Blockstack.
Blockstack is free software. You may redistribute or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License or
(at your option) any later version.
Blockstack is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY, including without the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Blockstack. If not, see <http://www.gnu.org/licenses/>.
*/
#![allow(unused_imports)]
#![allow(dead_code)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
extern crate blockstack_lib;
use std::env;
use blockstack_lib::{ util::log, clarity };
fn main() | {
log::set_loglevel(log::LOG_DEBUG).unwrap();
let argv : Vec<String> = env::args().collect();
clarity::invoke_command(&argv[0], &argv[1..]);
} | identifier_body |
|
clarity_cli.rs | /*
copyright: (c) 2013-2019 by Blockstack PBC, a public benefit corporation.
This file is part of Blockstack. | it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License or
(at your option) any later version.
Blockstack is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY, including without the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Blockstack. If not, see <http://www.gnu.org/licenses/>.
*/
#![allow(unused_imports)]
#![allow(dead_code)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
extern crate blockstack_lib;
use std::env;
use blockstack_lib::{ util::log, clarity };
fn main() {
log::set_loglevel(log::LOG_DEBUG).unwrap();
let argv : Vec<String> = env::args().collect();
clarity::invoke_command(&argv[0], &argv[1..]);
} |
Blockstack is free software. You may redistribute or modify | random_line_split |
cat-file.rs | extern crate gitters;
extern crate rustc_serialize;
extern crate docopt;
use docopt::Docopt;
use gitters::cli;
use gitters::commits;
use gitters::objects;
use gitters::revisions;
const USAGE: &'static str = "
cat-file - Provide content or type and size information for repository objects
Usage:
cat-file -t <object>
cat-file -s <object>
cat-file -e <object>
cat-file -p <object>
cat-file (-h | --help)
Options:
-h --help Show this screen.
-t Instead of the content, show the object type identified by <object>.
-s Instead of the content, show the object size identified by <object>.
-e Surpress all output; instead exit with zero status if <object> exists and is a valid
object.
-p Pretty-print the contents of <object> based on its type.
";
#[derive(RustcDecodable)]
struct Args {
flag_t: bool,
flag_s: bool,
flag_e: bool,
flag_p: bool,
arg_object: String,
}
fn show_type(name: &objects::Name) -> cli::Result {
let header = try!(cli::wrap_with_status(objects::read_header(&name), 1));
let object_type = match header.object_type {
objects::Type::Blob => "blob",
objects::Type::Tree => "tree",
objects::Type::Commit => "commit",
};
println!("{}", object_type);
cli::success()
}
fn show_size(name: &objects::Name) -> cli::Result {
let header = try!(cli::wrap_with_status(objects::read_header(&name), 1));
println!("{}", header.content_length);
cli::success()
}
fn check_validity(name: &objects::Name) -> cli::Result {
try!(cli::wrap_with_status(objects::read_header(&name), 1)); | fn show_contents(name: &objects::Name) -> cli::Result {
let obj = try!(cli::wrap_with_status(objects::read_object(&name), 1));
match obj {
objects::Object::Commit(commit) => {
let objects::Name(name) = commit.name;
println!("commit {}", name);
let objects::Name(tree) = commit.tree;
println!("tree : {}", tree);
if commit.parent.is_some() {
let objects::Name(parent) = commit.parent.unwrap();
println!("parent : {}", parent);
}
let commits::CommitUser { name, date } = commit.author;
println!("author : {} at {}", name, date);
let commits::CommitUser { name, date } = commit.committer;
println!("committer: {} at {}", name, date);
println!("");
println!("{}", commit.message);
},
objects::Object::Blob(contents) => {
// Don't use println, as we don't want to include an extra newline at the end of the
// blob contents.
print!("{}", contents);
},
_ => { /* Not handled yet */ }
}
cli::success()
}
fn dispatch_for_args(args: &Args) -> cli::Result {
let name = try!(cli::wrap_with_status(revisions::resolve(&args.arg_object), 1));
if args.flag_t {
show_type(&name)
} else if args.flag_s {
show_size(&name)
} else if args.flag_e {
check_validity(&name)
} else if args.flag_p {
show_contents(&name)
} else {
Err(cli::Error { message: "No flags specified".to_string(), status: 2 })
}
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
cli::exit_with(dispatch_for_args(&args))
} | cli::success()
}
| random_line_split |
cat-file.rs | extern crate gitters;
extern crate rustc_serialize;
extern crate docopt;
use docopt::Docopt;
use gitters::cli;
use gitters::commits;
use gitters::objects;
use gitters::revisions;
const USAGE: &'static str = "
cat-file - Provide content or type and size information for repository objects
Usage:
cat-file -t <object>
cat-file -s <object>
cat-file -e <object>
cat-file -p <object>
cat-file (-h | --help)
Options:
-h --help Show this screen.
-t Instead of the content, show the object type identified by <object>.
-s Instead of the content, show the object size identified by <object>.
-e Surpress all output; instead exit with zero status if <object> exists and is a valid
object.
-p Pretty-print the contents of <object> based on its type.
";
#[derive(RustcDecodable)]
struct Args {
flag_t: bool,
flag_s: bool,
flag_e: bool,
flag_p: bool,
arg_object: String,
}
fn show_type(name: &objects::Name) -> cli::Result {
let header = try!(cli::wrap_with_status(objects::read_header(&name), 1));
let object_type = match header.object_type {
objects::Type::Blob => "blob",
objects::Type::Tree => "tree",
objects::Type::Commit => "commit",
};
println!("{}", object_type);
cli::success()
}
fn show_size(name: &objects::Name) -> cli::Result {
let header = try!(cli::wrap_with_status(objects::read_header(&name), 1));
println!("{}", header.content_length);
cli::success()
}
fn check_validity(name: &objects::Name) -> cli::Result {
try!(cli::wrap_with_status(objects::read_header(&name), 1));
cli::success()
}
fn show_contents(name: &objects::Name) -> cli::Result {
let obj = try!(cli::wrap_with_status(objects::read_object(&name), 1));
match obj {
objects::Object::Commit(commit) => {
let objects::Name(name) = commit.name;
println!("commit {}", name);
let objects::Name(tree) = commit.tree;
println!("tree : {}", tree);
if commit.parent.is_some() {
let objects::Name(parent) = commit.parent.unwrap();
println!("parent : {}", parent);
}
let commits::CommitUser { name, date } = commit.author;
println!("author : {} at {}", name, date);
let commits::CommitUser { name, date } = commit.committer;
println!("committer: {} at {}", name, date);
println!("");
println!("{}", commit.message);
},
objects::Object::Blob(contents) => {
// Don't use println, as we don't want to include an extra newline at the end of the
// blob contents.
print!("{}", contents);
},
_ => { /* Not handled yet */ }
}
cli::success()
}
fn dispatch_for_args(args: &Args) -> cli::Result {
let name = try!(cli::wrap_with_status(revisions::resolve(&args.arg_object), 1));
if args.flag_t | else if args.flag_s {
show_size(&name)
} else if args.flag_e {
check_validity(&name)
} else if args.flag_p {
show_contents(&name)
} else {
Err(cli::Error { message: "No flags specified".to_string(), status: 2 })
}
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
cli::exit_with(dispatch_for_args(&args))
}
| {
show_type(&name)
} | conditional_block |
cat-file.rs | extern crate gitters;
extern crate rustc_serialize;
extern crate docopt;
use docopt::Docopt;
use gitters::cli;
use gitters::commits;
use gitters::objects;
use gitters::revisions;
const USAGE: &'static str = "
cat-file - Provide content or type and size information for repository objects
Usage:
cat-file -t <object>
cat-file -s <object>
cat-file -e <object>
cat-file -p <object>
cat-file (-h | --help)
Options:
-h --help Show this screen.
-t Instead of the content, show the object type identified by <object>.
-s Instead of the content, show the object size identified by <object>.
-e Surpress all output; instead exit with zero status if <object> exists and is a valid
object.
-p Pretty-print the contents of <object> based on its type.
";
#[derive(RustcDecodable)]
struct Args {
flag_t: bool,
flag_s: bool,
flag_e: bool,
flag_p: bool,
arg_object: String,
}
fn show_type(name: &objects::Name) -> cli::Result {
let header = try!(cli::wrap_with_status(objects::read_header(&name), 1));
let object_type = match header.object_type {
objects::Type::Blob => "blob",
objects::Type::Tree => "tree",
objects::Type::Commit => "commit",
};
println!("{}", object_type);
cli::success()
}
fn show_size(name: &objects::Name) -> cli::Result {
let header = try!(cli::wrap_with_status(objects::read_header(&name), 1));
println!("{}", header.content_length);
cli::success()
}
fn check_validity(name: &objects::Name) -> cli::Result {
try!(cli::wrap_with_status(objects::read_header(&name), 1));
cli::success()
}
fn | (name: &objects::Name) -> cli::Result {
let obj = try!(cli::wrap_with_status(objects::read_object(&name), 1));
match obj {
objects::Object::Commit(commit) => {
let objects::Name(name) = commit.name;
println!("commit {}", name);
let objects::Name(tree) = commit.tree;
println!("tree : {}", tree);
if commit.parent.is_some() {
let objects::Name(parent) = commit.parent.unwrap();
println!("parent : {}", parent);
}
let commits::CommitUser { name, date } = commit.author;
println!("author : {} at {}", name, date);
let commits::CommitUser { name, date } = commit.committer;
println!("committer: {} at {}", name, date);
println!("");
println!("{}", commit.message);
},
objects::Object::Blob(contents) => {
// Don't use println, as we don't want to include an extra newline at the end of the
// blob contents.
print!("{}", contents);
},
_ => { /* Not handled yet */ }
}
cli::success()
}
fn dispatch_for_args(args: &Args) -> cli::Result {
let name = try!(cli::wrap_with_status(revisions::resolve(&args.arg_object), 1));
if args.flag_t {
show_type(&name)
} else if args.flag_s {
show_size(&name)
} else if args.flag_e {
check_validity(&name)
} else if args.flag_p {
show_contents(&name)
} else {
Err(cli::Error { message: "No flags specified".to_string(), status: 2 })
}
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
cli::exit_with(dispatch_for_args(&args))
}
| show_contents | identifier_name |
cat-file.rs | extern crate gitters;
extern crate rustc_serialize;
extern crate docopt;
use docopt::Docopt;
use gitters::cli;
use gitters::commits;
use gitters::objects;
use gitters::revisions;
const USAGE: &'static str = "
cat-file - Provide content or type and size information for repository objects
Usage:
cat-file -t <object>
cat-file -s <object>
cat-file -e <object>
cat-file -p <object>
cat-file (-h | --help)
Options:
-h --help Show this screen.
-t Instead of the content, show the object type identified by <object>.
-s Instead of the content, show the object size identified by <object>.
-e Surpress all output; instead exit with zero status if <object> exists and is a valid
object.
-p Pretty-print the contents of <object> based on its type.
";
#[derive(RustcDecodable)]
struct Args {
flag_t: bool,
flag_s: bool,
flag_e: bool,
flag_p: bool,
arg_object: String,
}
fn show_type(name: &objects::Name) -> cli::Result {
let header = try!(cli::wrap_with_status(objects::read_header(&name), 1));
let object_type = match header.object_type {
objects::Type::Blob => "blob",
objects::Type::Tree => "tree",
objects::Type::Commit => "commit",
};
println!("{}", object_type);
cli::success()
}
fn show_size(name: &objects::Name) -> cli::Result {
let header = try!(cli::wrap_with_status(objects::read_header(&name), 1));
println!("{}", header.content_length);
cli::success()
}
fn check_validity(name: &objects::Name) -> cli::Result {
try!(cli::wrap_with_status(objects::read_header(&name), 1));
cli::success()
}
fn show_contents(name: &objects::Name) -> cli::Result |
fn dispatch_for_args(args: &Args) -> cli::Result {
let name = try!(cli::wrap_with_status(revisions::resolve(&args.arg_object), 1));
if args.flag_t {
show_type(&name)
} else if args.flag_s {
show_size(&name)
} else if args.flag_e {
check_validity(&name)
} else if args.flag_p {
show_contents(&name)
} else {
Err(cli::Error { message: "No flags specified".to_string(), status: 2 })
}
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
cli::exit_with(dispatch_for_args(&args))
}
| {
let obj = try!(cli::wrap_with_status(objects::read_object(&name), 1));
match obj {
objects::Object::Commit(commit) => {
let objects::Name(name) = commit.name;
println!("commit {}", name);
let objects::Name(tree) = commit.tree;
println!("tree : {}", tree);
if commit.parent.is_some() {
let objects::Name(parent) = commit.parent.unwrap();
println!("parent : {}", parent);
}
let commits::CommitUser { name, date } = commit.author;
println!("author : {} at {}", name, date);
let commits::CommitUser { name, date } = commit.committer;
println!("committer: {} at {}", name, date);
println!("");
println!("{}", commit.message);
},
objects::Object::Blob(contents) => {
// Don't use println, as we don't want to include an extra newline at the end of the
// blob contents.
print!("{}", contents);
},
_ => { /* Not handled yet */ }
}
cli::success()
} | identifier_body |
getProvider.js | import config from 'config';
import path from 'path';
import api from '~/api/index.js';
export const PROVIDER_TYPE = {
MOVIES: 'movies',
SHOWS: 'shows'
};
const providers = {};
/**
* Get the correct API provider for a given provider type.
*
* @param {String} providerType -- PROVIDER_TYPE.MOVIES or PROVIDER_TYPE.SHOWS
* @returns {Object}
*/
export default function getProvider(providerType) {
if (!providerType) {
throw new Error('Missing a provider type');
}
let provider = providers[providerType];
| try {
provider = require(path.resolve(__dirname, providerName + '.js'));
providers[providerType] = provider;
}
catch (e) {
const apis = Object.keys(api).reduce((acc, val, i, arr) => {
const isLast = i === arr.length - 1;
return acc += `${isLast ? 'and ' : ''}"${val}"${!isLast ? ', ' : ''}`;
}, '');
throw new Error(`Invalid provider name: "${providerName}". ` +
`Valid values are ${apis}, all lower case.`);
}
}
return provider;
} | if (!provider) {
const providerName = config.get(`alexa-libby.${providerType}.provider`).toLowerCase();
| random_line_split |
getProvider.js | import config from 'config';
import path from 'path';
import api from '~/api/index.js';
export const PROVIDER_TYPE = {
MOVIES: 'movies',
SHOWS: 'shows'
};
const providers = {};
/**
* Get the correct API provider for a given provider type.
*
* @param {String} providerType -- PROVIDER_TYPE.MOVIES or PROVIDER_TYPE.SHOWS
* @returns {Object}
*/
export default function | (providerType) {
if (!providerType) {
throw new Error('Missing a provider type');
}
let provider = providers[providerType];
if (!provider) {
const providerName = config.get(`alexa-libby.${providerType}.provider`).toLowerCase();
try {
provider = require(path.resolve(__dirname, providerName + '.js'));
providers[providerType] = provider;
}
catch (e) {
const apis = Object.keys(api).reduce((acc, val, i, arr) => {
const isLast = i === arr.length - 1;
return acc += `${isLast ? 'and ' : ''}"${val}"${!isLast ? ', ' : ''}`;
}, '');
throw new Error(`Invalid provider name: "${providerName}". ` +
`Valid values are ${apis}, all lower case.`);
}
}
return provider;
}
| getProvider | identifier_name |
getProvider.js | import config from 'config';
import path from 'path';
import api from '~/api/index.js';
export const PROVIDER_TYPE = {
MOVIES: 'movies',
SHOWS: 'shows'
};
const providers = {};
/**
* Get the correct API provider for a given provider type.
*
* @param {String} providerType -- PROVIDER_TYPE.MOVIES or PROVIDER_TYPE.SHOWS
* @returns {Object}
*/
export default function getProvider(providerType) {
if (!providerType) {
throw new Error('Missing a provider type');
}
let provider = providers[providerType];
if (!provider) |
return provider;
}
| {
const providerName = config.get(`alexa-libby.${providerType}.provider`).toLowerCase();
try {
provider = require(path.resolve(__dirname, providerName + '.js'));
providers[providerType] = provider;
}
catch (e) {
const apis = Object.keys(api).reduce((acc, val, i, arr) => {
const isLast = i === arr.length - 1;
return acc += `${isLast ? 'and ' : ''}"${val}"${!isLast ? ', ' : ''}`;
}, '');
throw new Error(`Invalid provider name: "${providerName}". ` +
`Valid values are ${apis}, all lower case.`);
}
} | conditional_block |
getProvider.js | import config from 'config';
import path from 'path';
import api from '~/api/index.js';
export const PROVIDER_TYPE = {
MOVIES: 'movies',
SHOWS: 'shows'
};
const providers = {};
/**
* Get the correct API provider for a given provider type.
*
* @param {String} providerType -- PROVIDER_TYPE.MOVIES or PROVIDER_TYPE.SHOWS
* @returns {Object}
*/
export default function getProvider(providerType) | {
if (!providerType) {
throw new Error('Missing a provider type');
}
let provider = providers[providerType];
if (!provider) {
const providerName = config.get(`alexa-libby.${providerType}.provider`).toLowerCase();
try {
provider = require(path.resolve(__dirname, providerName + '.js'));
providers[providerType] = provider;
}
catch (e) {
const apis = Object.keys(api).reduce((acc, val, i, arr) => {
const isLast = i === arr.length - 1;
return acc += `${isLast ? 'and ' : ''}"${val}"${!isLast ? ', ' : ''}`;
}, '');
throw new Error(`Invalid provider name: "${providerName}". ` +
`Valid values are ${apis}, all lower case.`);
}
}
return provider;
} | identifier_body |
|
rAF.js | // Used exclusively for testing in PhantomJS environment.
// https://gist.github.com/paulirish/1579671
// http://paulirish.com/2011/requestanimationframe-for-smart-animating/
// http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating |
(function() {
var lastTime = 0;
var vendors = ['ms', 'moz', 'webkit', 'o'];
for(var x = 0; x < vendors.length && !window.requestAnimationFrame; ++x) {
window.requestAnimationFrame = window[vendors[x]+'RequestAnimationFrame'];
window.cancelAnimationFrame = window[vendors[x]+'CancelAnimationFrame'] || window[vendors[x]+'CancelRequestAnimationFrame'];
}
if (!window.requestAnimationFrame)
window.requestAnimationFrame = function(callback, element) {
var currTime = new Date().getTime();
var timeToCall = Math.max(0, 16 - (currTime - lastTime));
var id = window.setTimeout(function() { callback(currTime + timeToCall); },
timeToCall);
lastTime = currTime + timeToCall;
return id;
};
if (!window.cancelAnimationFrame)
window.cancelAnimationFrame = function(id) {
clearTimeout(id);
};
}()); |
// requestAnimationFrame polyfill by Erik Möller. fixes from Paul Irish and Tino Zijdel
// MIT license | random_line_split |
TestDataFormatterObjCNSError.py | # encoding: utf-8
"""
Test lldb data formatter subsystem.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
from ObjCDataFormatterTestCase import ObjCDataFormatterTestCase
class ObjCDataFormatterNSError(ObjCDataFormatterTestCase):
@skipUnlessDarwin
def | (self):
"""Test formatters for NSError."""
self.appkit_tester_impl(self.nserror_data_formatter_commands)
def nserror_data_formatter_commands(self):
self.expect(
'frame variable nserror', substrs=['domain: @"Foobar" - code: 12'])
self.expect(
'frame variable nserrorptr',
substrs=['domain: @"Foobar" - code: 12'])
self.expect(
'frame variable nserror->_userInfo', substrs=['2 key/value pairs'])
self.expect(
'frame variable nserror->_userInfo --ptr-depth 1 -d run-target',
substrs=['@"a"', '@"b"', "1", "2"])
| test_nserror_with_run_command | identifier_name |
TestDataFormatterObjCNSError.py | # encoding: utf-8
"""
Test lldb data formatter subsystem.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import * |
class ObjCDataFormatterNSError(ObjCDataFormatterTestCase):
@skipUnlessDarwin
def test_nserror_with_run_command(self):
"""Test formatters for NSError."""
self.appkit_tester_impl(self.nserror_data_formatter_commands)
def nserror_data_formatter_commands(self):
self.expect(
'frame variable nserror', substrs=['domain: @"Foobar" - code: 12'])
self.expect(
'frame variable nserrorptr',
substrs=['domain: @"Foobar" - code: 12'])
self.expect(
'frame variable nserror->_userInfo', substrs=['2 key/value pairs'])
self.expect(
'frame variable nserror->_userInfo --ptr-depth 1 -d run-target',
substrs=['@"a"', '@"b"', "1", "2"]) | from lldbsuite.test import lldbutil
from ObjCDataFormatterTestCase import ObjCDataFormatterTestCase
| random_line_split |
TestDataFormatterObjCNSError.py | # encoding: utf-8
"""
Test lldb data formatter subsystem.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
from ObjCDataFormatterTestCase import ObjCDataFormatterTestCase
class ObjCDataFormatterNSError(ObjCDataFormatterTestCase):
@skipUnlessDarwin
def test_nserror_with_run_command(self):
|
def nserror_data_formatter_commands(self):
self.expect(
'frame variable nserror', substrs=['domain: @"Foobar" - code: 12'])
self.expect(
'frame variable nserrorptr',
substrs=['domain: @"Foobar" - code: 12'])
self.expect(
'frame variable nserror->_userInfo', substrs=['2 key/value pairs'])
self.expect(
'frame variable nserror->_userInfo --ptr-depth 1 -d run-target',
substrs=['@"a"', '@"b"', "1", "2"])
| """Test formatters for NSError."""
self.appkit_tester_impl(self.nserror_data_formatter_commands) | identifier_body |
models_tests.py | from nose.tools import eq_, ok_
from django.test import TestCase
from us_ignite.snippets.models import Snippet
from us_ignite.snippets.tests import fixtures
class TestSnippetModel(TestCase):
def tearDown(self):
Snippet.objects.all().delete()
def get_instance(self):
data = {
'name': 'Gigabit snippets',
'slug': 'featured',
'url': 'http://us-ignite.org/',
}
return Snippet.objects.create(**data)
def test_instance_is_created_successfully(self):
instance = self.get_instance()
eq_(instance.name, 'Gigabit snippets')
eq_(instance.status, Snippet.DRAFT)
eq_(instance.url, 'http://us-ignite.org/')
eq_(instance.url_text, '')
eq_(instance.body, '')
eq_(instance.image, '') | ok_(instance.created)
ok_(instance.modified)
eq_(instance.slug, 'featured')
ok_(instance.id)
eq_(instance.notes, '')
def test_instance_name_is_used_as_title(self):
instance = fixtures.get_snippet(name='About page')
eq_(instance.title, 'About page') | eq_(instance.is_featured, False) | random_line_split |
models_tests.py | from nose.tools import eq_, ok_
from django.test import TestCase
from us_ignite.snippets.models import Snippet
from us_ignite.snippets.tests import fixtures
class TestSnippetModel(TestCase):
def tearDown(self):
Snippet.objects.all().delete()
def | (self):
data = {
'name': 'Gigabit snippets',
'slug': 'featured',
'url': 'http://us-ignite.org/',
}
return Snippet.objects.create(**data)
def test_instance_is_created_successfully(self):
instance = self.get_instance()
eq_(instance.name, 'Gigabit snippets')
eq_(instance.status, Snippet.DRAFT)
eq_(instance.url, 'http://us-ignite.org/')
eq_(instance.url_text, '')
eq_(instance.body, '')
eq_(instance.image, '')
eq_(instance.is_featured, False)
ok_(instance.created)
ok_(instance.modified)
eq_(instance.slug, 'featured')
ok_(instance.id)
eq_(instance.notes, '')
def test_instance_name_is_used_as_title(self):
instance = fixtures.get_snippet(name='About page')
eq_(instance.title, 'About page')
| get_instance | identifier_name |
models_tests.py | from nose.tools import eq_, ok_
from django.test import TestCase
from us_ignite.snippets.models import Snippet
from us_ignite.snippets.tests import fixtures
class TestSnippetModel(TestCase):
def tearDown(self):
Snippet.objects.all().delete()
def get_instance(self):
data = {
'name': 'Gigabit snippets',
'slug': 'featured',
'url': 'http://us-ignite.org/',
}
return Snippet.objects.create(**data)
def test_instance_is_created_successfully(self):
|
def test_instance_name_is_used_as_title(self):
instance = fixtures.get_snippet(name='About page')
eq_(instance.title, 'About page')
| instance = self.get_instance()
eq_(instance.name, 'Gigabit snippets')
eq_(instance.status, Snippet.DRAFT)
eq_(instance.url, 'http://us-ignite.org/')
eq_(instance.url_text, '')
eq_(instance.body, '')
eq_(instance.image, '')
eq_(instance.is_featured, False)
ok_(instance.created)
ok_(instance.modified)
eq_(instance.slug, 'featured')
ok_(instance.id)
eq_(instance.notes, '') | identifier_body |
is-finite-number.js | /**
* The MIT License (MIT)
*
* Copyright (c) 2014-2022 Mickael Jeanroy
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
import {isNumber} from './is-number.js';
/**
* Check that a given value is a falsy value.
*
* @param {*} a Value to check.
* @return {boolean} `true` if parameter is a falsy value.
*/
export function | (a) {
return isNumber(a) && isFinite(a);
}
| isFiniteNumber | identifier_name |
is-finite-number.js | /**
* The MIT License (MIT)
*
* Copyright (c) 2014-2022 Mickael Jeanroy
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
import {isNumber} from './is-number.js';
/**
* Check that a given value is a falsy value.
*
* @param {*} a Value to check.
* @return {boolean} `true` if parameter is a falsy value.
*/
export function isFiniteNumber(a) | {
return isNumber(a) && isFinite(a);
} | identifier_body |
|
is-finite-number.js | /**
* The MIT License (MIT)
*
* Copyright (c) 2014-2022 Mickael Jeanroy
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
import {isNumber} from './is-number.js';
/**
* Check that a given value is a falsy value.
*
* @param {*} a Value to check.
* @return {boolean} `true` if parameter is a falsy value.
*/
export function isFiniteNumber(a) {
return isNumber(a) && isFinite(a);
} | * The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | random_line_split |
edgeop_lsys.rs | extern crate rand;
extern crate evo;
extern crate petgraph;
#[macro_use]
extern crate graph_annealing;
extern crate pcg;
extern crate triadic_census;
extern crate lindenmayer_system;
extern crate graph_edge_evolution;
extern crate asexp;
extern crate expression;
extern crate expression_num;
extern crate expression_closed01;
extern crate matplotlib;
extern crate closed01;
extern crate graph_io_gml;
extern crate nsga2;
#[path="genome/genome_edgeop_lsys.rs"]
pub mod genome;
use std::str::FromStr;
use rand::{Rng, SeedableRng};
use rand::os::OsRng;
use pcg::PcgRng;
use evo::Probability;
use nsga2::{Driver, DriverConfig};
use genome::{Genome, Toolbox};
use graph_annealing::helper::to_weighted_vec;
use graph_annealing::goal::{FitnessFunction, Goal};
use graph_annealing::graph;
pub use graph_annealing::UniformDistribution;
use graph_annealing::stat::Stat;
use petgraph::{Directed, EdgeDirection, Graph};
use triadic_census::OptDenseDigraph;
use std::fs::File;
use genome::{RuleMutOp, RuleProductionMutOp, VarOp};
use genome::edgeop::{EdgeOp, edgeops_to_graph};
use genome::expr_op::{FlatExprOp, RecursiveExprOp, EXPR_NAME};
use std::io::Read;
use asexp::Sexp;
use asexp::sexp::pp;
use std::env;
use std::collections::BTreeMap;
use std::fmt::Debug;
use matplotlib::{Env, Plot};
struct ReseedRecorder {
reseeds: Vec<(u64, u64)>
}
/*
impl Reseeder<pcg::RcgRng> for ReseedRecorder {
fn reseed(&mut self, rng: &mut pcg::RcgRng) {
let mut r = rand::thread_rng();
let s1 = r.next_u64();
let s2 = r.next_u64();
self.reseeds.push((s1, s2));
rng.reseed([s1, s2]);
}
}
*/
const MAX_OBJECTIVES: usize = 3;
fn graph_to_sexp<N, E, F, G>(g: &Graph<N, E, Directed>,
node_weight_map: F,
edge_weight_map: G)
-> Sexp
where F: Fn(&N) -> Option<Sexp>,
G: Fn(&E) -> Option<Sexp>
{
let mut nodes = Vec::new();
for node_idx in g.node_indices() {
let edges: Vec<_> = g.edges_directed(node_idx, EdgeDirection::Outgoing)
.map(|(target_node, edge_weight)| {
match edge_weight_map(edge_weight) {
Some(w) => Sexp::from((target_node.index(), w)),
None => Sexp::from(target_node.index()),
}
})
.collect();
let mut def = vec![
(Sexp::from("id"), Sexp::from(node_idx.index())),
(Sexp::from("edges"), Sexp::Array(edges)),
];
match node_weight_map(&g[node_idx]) {
Some(w) => def.push((Sexp::from("weight"), w)),
None => {}
}
nodes.push(Sexp::Map(def));
}
Sexp::Map(vec![
(Sexp::from("version"), Sexp::from(1usize)),
(Sexp::from("nodes"), Sexp::Array(nodes)),
])
}
#[derive(Debug)]
struct ConfigGenome {
max_iter: usize,
rules: usize,
initial_len: usize,
symbol_arity: usize,
num_params: usize,
prob_terminal: Probability,
}
#[derive(Debug)]
struct Config {
ngen: usize,
mu: usize,
lambda: usize,
k: usize,
seed: Vec<u64>,
objectives: Vec<Objective>,
graph: Graph<f32, f32, Directed>,
edge_ops: Vec<(EdgeOp, u32)>,
var_ops: Vec<(VarOp, u32)>,
rule_mut_ops: Vec<(RuleMutOp, u32)>,
rule_prod_ops: Vec<(RuleProductionMutOp, u32)>,
flat_expr_op: Vec<(FlatExprOp, u32)>,
recursive_expr_op: Vec<(RecursiveExprOp, u32)>,
genome: ConfigGenome,
plot: bool,
weight: f64,
}
#[derive(Debug)]
struct Objective {
fitness_function: FitnessFunction,
threshold: f32,
}
fn parse_ops<T, I>(map: &BTreeMap<String, Sexp>, key: &str) -> Vec<(T, u32)>
where T: FromStr<Err = I> + UniformDistribution,
I: Debug
{
if let Some(&Sexp::Map(ref list)) = map.get(key) {
let mut ops: Vec<(T, u32)> = Vec::new();
for &(ref k, ref v) in list.iter() {
ops.push((T::from_str(k.get_str().unwrap()).unwrap(),
v.get_uint().unwrap() as u32));
}
ops
} else {
T::uniform_distribution()
}
}
fn convert_weight(w: Option<&Sexp>) -> Option<f32> {
match w {
Some(s) => s.get_float().map(|f| f as f32),
None => {
// use a default
Some(0.0)
}
}
}
fn parse_config(sexp: Sexp) -> Config {
let map = sexp.into_map().unwrap();
// number of generations
let ngen: usize = map.get("ngen").and_then(|v| v.get_uint()).unwrap() as usize;
// size of population
let mu: usize = map.get("mu").and_then(|v| v.get_uint()).unwrap() as usize;
// size of offspring population
let lambda: usize = map.get("lambda").and_then(|v| v.get_uint()).unwrap() as usize;
// tournament selection
let k: usize = map.get("k").and_then(|v| v.get_uint()).unwrap_or(2) as usize;
assert!(k > 0);
let plot: bool = map.get("plot").map(|v| v.get_str() == Some("true")).unwrap_or(false);
let weight: f64 = map.get("weight").and_then(|v| v.get_float()).unwrap_or(1.0);
let seed: Vec<u64>;
if let Some(seed_expr) = map.get("seed") {
seed = seed_expr.get_uint_vec().unwrap();
} else {
println!("Use OsRng to generate seed..");
let mut rng = OsRng::new().unwrap();
seed = (0..2).map(|_| rng.next_u64()).collect();
}
// Parse objectives and thresholds
let mut objectives: Vec<Objective> = Vec::new();
if let Some(&Sexp::Map(ref list)) = map.get("objectives") {
for &(ref k, ref v) in list.iter() {
objectives.push(Objective {
fitness_function: FitnessFunction::from_str(k.get_str().unwrap()).unwrap(),
threshold: v.get_float().unwrap() as f32,
});
}
} else {
panic!("Map expected");
}
if objectives.len() > MAX_OBJECTIVES {
panic!("Max {} objectives allowed", MAX_OBJECTIVES);
}
// read graph
let graph_file = map.get("graph").unwrap().get_str().unwrap();
println!("Using graph file: {}", graph_file);
let graph_s = {
let mut graph_file = File::open(graph_file).unwrap();
let mut graph_s = String::new();
let _ = graph_file.read_to_string(&mut graph_s).unwrap();
graph_s
};
let graph = graph_io_gml::parse_gml(&graph_s,
&convert_weight,
&convert_weight)
.unwrap();
println!("graph: {:?}", graph);
let graph = graph::normalize_graph(&graph);
let genome_map = map.get("genome").unwrap().clone().into_map().unwrap();
Config {
ngen: ngen,
mu: mu,
lambda: lambda,
k: k,
plot: plot,
weight: weight,
seed: seed,
objectives: objectives,
graph: graph,
edge_ops: parse_ops(&map, "edge_ops"),
var_ops: parse_ops(&map, "var_ops"),
rule_mut_ops: parse_ops(&map, "rule_mut_ops"),
rule_prod_ops: parse_ops(&map, "rule_prod_mut_ops"),
flat_expr_op: parse_ops(&map, "flat_expr_ops"),
recursive_expr_op: parse_ops(&map, "recursive_expr_ops"),
genome: ConfigGenome {
rules: genome_map.get("rules").and_then(|v| v.get_uint()).unwrap() as usize,
symbol_arity: genome_map.get("symbol_arity").and_then(|v| v.get_uint()).unwrap() as usize,
num_params: genome_map.get("num_params").and_then(|v| v.get_uint()).unwrap() as usize,
initial_len: genome_map.get("initial_len").and_then(|v| v.get_uint()).unwrap() as usize,
max_iter: genome_map.get("max_iter").and_then(|v| v.get_uint()).unwrap() as usize,
prob_terminal: Probability::new(genome_map.get("prob_terminal").and_then(|v| v.get_float()).unwrap() as f32),
},
}
}
fn main() | {
println!("Using expr system: {}", EXPR_NAME);
let env = Env::new();
let plot = Plot::new(&env);
let mut s = String::new();
let configfile = env::args().nth(1).unwrap();
let _ = File::open(configfile).unwrap().read_to_string(&mut s).unwrap();
let expr = asexp::Sexp::parse_toplevel(&s).unwrap();
let config = parse_config(expr);
println!("{:#?}", config);
if config.plot {
plot.interactive();
plot.show();
}
let num_objectives = config.objectives.len();
let driver_config = DriverConfig {
mu: config.mu,
lambda: config.lambda,
k: config.k,
ngen: config.ngen,
num_objectives: num_objectives
};
let toolbox = Toolbox::new(Goal::new(OptDenseDigraph::from(config.graph.clone())),
config.objectives
.iter()
.map(|o| o.threshold)
.collect(),
config.objectives
.iter()
.map(|o| o.fitness_function.clone())
.collect(),
config.genome.max_iter, // iterations
config.genome.rules, // num_rules
config.genome.initial_len, // initial rule length
config.genome.symbol_arity, // we use 1-ary symbols
config.genome.num_params,
config.genome.prob_terminal,
to_weighted_vec(&config.edge_ops),
to_weighted_vec(&config.flat_expr_op),
to_weighted_vec(&config.recursive_expr_op),
to_weighted_vec(&config.var_ops),
to_weighted_vec(&config.rule_mut_ops),
to_weighted_vec(&config.rule_prod_ops));
assert!(config.seed.len() == 2);
let mut rng: PcgRng = SeedableRng::from_seed([config.seed[0], config.seed[1]]);
//let mut rng = rand::thread_rng();
let selected_population = toolbox.run(&mut rng, &driver_config, config.weight, &|iteration, duration, num_optima, population| {
let duration_ms = (duration as f32) / 1_000_000.0;
print!("# {:>6}", iteration);
let fitness_values = population.fitness_to_vec();
// XXX: Assume we have at least two objectives
let mut x = Vec::new();
let mut y = Vec::new();
for f in fitness_values.iter() {
x.push(f.objectives[0]);
y.push(f.objectives[1]);
}
if config.plot {
plot.clf();
plot.title(&format!("Iteration: {}", iteration));
plot.grid(true);
plot.scatter(&x, &y);
plot.draw();
}
// calculate a min/max/avg value for each objective.
let stats: Vec<Stat<f32>> = (0..num_objectives)
.into_iter()
.map(|i| {
Stat::from_iter(fitness_values.iter().map(|o| o.objectives[i]))
.unwrap()
})
.collect();
for stat in stats.iter() {
print!(" | ");
print!("{:>8.2}", stat.min);
print!("{:>9.2}", stat.avg);
print!("{:>10.2}", stat.max);
}
print!(" | {:>5} | {:>8.0} ms", num_optima, duration_ms);
println!("");
if num_optima > 0 {
println!("Found premature optimum in Iteration {}", iteration);
}
});
println!("===========================================================");
let mut best_solutions: Vec<(Genome, _)> = Vec::new();
selected_population.all_of_rank(0, &mut |ind, fit| {
if fit.objectives[0] < 0.1 && fit.objectives[1] < 0.1 {
best_solutions.push((ind.clone(), fit.clone()));
}
});
println!("Target graph");
let sexp = graph_to_sexp(&graph::normalize_graph_closed01(&config.graph),
|nw| Some(Sexp::from(nw.get())),
|ew| Some(Sexp::from(ew.get())));
println!("{}", pp(&sexp));
let mut solutions: Vec<Sexp> = Vec::new();
for (_i, &(ref ind, ref fitness)) in best_solutions.iter().enumerate() {
let genome: Sexp = ind.into();
let edge_ops = ind.to_edge_ops(&toolbox.axiom_args, toolbox.iterations);
let g = edgeops_to_graph(&edge_ops);
// output as sexp
let graph_sexp = graph_to_sexp(g.ref_graph(),
|&nw| Some(Sexp::from(nw)),
|&ew| Some(Sexp::from(ew)));
solutions.push(Sexp::Map(
vec![
(Sexp::from("fitness"), Sexp::from((fitness.objectives[0], fitness.objectives[1], fitness.objectives[2]))),
(Sexp::from("genome"), genome),
(Sexp::from("graph"), graph_sexp),
]
));
/*
draw_graph(g.ref_graph(),
// XXX: name
&format!("edgeop_lsys_g{}_f{}_i{}.svg",
config.ngen,
fitness.objectives[1] as usize,
i));
*/
}
println!("{}", pp(&Sexp::from(("solutions", Sexp::Array(solutions)))));
//println!("])");
println!("{:#?}", config);
} | identifier_body |
|
edgeop_lsys.rs | extern crate rand;
extern crate evo;
extern crate petgraph;
#[macro_use]
extern crate graph_annealing;
extern crate pcg;
extern crate triadic_census;
extern crate lindenmayer_system;
extern crate graph_edge_evolution;
extern crate asexp;
extern crate expression;
extern crate expression_num;
extern crate expression_closed01;
extern crate matplotlib;
extern crate closed01;
extern crate graph_io_gml;
extern crate nsga2;
#[path="genome/genome_edgeop_lsys.rs"]
pub mod genome;
use std::str::FromStr;
use rand::{Rng, SeedableRng};
use rand::os::OsRng;
use pcg::PcgRng;
use evo::Probability;
use nsga2::{Driver, DriverConfig};
use genome::{Genome, Toolbox};
use graph_annealing::helper::to_weighted_vec;
use graph_annealing::goal::{FitnessFunction, Goal};
use graph_annealing::graph;
pub use graph_annealing::UniformDistribution;
use graph_annealing::stat::Stat;
use petgraph::{Directed, EdgeDirection, Graph};
use triadic_census::OptDenseDigraph;
use std::fs::File;
use genome::{RuleMutOp, RuleProductionMutOp, VarOp};
use genome::edgeop::{EdgeOp, edgeops_to_graph};
use genome::expr_op::{FlatExprOp, RecursiveExprOp, EXPR_NAME};
use std::io::Read;
use asexp::Sexp;
use asexp::sexp::pp;
use std::env;
use std::collections::BTreeMap;
use std::fmt::Debug;
use matplotlib::{Env, Plot};
struct ReseedRecorder {
reseeds: Vec<(u64, u64)>
}
/*
impl Reseeder<pcg::RcgRng> for ReseedRecorder {
fn reseed(&mut self, rng: &mut pcg::RcgRng) {
let mut r = rand::thread_rng();
let s1 = r.next_u64();
let s2 = r.next_u64();
self.reseeds.push((s1, s2));
rng.reseed([s1, s2]);
}
}
*/
const MAX_OBJECTIVES: usize = 3;
fn graph_to_sexp<N, E, F, G>(g: &Graph<N, E, Directed>,
node_weight_map: F,
edge_weight_map: G)
-> Sexp
where F: Fn(&N) -> Option<Sexp>,
G: Fn(&E) -> Option<Sexp>
{
let mut nodes = Vec::new();
for node_idx in g.node_indices() {
let edges: Vec<_> = g.edges_directed(node_idx, EdgeDirection::Outgoing)
.map(|(target_node, edge_weight)| {
match edge_weight_map(edge_weight) {
Some(w) => Sexp::from((target_node.index(), w)),
None => Sexp::from(target_node.index()),
}
})
.collect();
let mut def = vec![
(Sexp::from("id"), Sexp::from(node_idx.index())),
(Sexp::from("edges"), Sexp::Array(edges)),
];
match node_weight_map(&g[node_idx]) {
Some(w) => def.push((Sexp::from("weight"), w)),
None => {}
}
nodes.push(Sexp::Map(def));
}
Sexp::Map(vec![
(Sexp::from("version"), Sexp::from(1usize)),
(Sexp::from("nodes"), Sexp::Array(nodes)),
])
}
#[derive(Debug)]
struct ConfigGenome {
max_iter: usize,
rules: usize,
initial_len: usize,
symbol_arity: usize,
num_params: usize,
prob_terminal: Probability,
}
#[derive(Debug)]
struct Config {
ngen: usize,
mu: usize,
lambda: usize,
k: usize,
seed: Vec<u64>,
objectives: Vec<Objective>,
graph: Graph<f32, f32, Directed>,
edge_ops: Vec<(EdgeOp, u32)>,
var_ops: Vec<(VarOp, u32)>,
rule_mut_ops: Vec<(RuleMutOp, u32)>,
rule_prod_ops: Vec<(RuleProductionMutOp, u32)>,
flat_expr_op: Vec<(FlatExprOp, u32)>,
recursive_expr_op: Vec<(RecursiveExprOp, u32)>,
genome: ConfigGenome,
plot: bool,
weight: f64,
}
| struct Objective {
fitness_function: FitnessFunction,
threshold: f32,
}
fn parse_ops<T, I>(map: &BTreeMap<String, Sexp>, key: &str) -> Vec<(T, u32)>
where T: FromStr<Err = I> + UniformDistribution,
I: Debug
{
if let Some(&Sexp::Map(ref list)) = map.get(key) {
let mut ops: Vec<(T, u32)> = Vec::new();
for &(ref k, ref v) in list.iter() {
ops.push((T::from_str(k.get_str().unwrap()).unwrap(),
v.get_uint().unwrap() as u32));
}
ops
} else {
T::uniform_distribution()
}
}
fn convert_weight(w: Option<&Sexp>) -> Option<f32> {
match w {
Some(s) => s.get_float().map(|f| f as f32),
None => {
// use a default
Some(0.0)
}
}
}
fn parse_config(sexp: Sexp) -> Config {
let map = sexp.into_map().unwrap();
// number of generations
let ngen: usize = map.get("ngen").and_then(|v| v.get_uint()).unwrap() as usize;
// size of population
let mu: usize = map.get("mu").and_then(|v| v.get_uint()).unwrap() as usize;
// size of offspring population
let lambda: usize = map.get("lambda").and_then(|v| v.get_uint()).unwrap() as usize;
// tournament selection
let k: usize = map.get("k").and_then(|v| v.get_uint()).unwrap_or(2) as usize;
assert!(k > 0);
let plot: bool = map.get("plot").map(|v| v.get_str() == Some("true")).unwrap_or(false);
let weight: f64 = map.get("weight").and_then(|v| v.get_float()).unwrap_or(1.0);
let seed: Vec<u64>;
if let Some(seed_expr) = map.get("seed") {
seed = seed_expr.get_uint_vec().unwrap();
} else {
println!("Use OsRng to generate seed..");
let mut rng = OsRng::new().unwrap();
seed = (0..2).map(|_| rng.next_u64()).collect();
}
// Parse objectives and thresholds
let mut objectives: Vec<Objective> = Vec::new();
if let Some(&Sexp::Map(ref list)) = map.get("objectives") {
for &(ref k, ref v) in list.iter() {
objectives.push(Objective {
fitness_function: FitnessFunction::from_str(k.get_str().unwrap()).unwrap(),
threshold: v.get_float().unwrap() as f32,
});
}
} else {
panic!("Map expected");
}
if objectives.len() > MAX_OBJECTIVES {
panic!("Max {} objectives allowed", MAX_OBJECTIVES);
}
// read graph
let graph_file = map.get("graph").unwrap().get_str().unwrap();
println!("Using graph file: {}", graph_file);
let graph_s = {
let mut graph_file = File::open(graph_file).unwrap();
let mut graph_s = String::new();
let _ = graph_file.read_to_string(&mut graph_s).unwrap();
graph_s
};
let graph = graph_io_gml::parse_gml(&graph_s,
&convert_weight,
&convert_weight)
.unwrap();
println!("graph: {:?}", graph);
let graph = graph::normalize_graph(&graph);
let genome_map = map.get("genome").unwrap().clone().into_map().unwrap();
Config {
ngen: ngen,
mu: mu,
lambda: lambda,
k: k,
plot: plot,
weight: weight,
seed: seed,
objectives: objectives,
graph: graph,
edge_ops: parse_ops(&map, "edge_ops"),
var_ops: parse_ops(&map, "var_ops"),
rule_mut_ops: parse_ops(&map, "rule_mut_ops"),
rule_prod_ops: parse_ops(&map, "rule_prod_mut_ops"),
flat_expr_op: parse_ops(&map, "flat_expr_ops"),
recursive_expr_op: parse_ops(&map, "recursive_expr_ops"),
genome: ConfigGenome {
rules: genome_map.get("rules").and_then(|v| v.get_uint()).unwrap() as usize,
symbol_arity: genome_map.get("symbol_arity").and_then(|v| v.get_uint()).unwrap() as usize,
num_params: genome_map.get("num_params").and_then(|v| v.get_uint()).unwrap() as usize,
initial_len: genome_map.get("initial_len").and_then(|v| v.get_uint()).unwrap() as usize,
max_iter: genome_map.get("max_iter").and_then(|v| v.get_uint()).unwrap() as usize,
prob_terminal: Probability::new(genome_map.get("prob_terminal").and_then(|v| v.get_float()).unwrap() as f32),
},
}
}
fn main() {
println!("Using expr system: {}", EXPR_NAME);
let env = Env::new();
let plot = Plot::new(&env);
let mut s = String::new();
let configfile = env::args().nth(1).unwrap();
let _ = File::open(configfile).unwrap().read_to_string(&mut s).unwrap();
let expr = asexp::Sexp::parse_toplevel(&s).unwrap();
let config = parse_config(expr);
println!("{:#?}", config);
if config.plot {
plot.interactive();
plot.show();
}
let num_objectives = config.objectives.len();
let driver_config = DriverConfig {
mu: config.mu,
lambda: config.lambda,
k: config.k,
ngen: config.ngen,
num_objectives: num_objectives
};
let toolbox = Toolbox::new(Goal::new(OptDenseDigraph::from(config.graph.clone())),
config.objectives
.iter()
.map(|o| o.threshold)
.collect(),
config.objectives
.iter()
.map(|o| o.fitness_function.clone())
.collect(),
config.genome.max_iter, // iterations
config.genome.rules, // num_rules
config.genome.initial_len, // initial rule length
config.genome.symbol_arity, // we use 1-ary symbols
config.genome.num_params,
config.genome.prob_terminal,
to_weighted_vec(&config.edge_ops),
to_weighted_vec(&config.flat_expr_op),
to_weighted_vec(&config.recursive_expr_op),
to_weighted_vec(&config.var_ops),
to_weighted_vec(&config.rule_mut_ops),
to_weighted_vec(&config.rule_prod_ops));
assert!(config.seed.len() == 2);
let mut rng: PcgRng = SeedableRng::from_seed([config.seed[0], config.seed[1]]);
//let mut rng = rand::thread_rng();
let selected_population = toolbox.run(&mut rng, &driver_config, config.weight, &|iteration, duration, num_optima, population| {
let duration_ms = (duration as f32) / 1_000_000.0;
print!("# {:>6}", iteration);
let fitness_values = population.fitness_to_vec();
// XXX: Assume we have at least two objectives
let mut x = Vec::new();
let mut y = Vec::new();
for f in fitness_values.iter() {
x.push(f.objectives[0]);
y.push(f.objectives[1]);
}
if config.plot {
plot.clf();
plot.title(&format!("Iteration: {}", iteration));
plot.grid(true);
plot.scatter(&x, &y);
plot.draw();
}
// calculate a min/max/avg value for each objective.
let stats: Vec<Stat<f32>> = (0..num_objectives)
.into_iter()
.map(|i| {
Stat::from_iter(fitness_values.iter().map(|o| o.objectives[i]))
.unwrap()
})
.collect();
for stat in stats.iter() {
print!(" | ");
print!("{:>8.2}", stat.min);
print!("{:>9.2}", stat.avg);
print!("{:>10.2}", stat.max);
}
print!(" | {:>5} | {:>8.0} ms", num_optima, duration_ms);
println!("");
if num_optima > 0 {
println!("Found premature optimum in Iteration {}", iteration);
}
});
println!("===========================================================");
let mut best_solutions: Vec<(Genome, _)> = Vec::new();
selected_population.all_of_rank(0, &mut |ind, fit| {
if fit.objectives[0] < 0.1 && fit.objectives[1] < 0.1 {
best_solutions.push((ind.clone(), fit.clone()));
}
});
println!("Target graph");
let sexp = graph_to_sexp(&graph::normalize_graph_closed01(&config.graph),
|nw| Some(Sexp::from(nw.get())),
|ew| Some(Sexp::from(ew.get())));
println!("{}", pp(&sexp));
let mut solutions: Vec<Sexp> = Vec::new();
for (_i, &(ref ind, ref fitness)) in best_solutions.iter().enumerate() {
let genome: Sexp = ind.into();
let edge_ops = ind.to_edge_ops(&toolbox.axiom_args, toolbox.iterations);
let g = edgeops_to_graph(&edge_ops);
// output as sexp
let graph_sexp = graph_to_sexp(g.ref_graph(),
|&nw| Some(Sexp::from(nw)),
|&ew| Some(Sexp::from(ew)));
solutions.push(Sexp::Map(
vec![
(Sexp::from("fitness"), Sexp::from((fitness.objectives[0], fitness.objectives[1], fitness.objectives[2]))),
(Sexp::from("genome"), genome),
(Sexp::from("graph"), graph_sexp),
]
));
/*
draw_graph(g.ref_graph(),
// XXX: name
&format!("edgeop_lsys_g{}_f{}_i{}.svg",
config.ngen,
fitness.objectives[1] as usize,
i));
*/
}
println!("{}", pp(&Sexp::from(("solutions", Sexp::Array(solutions)))));
//println!("])");
println!("{:#?}", config);
} | #[derive(Debug)] | random_line_split |
edgeop_lsys.rs | extern crate rand;
extern crate evo;
extern crate petgraph;
#[macro_use]
extern crate graph_annealing;
extern crate pcg;
extern crate triadic_census;
extern crate lindenmayer_system;
extern crate graph_edge_evolution;
extern crate asexp;
extern crate expression;
extern crate expression_num;
extern crate expression_closed01;
extern crate matplotlib;
extern crate closed01;
extern crate graph_io_gml;
extern crate nsga2;
#[path="genome/genome_edgeop_lsys.rs"]
pub mod genome;
use std::str::FromStr;
use rand::{Rng, SeedableRng};
use rand::os::OsRng;
use pcg::PcgRng;
use evo::Probability;
use nsga2::{Driver, DriverConfig};
use genome::{Genome, Toolbox};
use graph_annealing::helper::to_weighted_vec;
use graph_annealing::goal::{FitnessFunction, Goal};
use graph_annealing::graph;
pub use graph_annealing::UniformDistribution;
use graph_annealing::stat::Stat;
use petgraph::{Directed, EdgeDirection, Graph};
use triadic_census::OptDenseDigraph;
use std::fs::File;
use genome::{RuleMutOp, RuleProductionMutOp, VarOp};
use genome::edgeop::{EdgeOp, edgeops_to_graph};
use genome::expr_op::{FlatExprOp, RecursiveExprOp, EXPR_NAME};
use std::io::Read;
use asexp::Sexp;
use asexp::sexp::pp;
use std::env;
use std::collections::BTreeMap;
use std::fmt::Debug;
use matplotlib::{Env, Plot};
struct ReseedRecorder {
reseeds: Vec<(u64, u64)>
}
/*
impl Reseeder<pcg::RcgRng> for ReseedRecorder {
fn reseed(&mut self, rng: &mut pcg::RcgRng) {
let mut r = rand::thread_rng();
let s1 = r.next_u64();
let s2 = r.next_u64();
self.reseeds.push((s1, s2));
rng.reseed([s1, s2]);
}
}
*/
const MAX_OBJECTIVES: usize = 3;
fn graph_to_sexp<N, E, F, G>(g: &Graph<N, E, Directed>,
node_weight_map: F,
edge_weight_map: G)
-> Sexp
where F: Fn(&N) -> Option<Sexp>,
G: Fn(&E) -> Option<Sexp>
{
let mut nodes = Vec::new();
for node_idx in g.node_indices() {
let edges: Vec<_> = g.edges_directed(node_idx, EdgeDirection::Outgoing)
.map(|(target_node, edge_weight)| {
match edge_weight_map(edge_weight) {
Some(w) => Sexp::from((target_node.index(), w)),
None => Sexp::from(target_node.index()),
}
})
.collect();
let mut def = vec![
(Sexp::from("id"), Sexp::from(node_idx.index())),
(Sexp::from("edges"), Sexp::Array(edges)),
];
match node_weight_map(&g[node_idx]) {
Some(w) => def.push((Sexp::from("weight"), w)),
None => {}
}
nodes.push(Sexp::Map(def));
}
Sexp::Map(vec![
(Sexp::from("version"), Sexp::from(1usize)),
(Sexp::from("nodes"), Sexp::Array(nodes)),
])
}
#[derive(Debug)]
struct ConfigGenome {
max_iter: usize,
rules: usize,
initial_len: usize,
symbol_arity: usize,
num_params: usize,
prob_terminal: Probability,
}
#[derive(Debug)]
struct Config {
ngen: usize,
mu: usize,
lambda: usize,
k: usize,
seed: Vec<u64>,
objectives: Vec<Objective>,
graph: Graph<f32, f32, Directed>,
edge_ops: Vec<(EdgeOp, u32)>,
var_ops: Vec<(VarOp, u32)>,
rule_mut_ops: Vec<(RuleMutOp, u32)>,
rule_prod_ops: Vec<(RuleProductionMutOp, u32)>,
flat_expr_op: Vec<(FlatExprOp, u32)>,
recursive_expr_op: Vec<(RecursiveExprOp, u32)>,
genome: ConfigGenome,
plot: bool,
weight: f64,
}
#[derive(Debug)]
struct Objective {
fitness_function: FitnessFunction,
threshold: f32,
}
fn parse_ops<T, I>(map: &BTreeMap<String, Sexp>, key: &str) -> Vec<(T, u32)>
where T: FromStr<Err = I> + UniformDistribution,
I: Debug
{
if let Some(&Sexp::Map(ref list)) = map.get(key) {
let mut ops: Vec<(T, u32)> = Vec::new();
for &(ref k, ref v) in list.iter() {
ops.push((T::from_str(k.get_str().unwrap()).unwrap(),
v.get_uint().unwrap() as u32));
}
ops
} else {
T::uniform_distribution()
}
}
fn | (w: Option<&Sexp>) -> Option<f32> {
match w {
Some(s) => s.get_float().map(|f| f as f32),
None => {
// use a default
Some(0.0)
}
}
}
fn parse_config(sexp: Sexp) -> Config {
let map = sexp.into_map().unwrap();
// number of generations
let ngen: usize = map.get("ngen").and_then(|v| v.get_uint()).unwrap() as usize;
// size of population
let mu: usize = map.get("mu").and_then(|v| v.get_uint()).unwrap() as usize;
// size of offspring population
let lambda: usize = map.get("lambda").and_then(|v| v.get_uint()).unwrap() as usize;
// tournament selection
let k: usize = map.get("k").and_then(|v| v.get_uint()).unwrap_or(2) as usize;
assert!(k > 0);
let plot: bool = map.get("plot").map(|v| v.get_str() == Some("true")).unwrap_or(false);
let weight: f64 = map.get("weight").and_then(|v| v.get_float()).unwrap_or(1.0);
let seed: Vec<u64>;
if let Some(seed_expr) = map.get("seed") {
seed = seed_expr.get_uint_vec().unwrap();
} else {
println!("Use OsRng to generate seed..");
let mut rng = OsRng::new().unwrap();
seed = (0..2).map(|_| rng.next_u64()).collect();
}
// Parse objectives and thresholds
let mut objectives: Vec<Objective> = Vec::new();
if let Some(&Sexp::Map(ref list)) = map.get("objectives") {
for &(ref k, ref v) in list.iter() {
objectives.push(Objective {
fitness_function: FitnessFunction::from_str(k.get_str().unwrap()).unwrap(),
threshold: v.get_float().unwrap() as f32,
});
}
} else {
panic!("Map expected");
}
if objectives.len() > MAX_OBJECTIVES {
panic!("Max {} objectives allowed", MAX_OBJECTIVES);
}
// read graph
let graph_file = map.get("graph").unwrap().get_str().unwrap();
println!("Using graph file: {}", graph_file);
let graph_s = {
let mut graph_file = File::open(graph_file).unwrap();
let mut graph_s = String::new();
let _ = graph_file.read_to_string(&mut graph_s).unwrap();
graph_s
};
let graph = graph_io_gml::parse_gml(&graph_s,
&convert_weight,
&convert_weight)
.unwrap();
println!("graph: {:?}", graph);
let graph = graph::normalize_graph(&graph);
let genome_map = map.get("genome").unwrap().clone().into_map().unwrap();
Config {
ngen: ngen,
mu: mu,
lambda: lambda,
k: k,
plot: plot,
weight: weight,
seed: seed,
objectives: objectives,
graph: graph,
edge_ops: parse_ops(&map, "edge_ops"),
var_ops: parse_ops(&map, "var_ops"),
rule_mut_ops: parse_ops(&map, "rule_mut_ops"),
rule_prod_ops: parse_ops(&map, "rule_prod_mut_ops"),
flat_expr_op: parse_ops(&map, "flat_expr_ops"),
recursive_expr_op: parse_ops(&map, "recursive_expr_ops"),
genome: ConfigGenome {
rules: genome_map.get("rules").and_then(|v| v.get_uint()).unwrap() as usize,
symbol_arity: genome_map.get("symbol_arity").and_then(|v| v.get_uint()).unwrap() as usize,
num_params: genome_map.get("num_params").and_then(|v| v.get_uint()).unwrap() as usize,
initial_len: genome_map.get("initial_len").and_then(|v| v.get_uint()).unwrap() as usize,
max_iter: genome_map.get("max_iter").and_then(|v| v.get_uint()).unwrap() as usize,
prob_terminal: Probability::new(genome_map.get("prob_terminal").and_then(|v| v.get_float()).unwrap() as f32),
},
}
}
fn main() {
println!("Using expr system: {}", EXPR_NAME);
let env = Env::new();
let plot = Plot::new(&env);
let mut s = String::new();
let configfile = env::args().nth(1).unwrap();
let _ = File::open(configfile).unwrap().read_to_string(&mut s).unwrap();
let expr = asexp::Sexp::parse_toplevel(&s).unwrap();
let config = parse_config(expr);
println!("{:#?}", config);
if config.plot {
plot.interactive();
plot.show();
}
let num_objectives = config.objectives.len();
let driver_config = DriverConfig {
mu: config.mu,
lambda: config.lambda,
k: config.k,
ngen: config.ngen,
num_objectives: num_objectives
};
let toolbox = Toolbox::new(Goal::new(OptDenseDigraph::from(config.graph.clone())),
config.objectives
.iter()
.map(|o| o.threshold)
.collect(),
config.objectives
.iter()
.map(|o| o.fitness_function.clone())
.collect(),
config.genome.max_iter, // iterations
config.genome.rules, // num_rules
config.genome.initial_len, // initial rule length
config.genome.symbol_arity, // we use 1-ary symbols
config.genome.num_params,
config.genome.prob_terminal,
to_weighted_vec(&config.edge_ops),
to_weighted_vec(&config.flat_expr_op),
to_weighted_vec(&config.recursive_expr_op),
to_weighted_vec(&config.var_ops),
to_weighted_vec(&config.rule_mut_ops),
to_weighted_vec(&config.rule_prod_ops));
assert!(config.seed.len() == 2);
let mut rng: PcgRng = SeedableRng::from_seed([config.seed[0], config.seed[1]]);
//let mut rng = rand::thread_rng();
let selected_population = toolbox.run(&mut rng, &driver_config, config.weight, &|iteration, duration, num_optima, population| {
let duration_ms = (duration as f32) / 1_000_000.0;
print!("# {:>6}", iteration);
let fitness_values = population.fitness_to_vec();
// XXX: Assume we have at least two objectives
let mut x = Vec::new();
let mut y = Vec::new();
for f in fitness_values.iter() {
x.push(f.objectives[0]);
y.push(f.objectives[1]);
}
if config.plot {
plot.clf();
plot.title(&format!("Iteration: {}", iteration));
plot.grid(true);
plot.scatter(&x, &y);
plot.draw();
}
// calculate a min/max/avg value for each objective.
let stats: Vec<Stat<f32>> = (0..num_objectives)
.into_iter()
.map(|i| {
Stat::from_iter(fitness_values.iter().map(|o| o.objectives[i]))
.unwrap()
})
.collect();
for stat in stats.iter() {
print!(" | ");
print!("{:>8.2}", stat.min);
print!("{:>9.2}", stat.avg);
print!("{:>10.2}", stat.max);
}
print!(" | {:>5} | {:>8.0} ms", num_optima, duration_ms);
println!("");
if num_optima > 0 {
println!("Found premature optimum in Iteration {}", iteration);
}
});
println!("===========================================================");
let mut best_solutions: Vec<(Genome, _)> = Vec::new();
selected_population.all_of_rank(0, &mut |ind, fit| {
if fit.objectives[0] < 0.1 && fit.objectives[1] < 0.1 {
best_solutions.push((ind.clone(), fit.clone()));
}
});
println!("Target graph");
let sexp = graph_to_sexp(&graph::normalize_graph_closed01(&config.graph),
|nw| Some(Sexp::from(nw.get())),
|ew| Some(Sexp::from(ew.get())));
println!("{}", pp(&sexp));
let mut solutions: Vec<Sexp> = Vec::new();
for (_i, &(ref ind, ref fitness)) in best_solutions.iter().enumerate() {
let genome: Sexp = ind.into();
let edge_ops = ind.to_edge_ops(&toolbox.axiom_args, toolbox.iterations);
let g = edgeops_to_graph(&edge_ops);
// output as sexp
let graph_sexp = graph_to_sexp(g.ref_graph(),
|&nw| Some(Sexp::from(nw)),
|&ew| Some(Sexp::from(ew)));
solutions.push(Sexp::Map(
vec![
(Sexp::from("fitness"), Sexp::from((fitness.objectives[0], fitness.objectives[1], fitness.objectives[2]))),
(Sexp::from("genome"), genome),
(Sexp::from("graph"), graph_sexp),
]
));
/*
draw_graph(g.ref_graph(),
// XXX: name
&format!("edgeop_lsys_g{}_f{}_i{}.svg",
config.ngen,
fitness.objectives[1] as usize,
i));
*/
}
println!("{}", pp(&Sexp::from(("solutions", Sexp::Array(solutions)))));
//println!("])");
println!("{:#?}", config);
}
| convert_weight | identifier_name |
htmlareaelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLAreaElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLAreaElementDerived;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, NodeCast};
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::HTMLAreaElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, NodeHelpers, ElementNodeTypeId};
use dom::virtualmethods::VirtualMethods;
use servo_util::atom::Atom;
use servo_util::str::DOMString;
#[deriving(Encodable)]
#[must_root]
pub struct HTMLAreaElement {
pub htmlelement: HTMLElement
}
impl HTMLAreaElementDerived for EventTarget {
fn is_htmlareaelement(&self) -> bool {
self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLAreaElementTypeId))
}
}
impl HTMLAreaElement {
pub fn new_inherited(localName: DOMString, document: JSRef<Document>) -> HTMLAreaElement {
HTMLAreaElement {
htmlelement: HTMLElement::new_inherited(HTMLAreaElementTypeId, localName, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, document: JSRef<Document>) -> Temporary<HTMLAreaElement> {
let element = HTMLAreaElement::new_inherited(localName, document);
Node::reflect_node(box element, document, HTMLAreaElementBinding::Wrap)
}
}
impl<'a> VirtualMethods for JSRef<'a, HTMLAreaElement> {
fn super_type<'a>(&'a self) -> Option<&'a VirtualMethods> {
let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn after_set_attr(&self, name: &Atom, value: DOMString) {
match self.super_type() {
Some(ref s) => s.after_set_attr(name, value.clone()),
_ => (),
}
| }
}
fn before_remove_attr(&self, name: &Atom, value: DOMString) {
match self.super_type() {
Some(ref s) => s.before_remove_attr(name, value.clone()),
_ => (),
}
let node: JSRef<Node> = NodeCast::from_ref(*self);
match name.as_slice() {
"href" => node.set_enabled_state(false),
_ => ()
}
}
}
impl Reflectable for HTMLAreaElement {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.htmlelement.reflector()
}
} | let node: JSRef<Node> = NodeCast::from_ref(*self);
match name.as_slice() {
"href" => node.set_enabled_state(true),
_ => () | random_line_split |
htmlareaelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLAreaElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLAreaElementDerived;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, NodeCast};
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::HTMLAreaElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, NodeHelpers, ElementNodeTypeId};
use dom::virtualmethods::VirtualMethods;
use servo_util::atom::Atom;
use servo_util::str::DOMString;
#[deriving(Encodable)]
#[must_root]
pub struct HTMLAreaElement {
pub htmlelement: HTMLElement
}
impl HTMLAreaElementDerived for EventTarget {
fn is_htmlareaelement(&self) -> bool {
self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLAreaElementTypeId))
}
}
impl HTMLAreaElement {
pub fn new_inherited(localName: DOMString, document: JSRef<Document>) -> HTMLAreaElement {
HTMLAreaElement {
htmlelement: HTMLElement::new_inherited(HTMLAreaElementTypeId, localName, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, document: JSRef<Document>) -> Temporary<HTMLAreaElement> |
}
impl<'a> VirtualMethods for JSRef<'a, HTMLAreaElement> {
fn super_type<'a>(&'a self) -> Option<&'a VirtualMethods> {
let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn after_set_attr(&self, name: &Atom, value: DOMString) {
match self.super_type() {
Some(ref s) => s.after_set_attr(name, value.clone()),
_ => (),
}
let node: JSRef<Node> = NodeCast::from_ref(*self);
match name.as_slice() {
"href" => node.set_enabled_state(true),
_ => ()
}
}
fn before_remove_attr(&self, name: &Atom, value: DOMString) {
match self.super_type() {
Some(ref s) => s.before_remove_attr(name, value.clone()),
_ => (),
}
let node: JSRef<Node> = NodeCast::from_ref(*self);
match name.as_slice() {
"href" => node.set_enabled_state(false),
_ => ()
}
}
}
impl Reflectable for HTMLAreaElement {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.htmlelement.reflector()
}
}
| {
let element = HTMLAreaElement::new_inherited(localName, document);
Node::reflect_node(box element, document, HTMLAreaElementBinding::Wrap)
} | identifier_body |
htmlareaelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLAreaElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLAreaElementDerived;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, NodeCast};
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::HTMLAreaElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, NodeHelpers, ElementNodeTypeId};
use dom::virtualmethods::VirtualMethods;
use servo_util::atom::Atom;
use servo_util::str::DOMString;
#[deriving(Encodable)]
#[must_root]
pub struct HTMLAreaElement {
pub htmlelement: HTMLElement
}
impl HTMLAreaElementDerived for EventTarget {
fn | (&self) -> bool {
self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLAreaElementTypeId))
}
}
impl HTMLAreaElement {
pub fn new_inherited(localName: DOMString, document: JSRef<Document>) -> HTMLAreaElement {
HTMLAreaElement {
htmlelement: HTMLElement::new_inherited(HTMLAreaElementTypeId, localName, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, document: JSRef<Document>) -> Temporary<HTMLAreaElement> {
let element = HTMLAreaElement::new_inherited(localName, document);
Node::reflect_node(box element, document, HTMLAreaElementBinding::Wrap)
}
}
impl<'a> VirtualMethods for JSRef<'a, HTMLAreaElement> {
fn super_type<'a>(&'a self) -> Option<&'a VirtualMethods> {
let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn after_set_attr(&self, name: &Atom, value: DOMString) {
match self.super_type() {
Some(ref s) => s.after_set_attr(name, value.clone()),
_ => (),
}
let node: JSRef<Node> = NodeCast::from_ref(*self);
match name.as_slice() {
"href" => node.set_enabled_state(true),
_ => ()
}
}
fn before_remove_attr(&self, name: &Atom, value: DOMString) {
match self.super_type() {
Some(ref s) => s.before_remove_attr(name, value.clone()),
_ => (),
}
let node: JSRef<Node> = NodeCast::from_ref(*self);
match name.as_slice() {
"href" => node.set_enabled_state(false),
_ => ()
}
}
}
impl Reflectable for HTMLAreaElement {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.htmlelement.reflector()
}
}
| is_htmlareaelement | identifier_name |
selection.directive.ts | import { AfterContentInit, ChangeDetectorRef, ContentChildren, Directive, EventEmitter, HostBinding, Input, OnDestroy, Output, QueryList } from '@angular/core';
import { debounceTime, takeUntil } from 'rxjs/operators';
import { Subject } from 'rxjs';
import { SelectionItemDirective } from './selection-item.directive';
import { SelectionMode, SelectionService } from './selection.service';
import { SelectionStrategy } from './strategies/selection.strategy';
@Directive({
selector: '[uxSelection]',
exportAs: 'ux-selection',
providers: [SelectionService]
})
export class SelectionDirective<T> implements AfterContentInit, OnDestroy {
/** Defines the items that should be selected. */
@Input() set uxSelection(items: Array<T> | ReadonlyArray<T>) {
this._lastSelection = items;
this._selectionService.selectOnly(...items);
}
/** Can be used to enabled/disable selection behavior. */
@Input() set disabled(disabled: boolean) {
this._selectionService.setDisabled(disabled);
}
/**
* Defines the selection behavior. Alternatively, custom selection behavior can be defined by defining a
* class which extends SelectionStrategy, and providing an instance of the custom class to this property.
* See below for details of the SelectionStrategy class.
*/
@Input() set mode(mode: SelectionMode | SelectionStrategy<T>) {
this._selectionService.setStrategy(mode);
}
/**
* Can be used to enable/disable click selection on items. This can be used to manually control the selection of an item,
* for example, binding the selection state to a checkbox.
*/
@Input() set clickSelection(isClickEnabled: boolean) {
this._selectionService.isClickEnabled = isClickEnabled;
}
/** Can be used to enable/disable keyboard navigation on items. Use this if you wish to provide custom keyboard controls for selection. */
@Input() set keyboardSelection(isKeyboardEnabled: boolean) {
this._selectionService.isKeyboardEnabled = isKeyboardEnabled;
}
/**
* The full set of selection items.
* Only needed if the full set of `uxSelectionItem`s is not available, e.g. within a virtual scroll container.
*/
@Input() set selectionItems(value: T[]) {
this._hasExplicitDataset = !!value;
if (value) {
this._selectionService.dataset = value;
}
}
/** The tabstop of the selection outer element */
@Input() @HostBinding('attr.tabindex') tabindex: string | number = null;
/** This event will be triggered when there is a change to the selected items. It will contain an array of the currently selected items. */
@Output() uxSelectionChange = new EventEmitter<T[]>();
/** Access all items within the list */
@ContentChildren(SelectionItemDirective) items: QueryList<SelectionItemDirective<T>>;
/** Unsubscribe from all observables on component destroy */
private _onDestroy = new Subject<void>();
/** Store the previous selection so we don't emit more than we have to */
private _lastSelection: ReadonlyArray<T> = [];
/** Whether a value has been provided to the `selectionItems` input. */
private _hasExplicitDataset: boolean = false;
constructor(private _selectionService: SelectionService<T>, private _cdRef: ChangeDetectorRef) {
_selectionService.selection$.pipe(debounceTime(0), takeUntil(this._onDestroy)).subscribe(items => {
if (this.isSelectionChanged(items)) {
this.uxSelectionChange.emit(items);
}
// store the most recent selection
this._lastSelection = [...items];
});
}
ngAfterContentInit(): void {
// provide the initial list of selection items
this.update();
// if the list changes then inform the service
this.items.changes.pipe(takeUntil(this._onDestroy)).subscribe(() => this.update());
}
ngOnDestroy(): void {
this._onDestroy.next();
this._onDestroy.complete();
}
/**
* Update the dataset to reflect the latest selection items
*/
update(): void {
// Capture the set of data items from the ContentChildren, unless an explicit value has been provided.
if (!this._hasExplicitDataset) {
this._selectionService.dataset = this.items.map(item => item.uxSelectionItem);
}
// Make sure that a tab target has been defined so that the component can be tabbed to.
if (this._selectionService.focus$.getValue() === null && this._selectionService.dataset.length > 0) {
this._selectionService.focus$.next(this._selectionService.dataset[0]);
}
// The above could trigger a change in the computed tabindex for selection items
this._cdRef.detectChanges();
}
/**
* Select all the items in the list
*/
selectAll(): void {
if (this._selectionService.isEnabled) |
}
/**
* Deselect all currently selected items
*/
deselectAll(): void {
if (this._selectionService.isEnabled) {
this._selectionService.strategy.deselectAll();
}
}
/**
* Determine if the previous selection is the same as the current selection
*/
private isSelectionChanged(selection: T[]): boolean {
// fast, efficient check, if length is different they must have changed
if (!this._lastSelection && selection || this._lastSelection.length !== selection.length) {
return true;
}
// if both arrays have 0 items then they have not changed
if (this._lastSelection.length === 0 && selection.length === 0) {
return false;
}
// otherwise do a check on each item
return !this._lastSelection.every(item => selection.indexOf(item) !== -1);
}
}
| {
this._selectionService.strategy.selectAll();
} | conditional_block |
selection.directive.ts | import { AfterContentInit, ChangeDetectorRef, ContentChildren, Directive, EventEmitter, HostBinding, Input, OnDestroy, Output, QueryList } from '@angular/core';
import { debounceTime, takeUntil } from 'rxjs/operators';
import { Subject } from 'rxjs';
import { SelectionItemDirective } from './selection-item.directive';
import { SelectionMode, SelectionService } from './selection.service';
import { SelectionStrategy } from './strategies/selection.strategy';
@Directive({
selector: '[uxSelection]',
exportAs: 'ux-selection',
providers: [SelectionService]
})
export class SelectionDirective<T> implements AfterContentInit, OnDestroy {
/** Defines the items that should be selected. */
@Input() set uxSelection(items: Array<T> | ReadonlyArray<T>) {
this._lastSelection = items;
this._selectionService.selectOnly(...items);
}
/** Can be used to enabled/disable selection behavior. */
@Input() set disabled(disabled: boolean) {
this._selectionService.setDisabled(disabled);
}
/**
* Defines the selection behavior. Alternatively, custom selection behavior can be defined by defining a
* class which extends SelectionStrategy, and providing an instance of the custom class to this property.
* See below for details of the SelectionStrategy class.
*/
@Input() set mode(mode: SelectionMode | SelectionStrategy<T>) {
this._selectionService.setStrategy(mode);
}
/**
* Can be used to enable/disable click selection on items. This can be used to manually control the selection of an item,
* for example, binding the selection state to a checkbox.
*/
@Input() set clickSelection(isClickEnabled: boolean) {
this._selectionService.isClickEnabled = isClickEnabled;
}
/** Can be used to enable/disable keyboard navigation on items. Use this if you wish to provide custom keyboard controls for selection. */
@Input() set keyboardSelection(isKeyboardEnabled: boolean) {
this._selectionService.isKeyboardEnabled = isKeyboardEnabled;
}
/**
* The full set of selection items.
* Only needed if the full set of `uxSelectionItem`s is not available, e.g. within a virtual scroll container.
*/
@Input() set selectionItems(value: T[]) {
this._hasExplicitDataset = !!value;
if (value) {
this._selectionService.dataset = value;
}
}
/** The tabstop of the selection outer element */
@Input() @HostBinding('attr.tabindex') tabindex: string | number = null;
/** This event will be triggered when there is a change to the selected items. It will contain an array of the currently selected items. */
@Output() uxSelectionChange = new EventEmitter<T[]>();
/** Access all items within the list */
@ContentChildren(SelectionItemDirective) items: QueryList<SelectionItemDirective<T>>;
/** Unsubscribe from all observables on component destroy */
private _onDestroy = new Subject<void>();
|
constructor(private _selectionService: SelectionService<T>, private _cdRef: ChangeDetectorRef) {
_selectionService.selection$.pipe(debounceTime(0), takeUntil(this._onDestroy)).subscribe(items => {
if (this.isSelectionChanged(items)) {
this.uxSelectionChange.emit(items);
}
// store the most recent selection
this._lastSelection = [...items];
});
}
ngAfterContentInit(): void {
// provide the initial list of selection items
this.update();
// if the list changes then inform the service
this.items.changes.pipe(takeUntil(this._onDestroy)).subscribe(() => this.update());
}
ngOnDestroy(): void {
this._onDestroy.next();
this._onDestroy.complete();
}
/**
* Update the dataset to reflect the latest selection items
*/
update(): void {
// Capture the set of data items from the ContentChildren, unless an explicit value has been provided.
if (!this._hasExplicitDataset) {
this._selectionService.dataset = this.items.map(item => item.uxSelectionItem);
}
// Make sure that a tab target has been defined so that the component can be tabbed to.
if (this._selectionService.focus$.getValue() === null && this._selectionService.dataset.length > 0) {
this._selectionService.focus$.next(this._selectionService.dataset[0]);
}
// The above could trigger a change in the computed tabindex for selection items
this._cdRef.detectChanges();
}
/**
* Select all the items in the list
*/
selectAll(): void {
if (this._selectionService.isEnabled) {
this._selectionService.strategy.selectAll();
}
}
/**
* Deselect all currently selected items
*/
deselectAll(): void {
if (this._selectionService.isEnabled) {
this._selectionService.strategy.deselectAll();
}
}
/**
* Determine if the previous selection is the same as the current selection
*/
private isSelectionChanged(selection: T[]): boolean {
// fast, efficient check, if length is different they must have changed
if (!this._lastSelection && selection || this._lastSelection.length !== selection.length) {
return true;
}
// if both arrays have 0 items then they have not changed
if (this._lastSelection.length === 0 && selection.length === 0) {
return false;
}
// otherwise do a check on each item
return !this._lastSelection.every(item => selection.indexOf(item) !== -1);
}
} | /** Store the previous selection so we don't emit more than we have to */
private _lastSelection: ReadonlyArray<T> = [];
/** Whether a value has been provided to the `selectionItems` input. */
private _hasExplicitDataset: boolean = false; | random_line_split |
selection.directive.ts | import { AfterContentInit, ChangeDetectorRef, ContentChildren, Directive, EventEmitter, HostBinding, Input, OnDestroy, Output, QueryList } from '@angular/core';
import { debounceTime, takeUntil } from 'rxjs/operators';
import { Subject } from 'rxjs';
import { SelectionItemDirective } from './selection-item.directive';
import { SelectionMode, SelectionService } from './selection.service';
import { SelectionStrategy } from './strategies/selection.strategy';
@Directive({
selector: '[uxSelection]',
exportAs: 'ux-selection',
providers: [SelectionService]
})
export class SelectionDirective<T> implements AfterContentInit, OnDestroy {
/** Defines the items that should be selected. */
@Input() set uxSelection(items: Array<T> | ReadonlyArray<T>) {
this._lastSelection = items;
this._selectionService.selectOnly(...items);
}
/** Can be used to enabled/disable selection behavior. */
@Input() set disabled(disabled: boolean) {
this._selectionService.setDisabled(disabled);
}
/**
* Defines the selection behavior. Alternatively, custom selection behavior can be defined by defining a
* class which extends SelectionStrategy, and providing an instance of the custom class to this property.
* See below for details of the SelectionStrategy class.
*/
@Input() set mode(mode: SelectionMode | SelectionStrategy<T>) {
this._selectionService.setStrategy(mode);
}
/**
* Can be used to enable/disable click selection on items. This can be used to manually control the selection of an item,
* for example, binding the selection state to a checkbox.
*/
@Input() set clickSelection(isClickEnabled: boolean) {
this._selectionService.isClickEnabled = isClickEnabled;
}
/** Can be used to enable/disable keyboard navigation on items. Use this if you wish to provide custom keyboard controls for selection. */
@Input() set keyboardSelection(isKeyboardEnabled: boolean) {
this._selectionService.isKeyboardEnabled = isKeyboardEnabled;
}
/**
* The full set of selection items.
* Only needed if the full set of `uxSelectionItem`s is not available, e.g. within a virtual scroll container.
*/
@Input() set selectionItems(value: T[]) {
this._hasExplicitDataset = !!value;
if (value) {
this._selectionService.dataset = value;
}
}
/** The tabstop of the selection outer element */
@Input() @HostBinding('attr.tabindex') tabindex: string | number = null;
/** This event will be triggered when there is a change to the selected items. It will contain an array of the currently selected items. */
@Output() uxSelectionChange = new EventEmitter<T[]>();
/** Access all items within the list */
@ContentChildren(SelectionItemDirective) items: QueryList<SelectionItemDirective<T>>;
/** Unsubscribe from all observables on component destroy */
private _onDestroy = new Subject<void>();
/** Store the previous selection so we don't emit more than we have to */
private _lastSelection: ReadonlyArray<T> = [];
/** Whether a value has been provided to the `selectionItems` input. */
private _hasExplicitDataset: boolean = false;
constructor(private _selectionService: SelectionService<T>, private _cdRef: ChangeDetectorRef) {
_selectionService.selection$.pipe(debounceTime(0), takeUntil(this._onDestroy)).subscribe(items => {
if (this.isSelectionChanged(items)) {
this.uxSelectionChange.emit(items);
}
// store the most recent selection
this._lastSelection = [...items];
});
}
ngAfterContentInit(): void {
// provide the initial list of selection items
this.update();
// if the list changes then inform the service
this.items.changes.pipe(takeUntil(this._onDestroy)).subscribe(() => this.update());
}
ngOnDestroy(): void {
this._onDestroy.next();
this._onDestroy.complete();
}
/**
* Update the dataset to reflect the latest selection items
*/
update(): void {
// Capture the set of data items from the ContentChildren, unless an explicit value has been provided.
if (!this._hasExplicitDataset) {
this._selectionService.dataset = this.items.map(item => item.uxSelectionItem);
}
// Make sure that a tab target has been defined so that the component can be tabbed to.
if (this._selectionService.focus$.getValue() === null && this._selectionService.dataset.length > 0) {
this._selectionService.focus$.next(this._selectionService.dataset[0]);
}
// The above could trigger a change in the computed tabindex for selection items
this._cdRef.detectChanges();
}
/**
* Select all the items in the list
*/
| (): void {
if (this._selectionService.isEnabled) {
this._selectionService.strategy.selectAll();
}
}
/**
* Deselect all currently selected items
*/
deselectAll(): void {
if (this._selectionService.isEnabled) {
this._selectionService.strategy.deselectAll();
}
}
/**
* Determine if the previous selection is the same as the current selection
*/
private isSelectionChanged(selection: T[]): boolean {
// fast, efficient check, if length is different they must have changed
if (!this._lastSelection && selection || this._lastSelection.length !== selection.length) {
return true;
}
// if both arrays have 0 items then they have not changed
if (this._lastSelection.length === 0 && selection.length === 0) {
return false;
}
// otherwise do a check on each item
return !this._lastSelection.every(item => selection.indexOf(item) !== -1);
}
}
| selectAll | identifier_name |
selection.directive.ts | import { AfterContentInit, ChangeDetectorRef, ContentChildren, Directive, EventEmitter, HostBinding, Input, OnDestroy, Output, QueryList } from '@angular/core';
import { debounceTime, takeUntil } from 'rxjs/operators';
import { Subject } from 'rxjs';
import { SelectionItemDirective } from './selection-item.directive';
import { SelectionMode, SelectionService } from './selection.service';
import { SelectionStrategy } from './strategies/selection.strategy';
@Directive({
selector: '[uxSelection]',
exportAs: 'ux-selection',
providers: [SelectionService]
})
export class SelectionDirective<T> implements AfterContentInit, OnDestroy {
/** Defines the items that should be selected. */
@Input() set uxSelection(items: Array<T> | ReadonlyArray<T>) {
this._lastSelection = items;
this._selectionService.selectOnly(...items);
}
/** Can be used to enabled/disable selection behavior. */
@Input() set disabled(disabled: boolean) {
this._selectionService.setDisabled(disabled);
}
/**
* Defines the selection behavior. Alternatively, custom selection behavior can be defined by defining a
* class which extends SelectionStrategy, and providing an instance of the custom class to this property.
* See below for details of the SelectionStrategy class.
*/
@Input() set mode(mode: SelectionMode | SelectionStrategy<T>) {
this._selectionService.setStrategy(mode);
}
/**
* Can be used to enable/disable click selection on items. This can be used to manually control the selection of an item,
* for example, binding the selection state to a checkbox.
*/
@Input() set clickSelection(isClickEnabled: boolean) {
this._selectionService.isClickEnabled = isClickEnabled;
}
/** Can be used to enable/disable keyboard navigation on items. Use this if you wish to provide custom keyboard controls for selection. */
@Input() set keyboardSelection(isKeyboardEnabled: boolean) {
this._selectionService.isKeyboardEnabled = isKeyboardEnabled;
}
/**
* The full set of selection items.
* Only needed if the full set of `uxSelectionItem`s is not available, e.g. within a virtual scroll container.
*/
@Input() set selectionItems(value: T[]) {
this._hasExplicitDataset = !!value;
if (value) {
this._selectionService.dataset = value;
}
}
/** The tabstop of the selection outer element */
@Input() @HostBinding('attr.tabindex') tabindex: string | number = null;
/** This event will be triggered when there is a change to the selected items. It will contain an array of the currently selected items. */
@Output() uxSelectionChange = new EventEmitter<T[]>();
/** Access all items within the list */
@ContentChildren(SelectionItemDirective) items: QueryList<SelectionItemDirective<T>>;
/** Unsubscribe from all observables on component destroy */
private _onDestroy = new Subject<void>();
/** Store the previous selection so we don't emit more than we have to */
private _lastSelection: ReadonlyArray<T> = [];
/** Whether a value has been provided to the `selectionItems` input. */
private _hasExplicitDataset: boolean = false;
constructor(private _selectionService: SelectionService<T>, private _cdRef: ChangeDetectorRef) {
_selectionService.selection$.pipe(debounceTime(0), takeUntil(this._onDestroy)).subscribe(items => {
if (this.isSelectionChanged(items)) {
this.uxSelectionChange.emit(items);
}
// store the most recent selection
this._lastSelection = [...items];
});
}
ngAfterContentInit(): void {
// provide the initial list of selection items
this.update();
// if the list changes then inform the service
this.items.changes.pipe(takeUntil(this._onDestroy)).subscribe(() => this.update());
}
ngOnDestroy(): void {
this._onDestroy.next();
this._onDestroy.complete();
}
/**
* Update the dataset to reflect the latest selection items
*/
update(): void {
// Capture the set of data items from the ContentChildren, unless an explicit value has been provided.
if (!this._hasExplicitDataset) {
this._selectionService.dataset = this.items.map(item => item.uxSelectionItem);
}
// Make sure that a tab target has been defined so that the component can be tabbed to.
if (this._selectionService.focus$.getValue() === null && this._selectionService.dataset.length > 0) {
this._selectionService.focus$.next(this._selectionService.dataset[0]);
}
// The above could trigger a change in the computed tabindex for selection items
this._cdRef.detectChanges();
}
/**
* Select all the items in the list
*/
selectAll(): void |
/**
* Deselect all currently selected items
*/
deselectAll(): void {
if (this._selectionService.isEnabled) {
this._selectionService.strategy.deselectAll();
}
}
/**
* Determine if the previous selection is the same as the current selection
*/
private isSelectionChanged(selection: T[]): boolean {
// fast, efficient check, if length is different they must have changed
if (!this._lastSelection && selection || this._lastSelection.length !== selection.length) {
return true;
}
// if both arrays have 0 items then they have not changed
if (this._lastSelection.length === 0 && selection.length === 0) {
return false;
}
// otherwise do a check on each item
return !this._lastSelection.every(item => selection.indexOf(item) !== -1);
}
}
| {
if (this._selectionService.isEnabled) {
this._selectionService.strategy.selectAll();
}
} | identifier_body |
tops_sql.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# filename: tops_sql.py
# Copyright 2008-2010 Stefano Costa <[email protected]>
#
# This file is part of Total Open Station.
#
# Total Open Station is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Total Open Station is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Total Open Station. If not, see
# <http://www.gnu.org/licenses/>.
def to_sql(point, tablename):
'''Generate SQL line corresponding to the input point.
At this moment the column names are fixed, but they could change in the
future. The default names are reasonable.'''
params = {
'wkt': to_wkt(point),
'tablename': tablename,
'pid': point[0],
'text': point[4]}
sql_string = "INSERT INTO %(tablename)s" % params
sql_string += "(point_id, point_geom, point_text) VALUES"
sql_string += "(%(pid)s,GeomFromText('%(wkt)s'),'%(text)s');\n" % params
return sql_string
def to_wkt(point):
pid, x, y, z, text = point
wkt_representation = 'POINT(%s %s)' % (x, y)
return wkt_representation
class OutputFormat:
"""
Exports points data in SQL format suitable for use with PostGIS & friends.
http://postgis.refractions.net/documentation/manual-1.3/ch04.html#id2986280
has an example of loading an SQL file into a PostgreSQL database.
``data`` should be an iterable (e.g. list) containing one iterable (e.g.
tuple) for each point. The default order is PID, x, x, z, TEXT.
This is consistent with our current standard.
""" | self.data = data
self.tablename = tablename
def process(self):
lines = [to_sql(e, self.tablename) for e in self.data]
lines.insert(0, 'BEGIN;\n')
lines.append('COMMIT;\n')
output = "".join(lines)
return output
if __name__ == "__main__":
TotalOpenSQL(
[(1, 2, 3, 4, 'qwerty'),
("2.3", 42, 45, 12, 'asdfg')],
'prova') |
def __init__(self, data, tablename='topsdata'): | random_line_split |
tops_sql.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# filename: tops_sql.py
# Copyright 2008-2010 Stefano Costa <[email protected]>
#
# This file is part of Total Open Station.
#
# Total Open Station is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Total Open Station is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Total Open Station. If not, see
# <http://www.gnu.org/licenses/>.
def to_sql(point, tablename):
'''Generate SQL line corresponding to the input point.
At this moment the column names are fixed, but they could change in the
future. The default names are reasonable.'''
params = {
'wkt': to_wkt(point),
'tablename': tablename,
'pid': point[0],
'text': point[4]}
sql_string = "INSERT INTO %(tablename)s" % params
sql_string += "(point_id, point_geom, point_text) VALUES"
sql_string += "(%(pid)s,GeomFromText('%(wkt)s'),'%(text)s');\n" % params
return sql_string
def to_wkt(point):
pid, x, y, z, text = point
wkt_representation = 'POINT(%s %s)' % (x, y)
return wkt_representation
class OutputFormat:
"""
Exports points data in SQL format suitable for use with PostGIS & friends.
http://postgis.refractions.net/documentation/manual-1.3/ch04.html#id2986280
has an example of loading an SQL file into a PostgreSQL database.
``data`` should be an iterable (e.g. list) containing one iterable (e.g.
tuple) for each point. The default order is PID, x, x, z, TEXT.
This is consistent with our current standard.
"""
def __init__(self, data, tablename='topsdata'):
self.data = data
self.tablename = tablename
def process(self):
lines = [to_sql(e, self.tablename) for e in self.data]
lines.insert(0, 'BEGIN;\n')
lines.append('COMMIT;\n')
output = "".join(lines)
return output
if __name__ == "__main__":
| TotalOpenSQL(
[(1, 2, 3, 4, 'qwerty'),
("2.3", 42, 45, 12, 'asdfg')],
'prova') | conditional_block |
|
tops_sql.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# filename: tops_sql.py
# Copyright 2008-2010 Stefano Costa <[email protected]>
#
# This file is part of Total Open Station.
#
# Total Open Station is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Total Open Station is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Total Open Station. If not, see
# <http://www.gnu.org/licenses/>.
def to_sql(point, tablename):
'''Generate SQL line corresponding to the input point.
At this moment the column names are fixed, but they could change in the
future. The default names are reasonable.'''
params = {
'wkt': to_wkt(point),
'tablename': tablename,
'pid': point[0],
'text': point[4]}
sql_string = "INSERT INTO %(tablename)s" % params
sql_string += "(point_id, point_geom, point_text) VALUES"
sql_string += "(%(pid)s,GeomFromText('%(wkt)s'),'%(text)s');\n" % params
return sql_string
def to_wkt(point):
pid, x, y, z, text = point
wkt_representation = 'POINT(%s %s)' % (x, y)
return wkt_representation
class OutputFormat:
"""
Exports points data in SQL format suitable for use with PostGIS & friends.
http://postgis.refractions.net/documentation/manual-1.3/ch04.html#id2986280
has an example of loading an SQL file into a PostgreSQL database.
``data`` should be an iterable (e.g. list) containing one iterable (e.g.
tuple) for each point. The default order is PID, x, x, z, TEXT.
This is consistent with our current standard.
"""
def __init__(self, data, tablename='topsdata'):
self.data = data
self.tablename = tablename
def | (self):
lines = [to_sql(e, self.tablename) for e in self.data]
lines.insert(0, 'BEGIN;\n')
lines.append('COMMIT;\n')
output = "".join(lines)
return output
if __name__ == "__main__":
TotalOpenSQL(
[(1, 2, 3, 4, 'qwerty'),
("2.3", 42, 45, 12, 'asdfg')],
'prova')
| process | identifier_name |
tops_sql.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# filename: tops_sql.py
# Copyright 2008-2010 Stefano Costa <[email protected]>
#
# This file is part of Total Open Station.
#
# Total Open Station is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Total Open Station is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Total Open Station. If not, see
# <http://www.gnu.org/licenses/>.
def to_sql(point, tablename):
'''Generate SQL line corresponding to the input point.
At this moment the column names are fixed, but they could change in the
future. The default names are reasonable.'''
params = {
'wkt': to_wkt(point),
'tablename': tablename,
'pid': point[0],
'text': point[4]}
sql_string = "INSERT INTO %(tablename)s" % params
sql_string += "(point_id, point_geom, point_text) VALUES"
sql_string += "(%(pid)s,GeomFromText('%(wkt)s'),'%(text)s');\n" % params
return sql_string
def to_wkt(point):
pid, x, y, z, text = point
wkt_representation = 'POINT(%s %s)' % (x, y)
return wkt_representation
class OutputFormat:
"""
Exports points data in SQL format suitable for use with PostGIS & friends.
http://postgis.refractions.net/documentation/manual-1.3/ch04.html#id2986280
has an example of loading an SQL file into a PostgreSQL database.
``data`` should be an iterable (e.g. list) containing one iterable (e.g.
tuple) for each point. The default order is PID, x, x, z, TEXT.
This is consistent with our current standard.
"""
def __init__(self, data, tablename='topsdata'):
|
def process(self):
lines = [to_sql(e, self.tablename) for e in self.data]
lines.insert(0, 'BEGIN;\n')
lines.append('COMMIT;\n')
output = "".join(lines)
return output
if __name__ == "__main__":
TotalOpenSQL(
[(1, 2, 3, 4, 'qwerty'),
("2.3", 42, 45, 12, 'asdfg')],
'prova')
| self.data = data
self.tablename = tablename | identifier_body |
sizing.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! https://drafts.csswg.org/css-sizing/
use crate::style_ext::ComputedValuesExt;
use style::properties::ComputedValues;
use style::values::computed::{Length, LengthPercentage, Percentage};
use style::values::generics::length::MaxSize;
use style::Zero;
/// Which min/max-content values should be computed during box construction
#[derive(Clone, Copy, Debug)]
pub(crate) enum ContentSizesRequest {
Inline,
None,
}
impl ContentSizesRequest {
pub fn inline_if(condition: bool) -> Self {
if condition {
Self::Inline
} else {
Self::None
}
}
pub fn requests_inline(self) -> bool {
match self {
Self::Inline => true,
Self::None => false,
}
}
pub fn if_requests_inline<T>(self, f: impl FnOnce() -> T) -> Option<T> {
match self {
Self::Inline => Some(f()),
Self::None => None,
}
}
pub fn compute(self, compute_inline: impl FnOnce() -> ContentSizes) -> BoxContentSizes {
match self {
Self::Inline => BoxContentSizes::Inline(compute_inline()),
Self::None => BoxContentSizes::NoneWereRequested,
}
}
}
#[derive(Clone, Debug, Serialize)]
pub(crate) struct ContentSizes {
pub min_content: Length,
pub max_content: Length,
}
/// https://drafts.csswg.org/css-sizing/#intrinsic-sizes
impl ContentSizes {
pub fn zero() -> Self {
Self {
min_content: Length::zero(),
max_content: Length::zero(),
}
}
pub fn max_assign(&mut self, other: &Self) {
self.min_content.max_assign(other.min_content);
self.max_content.max_assign(other.max_content);
}
/// Relevant to outer intrinsic inline sizes, for percentages from padding and margin.
pub fn adjust_for_pbm_percentages(&mut self, percentages: Percentage) {
// " Note that this may yield an infinite result, but undefined results
// (zero divided by zero) must be treated as zero. "
if self.max_content.px() == 0. {
// Avoid a potential `NaN`.
// Zero is already the result we want regardless of `denominator`.
} else {
let denominator = (1. - percentages.0).max(0.);
self.max_content = Length::new(self.max_content.px() / denominator);
}
}
}
/// Optional min/max-content for storage in the box tree
#[derive(Debug, Serialize)]
pub(crate) enum BoxContentSizes {
NoneWereRequested, // … during box construction
Inline(ContentSizes),
}
impl BoxContentSizes {
fn expect_inline(&self) -> &ContentSizes {
match self {
Self::NoneWereRequested => panic!("Accessing content size that was not requested"),
Self::Inline(s) => s,
}
}
/// https://dbaron.org/css/intrinsic/#outer-intrinsic
pub fn outer_inline(&self, style: &ComputedValues) -> ContentSizes {
let (mut outer, percentages) = self.outer_inline_and_percentages(style);
outer.adjust_for_pbm_percentages(percentages);
outer
}
pub(crate) fn outer_inline_and_percentages(
&self,
style: &ComputedValues,
) -> (ContentSizes, Percentage) {
// FIXME: account for 'box-sizing'
let inline_size = style.box_size().inline;
let min_inline_size = style
.min_box_size()
.inline
.percentage_relative_to(Length::zero())
.auto_is(Length::zero);
let max_inline_size = match style.max_box_size().inline {
MaxSize::None => None,
MaxSize::LengthPercentage(ref lp) => lp.to_length(),
};
let clamp = |l: Length| l.clamp_between_extremums(min_inline_size, max_inline_size);
// Percentages for 'width' are treated as 'auto'
let inline_size = inline_size.map(|lp| lp.to_length());
// The (inner) min/max-content are only used for 'auto'
let mut outer = match inline_size.non_auto().flatten() {
None => {
let inner = self.expect_inline().clone();
ContentSizes {
min_content: clamp(inner.min_content),
max_content: clamp(inner.max_content),
}
},
Some(length) => {
let length = clamp(length);
ContentSizes {
min_content: length,
max_content: length,
}
},
};
let mut pbm_lengths = Length::zero(); | let border = style.border_width();
let margin = style.margin();
pbm_lengths += border.inline_sum();
let mut add = |x: LengthPercentage| {
if let Some(l) = x.to_length() {
pbm_lengths += l;
}
if let Some(p) = x.to_percentage() {
pbm_percentages += p;
}
};
add(padding.inline_start);
add(padding.inline_end);
margin.inline_start.non_auto().map(&mut add);
margin.inline_end.non_auto().map(&mut add);
outer.min_content += pbm_lengths;
outer.max_content += pbm_lengths;
(outer, pbm_percentages)
}
/// https://drafts.csswg.org/css2/visudet.html#shrink-to-fit-float
pub(crate) fn shrink_to_fit(&self, available_size: Length) -> Length {
let inline = self.expect_inline();
available_size
.max(inline.min_content)
.min(inline.max_content)
}
} | let mut pbm_percentages = Percentage::zero();
let padding = style.padding(); | random_line_split |
sizing.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! https://drafts.csswg.org/css-sizing/
use crate::style_ext::ComputedValuesExt;
use style::properties::ComputedValues;
use style::values::computed::{Length, LengthPercentage, Percentage};
use style::values::generics::length::MaxSize;
use style::Zero;
/// Which min/max-content values should be computed during box construction
#[derive(Clone, Copy, Debug)]
pub(crate) enum ContentSizesRequest {
Inline,
None,
}
impl ContentSizesRequest {
pub fn inline_if(condition: bool) -> Self {
if condition {
Self::Inline
} else {
Self::None
}
}
pub fn requests_inline(self) -> bool {
match self {
Self::Inline => true,
Self::None => false,
}
}
pub fn if_requests_inline<T>(self, f: impl FnOnce() -> T) -> Option<T> {
match self {
Self::Inline => Some(f()),
Self::None => None,
}
}
pub fn compute(self, compute_inline: impl FnOnce() -> ContentSizes) -> BoxContentSizes {
match self {
Self::Inline => BoxContentSizes::Inline(compute_inline()),
Self::None => BoxContentSizes::NoneWereRequested,
}
}
}
#[derive(Clone, Debug, Serialize)]
pub(crate) struct ContentSizes {
pub min_content: Length,
pub max_content: Length,
}
/// https://drafts.csswg.org/css-sizing/#intrinsic-sizes
impl ContentSizes {
pub fn zero() -> Self {
Self {
min_content: Length::zero(),
max_content: Length::zero(),
}
}
pub fn max_assign(&mut self, other: &Self) {
self.min_content.max_assign(other.min_content);
self.max_content.max_assign(other.max_content);
}
/// Relevant to outer intrinsic inline sizes, for percentages from padding and margin.
pub fn adjust_for_pbm_percentages(&mut self, percentages: Percentage) {
// " Note that this may yield an infinite result, but undefined results
// (zero divided by zero) must be treated as zero. "
if self.max_content.px() == 0. {
// Avoid a potential `NaN`.
// Zero is already the result we want regardless of `denominator`.
} else {
let denominator = (1. - percentages.0).max(0.);
self.max_content = Length::new(self.max_content.px() / denominator);
}
}
}
/// Optional min/max-content for storage in the box tree
#[derive(Debug, Serialize)]
pub(crate) enum BoxContentSizes {
NoneWereRequested, // … during box construction
Inline(ContentSizes),
}
impl BoxContentSizes {
fn expect_inline(&self) -> &ContentSizes {
match self {
Self::NoneWereRequested => panic!("Accessing content size that was not requested"),
Self::Inline(s) => s,
}
}
/// https://dbaron.org/css/intrinsic/#outer-intrinsic
pub fn outer_inline(&self, style: &ComputedValues) -> ContentSizes {
let (mut outer, percentages) = self.outer_inline_and_percentages(style);
outer.adjust_for_pbm_percentages(percentages);
outer
}
pub(crate) fn outer_inline_and_percentages(
&self,
style: &ComputedValues,
) -> (ContentSizes, Percentage) {
// FIXME: account for 'box-sizing'
let inline_size = style.box_size().inline;
let min_inline_size = style
.min_box_size()
.inline
.percentage_relative_to(Length::zero())
.auto_is(Length::zero);
let max_inline_size = match style.max_box_size().inline {
MaxSize::None => None,
MaxSize::LengthPercentage(ref lp) => lp.to_length(),
};
let clamp = |l: Length| l.clamp_between_extremums(min_inline_size, max_inline_size);
// Percentages for 'width' are treated as 'auto'
let inline_size = inline_size.map(|lp| lp.to_length());
// The (inner) min/max-content are only used for 'auto'
let mut outer = match inline_size.non_auto().flatten() {
None => {
let inner = self.expect_inline().clone();
ContentSizes {
min_content: clamp(inner.min_content),
max_content: clamp(inner.max_content),
}
},
Some(length) => {
let length = clamp(length);
ContentSizes {
min_content: length,
max_content: length,
}
},
};
let mut pbm_lengths = Length::zero();
let mut pbm_percentages = Percentage::zero();
let padding = style.padding();
let border = style.border_width();
let margin = style.margin();
pbm_lengths += border.inline_sum();
let mut add = |x: LengthPercentage| {
if let Some(l) = x.to_length() {
pbm_lengths += l;
}
if let Some(p) = x.to_percentage() {
pbm_percentages += p;
}
};
add(padding.inline_start);
add(padding.inline_end);
margin.inline_start.non_auto().map(&mut add);
margin.inline_end.non_auto().map(&mut add);
outer.min_content += pbm_lengths;
outer.max_content += pbm_lengths;
(outer, pbm_percentages)
}
/// https://drafts.csswg.org/css2/visudet.html#shrink-to-fit-float
pub(crate) fn shrink_to_fit(&self, available_size: Length) -> Length {
| let inline = self.expect_inline();
available_size
.max(inline.min_content)
.min(inline.max_content)
}
} | identifier_body |
|
sizing.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! https://drafts.csswg.org/css-sizing/
use crate::style_ext::ComputedValuesExt;
use style::properties::ComputedValues;
use style::values::computed::{Length, LengthPercentage, Percentage};
use style::values::generics::length::MaxSize;
use style::Zero;
/// Which min/max-content values should be computed during box construction
#[derive(Clone, Copy, Debug)]
pub(crate) enum ContentSizesRequest {
Inline,
None,
}
impl ContentSizesRequest {
pub fn inline_if(condition: bool) -> Self {
if condition {
Self::Inline
} else {
Self::None
}
}
pub fn requests_inline(self) -> bool {
match self {
Self::Inline => true,
Self::None => false,
}
}
pub fn if_requests_inline<T>(self, f: impl FnOnce() -> T) -> Option<T> {
match self {
Self::Inline => Some(f()),
Self::None => None,
}
}
pub fn compute(self, compute_inline: impl FnOnce() -> ContentSizes) -> BoxContentSizes {
match self {
Self::Inline => BoxContentSizes::Inline(compute_inline()),
Self::None => BoxContentSizes::NoneWereRequested,
}
}
}
#[derive(Clone, Debug, Serialize)]
pub(crate) struct ContentSizes {
pub min_content: Length,
pub max_content: Length,
}
/// https://drafts.csswg.org/css-sizing/#intrinsic-sizes
impl ContentSizes {
pub fn | () -> Self {
Self {
min_content: Length::zero(),
max_content: Length::zero(),
}
}
pub fn max_assign(&mut self, other: &Self) {
self.min_content.max_assign(other.min_content);
self.max_content.max_assign(other.max_content);
}
/// Relevant to outer intrinsic inline sizes, for percentages from padding and margin.
pub fn adjust_for_pbm_percentages(&mut self, percentages: Percentage) {
// " Note that this may yield an infinite result, but undefined results
// (zero divided by zero) must be treated as zero. "
if self.max_content.px() == 0. {
// Avoid a potential `NaN`.
// Zero is already the result we want regardless of `denominator`.
} else {
let denominator = (1. - percentages.0).max(0.);
self.max_content = Length::new(self.max_content.px() / denominator);
}
}
}
/// Optional min/max-content for storage in the box tree
#[derive(Debug, Serialize)]
pub(crate) enum BoxContentSizes {
NoneWereRequested, // … during box construction
Inline(ContentSizes),
}
impl BoxContentSizes {
fn expect_inline(&self) -> &ContentSizes {
match self {
Self::NoneWereRequested => panic!("Accessing content size that was not requested"),
Self::Inline(s) => s,
}
}
/// https://dbaron.org/css/intrinsic/#outer-intrinsic
pub fn outer_inline(&self, style: &ComputedValues) -> ContentSizes {
let (mut outer, percentages) = self.outer_inline_and_percentages(style);
outer.adjust_for_pbm_percentages(percentages);
outer
}
pub(crate) fn outer_inline_and_percentages(
&self,
style: &ComputedValues,
) -> (ContentSizes, Percentage) {
// FIXME: account for 'box-sizing'
let inline_size = style.box_size().inline;
let min_inline_size = style
.min_box_size()
.inline
.percentage_relative_to(Length::zero())
.auto_is(Length::zero);
let max_inline_size = match style.max_box_size().inline {
MaxSize::None => None,
MaxSize::LengthPercentage(ref lp) => lp.to_length(),
};
let clamp = |l: Length| l.clamp_between_extremums(min_inline_size, max_inline_size);
// Percentages for 'width' are treated as 'auto'
let inline_size = inline_size.map(|lp| lp.to_length());
// The (inner) min/max-content are only used for 'auto'
let mut outer = match inline_size.non_auto().flatten() {
None => {
let inner = self.expect_inline().clone();
ContentSizes {
min_content: clamp(inner.min_content),
max_content: clamp(inner.max_content),
}
},
Some(length) => {
let length = clamp(length);
ContentSizes {
min_content: length,
max_content: length,
}
},
};
let mut pbm_lengths = Length::zero();
let mut pbm_percentages = Percentage::zero();
let padding = style.padding();
let border = style.border_width();
let margin = style.margin();
pbm_lengths += border.inline_sum();
let mut add = |x: LengthPercentage| {
if let Some(l) = x.to_length() {
pbm_lengths += l;
}
if let Some(p) = x.to_percentage() {
pbm_percentages += p;
}
};
add(padding.inline_start);
add(padding.inline_end);
margin.inline_start.non_auto().map(&mut add);
margin.inline_end.non_auto().map(&mut add);
outer.min_content += pbm_lengths;
outer.max_content += pbm_lengths;
(outer, pbm_percentages)
}
/// https://drafts.csswg.org/css2/visudet.html#shrink-to-fit-float
pub(crate) fn shrink_to_fit(&self, available_size: Length) -> Length {
let inline = self.expect_inline();
available_size
.max(inline.min_content)
.min(inline.max_content)
}
}
| zero | identifier_name |
webcontentedit.py | # -*- coding: utf-8 -*-
# Copyright(C) 2010 Romain Bignon
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# python2.5 compatibility
from __future__ import with_statement
import os
import sys
import tempfile
from weboob.core.bcall import CallErrors
from weboob.capabilities.content import ICapContent
from weboob.tools.application.repl import ReplApplication
__all__ = ['WebContentEdit']
class WebContentEdit(ReplApplication):
APPNAME = 'webcontentedit'
VERSION = '0.4'
COPYRIGHT = 'Copyright(C) 2010 Romain Bignon'
CAPS = ICapContent
def | (self, line):
"""
edit ID
Edit a content with $EDITOR, then push it on the website.
"""
contents = []
for id in line.split():
_id, backend_name = self.parse_id(id)
backend_names = (backend_name,) if backend_name is not None else self.enabled_backends
contents += [content for backend, content in self.do('get_content', _id, backends=backend_names) if content]
if len(contents) == 0:
print >>sys.stderr, 'No contents found'
return 1
paths = {}
for content in contents:
tmpdir = os.path.join(tempfile.gettempdir(), "weboob")
if not os.path.isdir(tmpdir):
os.makedirs(tmpdir)
fd, path = tempfile.mkstemp(prefix='%s_' % content.id.replace(os.path.sep, '_'), dir=tmpdir)
with os.fdopen(fd, 'w') as f:
data = content.content
if isinstance(data, unicode):
data = data.encode('utf-8')
f.write(data)
paths[path] = content
params = ''
if os.environ['EDITOR'] == 'vim':
params = '-p'
os.system("$EDITOR %s %s" % (params, ' '.join(paths.iterkeys())))
for path, content in paths.iteritems():
with open(path, 'r') as f:
data = f.read()
try:
data = data.decode('utf-8')
except UnicodeError:
pass
if content.content != data:
content.content = data
else:
contents.remove(content)
if len(contents) == 0:
print 'No changes. Abort.'
return
print 'Contents changed:\n%s' % ('\n'.join([' * %s' % content.id for content in contents]))
message = self.ask('Enter a commit message', default='')
if not self.ask('Do you want to push?', default=True):
return
errors = CallErrors([])
for content in contents:
path = [path for path, c in paths.iteritems() if c == content][0]
sys.stdout.write('Pushing %s...' % content.id)
sys.stdout.flush()
try:
self.do('push_content', content, message, backends=[content.backend]).wait()
except CallErrors, e:
errors.errors += e.errors
sys.stdout.write(' error (content saved in %s)\n' % path)
else:
sys.stdout.write(' done\n')
os.unlink(path)
if len(errors.errors) > 0:
raise errors
| do_edit | identifier_name |
webcontentedit.py | # -*- coding: utf-8 -*-
# Copyright(C) 2010 Romain Bignon
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# python2.5 compatibility
from __future__ import with_statement
import os
import sys
import tempfile
from weboob.core.bcall import CallErrors
from weboob.capabilities.content import ICapContent
from weboob.tools.application.repl import ReplApplication
__all__ = ['WebContentEdit']
class WebContentEdit(ReplApplication):
APPNAME = 'webcontentedit'
VERSION = '0.4'
COPYRIGHT = 'Copyright(C) 2010 Romain Bignon'
CAPS = ICapContent
def do_edit(self, line):
"""
edit ID
Edit a content with $EDITOR, then push it on the website.
"""
contents = []
for id in line.split():
_id, backend_name = self.parse_id(id)
backend_names = (backend_name,) if backend_name is not None else self.enabled_backends
contents += [content for backend, content in self.do('get_content', _id, backends=backend_names) if content]
if len(contents) == 0:
print >>sys.stderr, 'No contents found'
return 1
paths = {}
for content in contents:
tmpdir = os.path.join(tempfile.gettempdir(), "weboob")
if not os.path.isdir(tmpdir):
os.makedirs(tmpdir)
fd, path = tempfile.mkstemp(prefix='%s_' % content.id.replace(os.path.sep, '_'), dir=tmpdir)
with os.fdopen(fd, 'w') as f:
data = content.content
if isinstance(data, unicode):
data = data.encode('utf-8')
f.write(data)
paths[path] = content
params = ''
if os.environ['EDITOR'] == 'vim':
params = '-p'
os.system("$EDITOR %s %s" % (params, ' '.join(paths.iterkeys())))
for path, content in paths.iteritems():
with open(path, 'r') as f:
data = f.read()
try:
data = data.decode('utf-8')
except UnicodeError:
pass
if content.content != data:
|
else:
contents.remove(content)
if len(contents) == 0:
print 'No changes. Abort.'
return
print 'Contents changed:\n%s' % ('\n'.join([' * %s' % content.id for content in contents]))
message = self.ask('Enter a commit message', default='')
if not self.ask('Do you want to push?', default=True):
return
errors = CallErrors([])
for content in contents:
path = [path for path, c in paths.iteritems() if c == content][0]
sys.stdout.write('Pushing %s...' % content.id)
sys.stdout.flush()
try:
self.do('push_content', content, message, backends=[content.backend]).wait()
except CallErrors, e:
errors.errors += e.errors
sys.stdout.write(' error (content saved in %s)\n' % path)
else:
sys.stdout.write(' done\n')
os.unlink(path)
if len(errors.errors) > 0:
raise errors
| content.content = data | conditional_block |
webcontentedit.py | # -*- coding: utf-8 -*-
# Copyright(C) 2010 Romain Bignon
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# python2.5 compatibility
from __future__ import with_statement
import os
import sys
import tempfile
from weboob.core.bcall import CallErrors
from weboob.capabilities.content import ICapContent
from weboob.tools.application.repl import ReplApplication
__all__ = ['WebContentEdit']
class WebContentEdit(ReplApplication):
| APPNAME = 'webcontentedit'
VERSION = '0.4'
COPYRIGHT = 'Copyright(C) 2010 Romain Bignon'
CAPS = ICapContent
def do_edit(self, line):
"""
edit ID
Edit a content with $EDITOR, then push it on the website.
"""
contents = []
for id in line.split():
_id, backend_name = self.parse_id(id)
backend_names = (backend_name,) if backend_name is not None else self.enabled_backends
contents += [content for backend, content in self.do('get_content', _id, backends=backend_names) if content]
if len(contents) == 0:
print >>sys.stderr, 'No contents found'
return 1
paths = {}
for content in contents:
tmpdir = os.path.join(tempfile.gettempdir(), "weboob")
if not os.path.isdir(tmpdir):
os.makedirs(tmpdir)
fd, path = tempfile.mkstemp(prefix='%s_' % content.id.replace(os.path.sep, '_'), dir=tmpdir)
with os.fdopen(fd, 'w') as f:
data = content.content
if isinstance(data, unicode):
data = data.encode('utf-8')
f.write(data)
paths[path] = content
params = ''
if os.environ['EDITOR'] == 'vim':
params = '-p'
os.system("$EDITOR %s %s" % (params, ' '.join(paths.iterkeys())))
for path, content in paths.iteritems():
with open(path, 'r') as f:
data = f.read()
try:
data = data.decode('utf-8')
except UnicodeError:
pass
if content.content != data:
content.content = data
else:
contents.remove(content)
if len(contents) == 0:
print 'No changes. Abort.'
return
print 'Contents changed:\n%s' % ('\n'.join([' * %s' % content.id for content in contents]))
message = self.ask('Enter a commit message', default='')
if not self.ask('Do you want to push?', default=True):
return
errors = CallErrors([])
for content in contents:
path = [path for path, c in paths.iteritems() if c == content][0]
sys.stdout.write('Pushing %s...' % content.id)
sys.stdout.flush()
try:
self.do('push_content', content, message, backends=[content.backend]).wait()
except CallErrors, e:
errors.errors += e.errors
sys.stdout.write(' error (content saved in %s)\n' % path)
else:
sys.stdout.write(' done\n')
os.unlink(path)
if len(errors.errors) > 0:
raise errors | identifier_body |
|
webcontentedit.py | # -*- coding: utf-8 -*-
# Copyright(C) 2010 Romain Bignon | # it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# python2.5 compatibility
from __future__ import with_statement
import os
import sys
import tempfile
from weboob.core.bcall import CallErrors
from weboob.capabilities.content import ICapContent
from weboob.tools.application.repl import ReplApplication
__all__ = ['WebContentEdit']
class WebContentEdit(ReplApplication):
APPNAME = 'webcontentedit'
VERSION = '0.4'
COPYRIGHT = 'Copyright(C) 2010 Romain Bignon'
CAPS = ICapContent
def do_edit(self, line):
"""
edit ID
Edit a content with $EDITOR, then push it on the website.
"""
contents = []
for id in line.split():
_id, backend_name = self.parse_id(id)
backend_names = (backend_name,) if backend_name is not None else self.enabled_backends
contents += [content for backend, content in self.do('get_content', _id, backends=backend_names) if content]
if len(contents) == 0:
print >>sys.stderr, 'No contents found'
return 1
paths = {}
for content in contents:
tmpdir = os.path.join(tempfile.gettempdir(), "weboob")
if not os.path.isdir(tmpdir):
os.makedirs(tmpdir)
fd, path = tempfile.mkstemp(prefix='%s_' % content.id.replace(os.path.sep, '_'), dir=tmpdir)
with os.fdopen(fd, 'w') as f:
data = content.content
if isinstance(data, unicode):
data = data.encode('utf-8')
f.write(data)
paths[path] = content
params = ''
if os.environ['EDITOR'] == 'vim':
params = '-p'
os.system("$EDITOR %s %s" % (params, ' '.join(paths.iterkeys())))
for path, content in paths.iteritems():
with open(path, 'r') as f:
data = f.read()
try:
data = data.decode('utf-8')
except UnicodeError:
pass
if content.content != data:
content.content = data
else:
contents.remove(content)
if len(contents) == 0:
print 'No changes. Abort.'
return
print 'Contents changed:\n%s' % ('\n'.join([' * %s' % content.id for content in contents]))
message = self.ask('Enter a commit message', default='')
if not self.ask('Do you want to push?', default=True):
return
errors = CallErrors([])
for content in contents:
path = [path for path, c in paths.iteritems() if c == content][0]
sys.stdout.write('Pushing %s...' % content.id)
sys.stdout.flush()
try:
self.do('push_content', content, message, backends=[content.backend]).wait()
except CallErrors, e:
errors.errors += e.errors
sys.stdout.write(' error (content saved in %s)\n' % path)
else:
sys.stdout.write(' done\n')
os.unlink(path)
if len(errors.errors) > 0:
raise errors | #
# This program is free software; you can redistribute it and/or modify | random_line_split |
requireSource.ts | /*
This is a slightly-modified version of preconstruct's hook for use with
keystone project files in the monorepo. Importantly it doesn't accept a cwd and
sets rootMode: "upward-optional"
*/
import { addHook } from 'pirates';
import * as babel from '@babel/core';
import sourceMapSupport from 'source-map-support';
const EXTENSIONS = ['.js', '.jsx', '.ts', '.tsx'];
const hook = () => {
let compiling = false;
let sourceMaps: Record<string, any> = {};
let needsToInstallSourceMapSupport = true;
function compileHook(code: string, filename: string) {
if (compiling) return code;
// we do this lazily because jest has its own require implementation
// which means preconstruct's require hook won't be run
// so we don't want to install source map support because that will mess up
// jest's source map support
if (needsToInstallSourceMapSupport) {
sourceMapSupport.install({
environment: 'node',
| (source) {
let map = sourceMaps[source];
if (map !== undefined) {
return { url: source, map };
} else {
return null;
}
},
});
needsToInstallSourceMapSupport = false;
}
try {
compiling = true;
const output = babel.transformSync(code, {
filename,
presets: [require.resolve('next/babel')],
configFile: false,
babelrc: false,
sourceMaps: 'both',
})!;
sourceMaps[filename] = output.map;
return output.code!;
} finally {
compiling = false;
}
}
return addHook(compileHook, {
exts: EXTENSIONS,
});
};
export const requireSource = (filePath: string) => {
const unregister = hook();
const result = require(filePath);
unregister();
return result;
};
| retrieveSourceMap | identifier_name |
requireSource.ts | /*
This is a slightly-modified version of preconstruct's hook for use with
keystone project files in the monorepo. Importantly it doesn't accept a cwd and
sets rootMode: "upward-optional"
*/
import { addHook } from 'pirates';
import * as babel from '@babel/core';
import sourceMapSupport from 'source-map-support';
const EXTENSIONS = ['.js', '.jsx', '.ts', '.tsx'];
const hook = () => {
let compiling = false;
let sourceMaps: Record<string, any> = {};
let needsToInstallSourceMapSupport = true;
function compileHook(code: string, filename: string) {
if (compiling) return code;
// we do this lazily because jest has its own require implementation
// which means preconstruct's require hook won't be run
// so we don't want to install source map support because that will mess up
// jest's source map support
if (needsToInstallSourceMapSupport) {
sourceMapSupport.install({
environment: 'node',
retrieveSourceMap(source) | ,
});
needsToInstallSourceMapSupport = false;
}
try {
compiling = true;
const output = babel.transformSync(code, {
filename,
presets: [require.resolve('next/babel')],
configFile: false,
babelrc: false,
sourceMaps: 'both',
})!;
sourceMaps[filename] = output.map;
return output.code!;
} finally {
compiling = false;
}
}
return addHook(compileHook, {
exts: EXTENSIONS,
});
};
export const requireSource = (filePath: string) => {
const unregister = hook();
const result = require(filePath);
unregister();
return result;
};
| {
let map = sourceMaps[source];
if (map !== undefined) {
return { url: source, map };
} else {
return null;
}
} | identifier_body |
requireSource.ts | /*
This is a slightly-modified version of preconstruct's hook for use with
keystone project files in the monorepo. Importantly it doesn't accept a cwd and
sets rootMode: "upward-optional"
*/
import { addHook } from 'pirates';
import * as babel from '@babel/core';
import sourceMapSupport from 'source-map-support';
const EXTENSIONS = ['.js', '.jsx', '.ts', '.tsx'];
const hook = () => {
let compiling = false;
let sourceMaps: Record<string, any> = {};
let needsToInstallSourceMapSupport = true;
function compileHook(code: string, filename: string) {
if (compiling) return code;
// we do this lazily because jest has its own require implementation
// which means preconstruct's require hook won't be run
// so we don't want to install source map support because that will mess up
// jest's source map support
if (needsToInstallSourceMapSupport) |
try {
compiling = true;
const output = babel.transformSync(code, {
filename,
presets: [require.resolve('next/babel')],
configFile: false,
babelrc: false,
sourceMaps: 'both',
})!;
sourceMaps[filename] = output.map;
return output.code!;
} finally {
compiling = false;
}
}
return addHook(compileHook, {
exts: EXTENSIONS,
});
};
export const requireSource = (filePath: string) => {
const unregister = hook();
const result = require(filePath);
unregister();
return result;
};
| {
sourceMapSupport.install({
environment: 'node',
retrieveSourceMap(source) {
let map = sourceMaps[source];
if (map !== undefined) {
return { url: source, map };
} else {
return null;
}
},
});
needsToInstallSourceMapSupport = false;
} | conditional_block |
requireSource.ts | /*
This is a slightly-modified version of preconstruct's hook for use with
keystone project files in the monorepo. Importantly it doesn't accept a cwd and
sets rootMode: "upward-optional"
*/
import { addHook } from 'pirates';
import * as babel from '@babel/core';
import sourceMapSupport from 'source-map-support'; |
const hook = () => {
let compiling = false;
let sourceMaps: Record<string, any> = {};
let needsToInstallSourceMapSupport = true;
function compileHook(code: string, filename: string) {
if (compiling) return code;
// we do this lazily because jest has its own require implementation
// which means preconstruct's require hook won't be run
// so we don't want to install source map support because that will mess up
// jest's source map support
if (needsToInstallSourceMapSupport) {
sourceMapSupport.install({
environment: 'node',
retrieveSourceMap(source) {
let map = sourceMaps[source];
if (map !== undefined) {
return { url: source, map };
} else {
return null;
}
},
});
needsToInstallSourceMapSupport = false;
}
try {
compiling = true;
const output = babel.transformSync(code, {
filename,
presets: [require.resolve('next/babel')],
configFile: false,
babelrc: false,
sourceMaps: 'both',
})!;
sourceMaps[filename] = output.map;
return output.code!;
} finally {
compiling = false;
}
}
return addHook(compileHook, {
exts: EXTENSIONS,
});
};
export const requireSource = (filePath: string) => {
const unregister = hook();
const result = require(filePath);
unregister();
return result;
}; |
const EXTENSIONS = ['.js', '.jsx', '.ts', '.tsx']; | random_line_split |
worker.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use devtools_traits::{DevtoolsPageInfo, ScriptToDevtoolsControlMsg};
use dom::abstractworker::{SharedRt, SimpleWorkerErrorHandler};
use dom::abstractworker::WorkerScriptMsg;
use dom::bindings::codegen::Bindings::WorkerBinding;
use dom::bindings::codegen::Bindings::WorkerBinding::WorkerMethods;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::refcounted::Trusted;
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::bindings::structuredclone::StructuredCloneData;
use dom::dedicatedworkerglobalscope::DedicatedWorkerGlobalScope;
use dom::eventtarget::EventTarget;
use dom::globalscope::GlobalScope;
use dom::messageevent::MessageEvent;
use dom::workerglobalscope::prepare_workerscope_init;
use dom_struct::dom_struct;
use ipc_channel::ipc;
use js::jsapi::{HandleValue, JSAutoCompartment, JSContext};
use js::jsval::UndefinedValue;
use script_traits::WorkerScriptLoadOrigin;
use std::cell::Cell;
use std::sync::{Arc, Mutex};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::{Sender, channel};
use task::TaskOnce;
pub type TrustedWorkerAddress = Trusted<Worker>;
// https://html.spec.whatwg.org/multipage/#worker
#[dom_struct]
pub struct Worker {
eventtarget: EventTarget,
#[ignore_malloc_size_of = "Defined in std"]
/// Sender to the Receiver associated with the DedicatedWorkerGlobalScope
/// this Worker created.
sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>,
#[ignore_malloc_size_of = "Arc"]
closing: Arc<AtomicBool>,
#[ignore_malloc_size_of = "Defined in rust-mozjs"]
runtime: Arc<Mutex<Option<SharedRt>>>,
terminated: Cell<bool>,
}
impl Worker {
fn new_inherited(sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>,
closing: Arc<AtomicBool>) -> Worker {
Worker {
eventtarget: EventTarget::new_inherited(),
sender: sender,
closing: closing,
runtime: Arc::new(Mutex::new(None)),
terminated: Cell::new(false),
}
}
pub fn new(global: &GlobalScope,
sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>,
closing: Arc<AtomicBool>) -> DomRoot<Worker> {
reflect_dom_object(Box::new(Worker::new_inherited(sender, closing)),
global,
WorkerBinding::Wrap)
}
// https://html.spec.whatwg.org/multipage/#dom-worker
#[allow(unsafe_code)]
pub fn Constructor(global: &GlobalScope, script_url: DOMString) -> Fallible<DomRoot<Worker>> {
// Step 2-4.
let worker_url = match global.api_base_url().join(&script_url) {
Ok(url) => url,
Err(_) => return Err(Error::Syntax),
};
let (sender, receiver) = channel();
let closing = Arc::new(AtomicBool::new(false));
let worker = Worker::new(global, sender.clone(), closing.clone());
let worker_ref = Trusted::new(&*worker);
let worker_load_origin = WorkerScriptLoadOrigin {
referrer_url: None,
referrer_policy: None,
pipeline_id: Some(global.pipeline_id()),
};
let (devtools_sender, devtools_receiver) = ipc::channel().unwrap();
let worker_id = global.get_next_worker_id();
if let Some(ref chan) = global.devtools_chan() {
let pipeline_id = global.pipeline_id();
let title = format!("Worker for {}", worker_url);
let page_info = DevtoolsPageInfo {
title: title,
url: worker_url.clone(),
};
let _ = chan.send(ScriptToDevtoolsControlMsg::NewGlobal((pipeline_id, Some(worker_id)),
devtools_sender.clone(),
page_info));
}
let init = prepare_workerscope_init(global, Some(devtools_sender));
DedicatedWorkerGlobalScope::run_worker_scope(
init, worker_url, devtools_receiver, worker.runtime.clone(), worker_ref,
global.script_chan(), sender, receiver, worker_load_origin, closing);
Ok(worker)
}
pub fn is_closing(&self) -> bool {
self.closing.load(Ordering::SeqCst)
}
pub fn is_terminated(&self) -> bool {
self.terminated.get()
}
pub fn handle_message(address: TrustedWorkerAddress,
data: StructuredCloneData) {
let worker = address.root();
if worker.is_terminated() {
return;
}
let global = worker.global();
let target = worker.upcast();
let _ac = JSAutoCompartment::new(global.get_cx(), target.reflector().get_jsobject().get());
rooted!(in(global.get_cx()) let mut message = UndefinedValue());
data.read(&global, message.handle_mut());
MessageEvent::dispatch_jsval(target, &global, message.handle());
}
pub fn | (address: TrustedWorkerAddress) {
let worker = address.root();
worker.upcast().fire_event(atom!("error"));
}
}
impl WorkerMethods for Worker {
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-worker-postmessage
unsafe fn PostMessage(&self, cx: *mut JSContext, message: HandleValue) -> ErrorResult {
let data = StructuredCloneData::write(cx, message)?;
let address = Trusted::new(self);
// NOTE: step 9 of https://html.spec.whatwg.org/multipage/#dom-messageport-postmessage
// indicates that a nonexistent communication channel should result in a silent error.
let _ = self.sender.send((address, WorkerScriptMsg::DOMMessage(data)));
Ok(())
}
// https://html.spec.whatwg.org/multipage/#terminate-a-worker
fn Terminate(&self) {
// Step 1
if self.closing.swap(true, Ordering::SeqCst) {
return;
}
// Step 2
self.terminated.set(true);
// Step 3
if let Some(runtime) = *self.runtime.lock().unwrap() {
runtime.request_interrupt();
}
}
// https://html.spec.whatwg.org/multipage/#handler-worker-onmessage
event_handler!(message, GetOnmessage, SetOnmessage);
// https://html.spec.whatwg.org/multipage/#handler-workerglobalscope-onerror
event_handler!(error, GetOnerror, SetOnerror);
}
impl TaskOnce for SimpleWorkerErrorHandler<Worker> {
#[allow(unrooted_must_root)]
fn run_once(self) {
Worker::dispatch_simple_error(self.addr);
}
}
| dispatch_simple_error | identifier_name |
worker.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use devtools_traits::{DevtoolsPageInfo, ScriptToDevtoolsControlMsg};
use dom::abstractworker::{SharedRt, SimpleWorkerErrorHandler};
use dom::abstractworker::WorkerScriptMsg;
use dom::bindings::codegen::Bindings::WorkerBinding;
use dom::bindings::codegen::Bindings::WorkerBinding::WorkerMethods;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::refcounted::Trusted;
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::bindings::structuredclone::StructuredCloneData;
use dom::dedicatedworkerglobalscope::DedicatedWorkerGlobalScope;
use dom::eventtarget::EventTarget;
use dom::globalscope::GlobalScope;
use dom::messageevent::MessageEvent;
use dom::workerglobalscope::prepare_workerscope_init;
use dom_struct::dom_struct;
use ipc_channel::ipc;
use js::jsapi::{HandleValue, JSAutoCompartment, JSContext};
use js::jsval::UndefinedValue;
use script_traits::WorkerScriptLoadOrigin;
use std::cell::Cell;
use std::sync::{Arc, Mutex};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::{Sender, channel};
use task::TaskOnce;
pub type TrustedWorkerAddress = Trusted<Worker>;
// https://html.spec.whatwg.org/multipage/#worker
#[dom_struct]
pub struct Worker {
eventtarget: EventTarget,
#[ignore_malloc_size_of = "Defined in std"]
/// Sender to the Receiver associated with the DedicatedWorkerGlobalScope
/// this Worker created.
sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>,
#[ignore_malloc_size_of = "Arc"]
closing: Arc<AtomicBool>,
#[ignore_malloc_size_of = "Defined in rust-mozjs"]
runtime: Arc<Mutex<Option<SharedRt>>>,
terminated: Cell<bool>,
}
impl Worker {
fn new_inherited(sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>,
closing: Arc<AtomicBool>) -> Worker {
Worker {
eventtarget: EventTarget::new_inherited(),
sender: sender,
closing: closing,
runtime: Arc::new(Mutex::new(None)),
terminated: Cell::new(false),
}
}
pub fn new(global: &GlobalScope,
sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>,
closing: Arc<AtomicBool>) -> DomRoot<Worker> {
reflect_dom_object(Box::new(Worker::new_inherited(sender, closing)),
global,
WorkerBinding::Wrap)
}
// https://html.spec.whatwg.org/multipage/#dom-worker
#[allow(unsafe_code)]
pub fn Constructor(global: &GlobalScope, script_url: DOMString) -> Fallible<DomRoot<Worker>> {
// Step 2-4.
let worker_url = match global.api_base_url().join(&script_url) {
Ok(url) => url,
Err(_) => return Err(Error::Syntax),
};
let (sender, receiver) = channel();
let closing = Arc::new(AtomicBool::new(false));
let worker = Worker::new(global, sender.clone(), closing.clone());
let worker_ref = Trusted::new(&*worker);
let worker_load_origin = WorkerScriptLoadOrigin {
referrer_url: None,
referrer_policy: None,
pipeline_id: Some(global.pipeline_id()),
};
let (devtools_sender, devtools_receiver) = ipc::channel().unwrap();
let worker_id = global.get_next_worker_id();
if let Some(ref chan) = global.devtools_chan() {
let pipeline_id = global.pipeline_id();
let title = format!("Worker for {}", worker_url);
let page_info = DevtoolsPageInfo {
title: title,
url: worker_url.clone(),
};
let _ = chan.send(ScriptToDevtoolsControlMsg::NewGlobal((pipeline_id, Some(worker_id)),
devtools_sender.clone(),
page_info));
}
let init = prepare_workerscope_init(global, Some(devtools_sender));
DedicatedWorkerGlobalScope::run_worker_scope(
init, worker_url, devtools_receiver, worker.runtime.clone(), worker_ref,
global.script_chan(), sender, receiver, worker_load_origin, closing);
Ok(worker)
}
pub fn is_closing(&self) -> bool {
self.closing.load(Ordering::SeqCst)
}
pub fn is_terminated(&self) -> bool {
self.terminated.get()
}
pub fn handle_message(address: TrustedWorkerAddress,
data: StructuredCloneData) {
let worker = address.root();
if worker.is_terminated() |
let global = worker.global();
let target = worker.upcast();
let _ac = JSAutoCompartment::new(global.get_cx(), target.reflector().get_jsobject().get());
rooted!(in(global.get_cx()) let mut message = UndefinedValue());
data.read(&global, message.handle_mut());
MessageEvent::dispatch_jsval(target, &global, message.handle());
}
pub fn dispatch_simple_error(address: TrustedWorkerAddress) {
let worker = address.root();
worker.upcast().fire_event(atom!("error"));
}
}
impl WorkerMethods for Worker {
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-worker-postmessage
unsafe fn PostMessage(&self, cx: *mut JSContext, message: HandleValue) -> ErrorResult {
let data = StructuredCloneData::write(cx, message)?;
let address = Trusted::new(self);
// NOTE: step 9 of https://html.spec.whatwg.org/multipage/#dom-messageport-postmessage
// indicates that a nonexistent communication channel should result in a silent error.
let _ = self.sender.send((address, WorkerScriptMsg::DOMMessage(data)));
Ok(())
}
// https://html.spec.whatwg.org/multipage/#terminate-a-worker
fn Terminate(&self) {
// Step 1
if self.closing.swap(true, Ordering::SeqCst) {
return;
}
// Step 2
self.terminated.set(true);
// Step 3
if let Some(runtime) = *self.runtime.lock().unwrap() {
runtime.request_interrupt();
}
}
// https://html.spec.whatwg.org/multipage/#handler-worker-onmessage
event_handler!(message, GetOnmessage, SetOnmessage);
// https://html.spec.whatwg.org/multipage/#handler-workerglobalscope-onerror
event_handler!(error, GetOnerror, SetOnerror);
}
impl TaskOnce for SimpleWorkerErrorHandler<Worker> {
#[allow(unrooted_must_root)]
fn run_once(self) {
Worker::dispatch_simple_error(self.addr);
}
}
| {
return;
} | conditional_block |
worker.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use devtools_traits::{DevtoolsPageInfo, ScriptToDevtoolsControlMsg};
use dom::abstractworker::{SharedRt, SimpleWorkerErrorHandler};
use dom::abstractworker::WorkerScriptMsg;
use dom::bindings::codegen::Bindings::WorkerBinding;
use dom::bindings::codegen::Bindings::WorkerBinding::WorkerMethods;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::refcounted::Trusted;
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::bindings::structuredclone::StructuredCloneData;
use dom::dedicatedworkerglobalscope::DedicatedWorkerGlobalScope;
use dom::eventtarget::EventTarget;
use dom::globalscope::GlobalScope;
use dom::messageevent::MessageEvent;
use dom::workerglobalscope::prepare_workerscope_init;
use dom_struct::dom_struct;
use ipc_channel::ipc;
use js::jsapi::{HandleValue, JSAutoCompartment, JSContext};
use js::jsval::UndefinedValue;
use script_traits::WorkerScriptLoadOrigin;
use std::cell::Cell;
use std::sync::{Arc, Mutex};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::{Sender, channel};
use task::TaskOnce;
pub type TrustedWorkerAddress = Trusted<Worker>;
// https://html.spec.whatwg.org/multipage/#worker
#[dom_struct]
pub struct Worker {
eventtarget: EventTarget,
#[ignore_malloc_size_of = "Defined in std"]
/// Sender to the Receiver associated with the DedicatedWorkerGlobalScope
/// this Worker created.
sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>,
#[ignore_malloc_size_of = "Arc"]
closing: Arc<AtomicBool>,
#[ignore_malloc_size_of = "Defined in rust-mozjs"]
runtime: Arc<Mutex<Option<SharedRt>>>,
terminated: Cell<bool>,
}
impl Worker {
fn new_inherited(sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>,
closing: Arc<AtomicBool>) -> Worker {
Worker {
eventtarget: EventTarget::new_inherited(),
sender: sender,
closing: closing,
runtime: Arc::new(Mutex::new(None)),
terminated: Cell::new(false),
}
} |
pub fn new(global: &GlobalScope,
sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>,
closing: Arc<AtomicBool>) -> DomRoot<Worker> {
reflect_dom_object(Box::new(Worker::new_inherited(sender, closing)),
global,
WorkerBinding::Wrap)
}
// https://html.spec.whatwg.org/multipage/#dom-worker
#[allow(unsafe_code)]
pub fn Constructor(global: &GlobalScope, script_url: DOMString) -> Fallible<DomRoot<Worker>> {
// Step 2-4.
let worker_url = match global.api_base_url().join(&script_url) {
Ok(url) => url,
Err(_) => return Err(Error::Syntax),
};
let (sender, receiver) = channel();
let closing = Arc::new(AtomicBool::new(false));
let worker = Worker::new(global, sender.clone(), closing.clone());
let worker_ref = Trusted::new(&*worker);
let worker_load_origin = WorkerScriptLoadOrigin {
referrer_url: None,
referrer_policy: None,
pipeline_id: Some(global.pipeline_id()),
};
let (devtools_sender, devtools_receiver) = ipc::channel().unwrap();
let worker_id = global.get_next_worker_id();
if let Some(ref chan) = global.devtools_chan() {
let pipeline_id = global.pipeline_id();
let title = format!("Worker for {}", worker_url);
let page_info = DevtoolsPageInfo {
title: title,
url: worker_url.clone(),
};
let _ = chan.send(ScriptToDevtoolsControlMsg::NewGlobal((pipeline_id, Some(worker_id)),
devtools_sender.clone(),
page_info));
}
let init = prepare_workerscope_init(global, Some(devtools_sender));
DedicatedWorkerGlobalScope::run_worker_scope(
init, worker_url, devtools_receiver, worker.runtime.clone(), worker_ref,
global.script_chan(), sender, receiver, worker_load_origin, closing);
Ok(worker)
}
pub fn is_closing(&self) -> bool {
self.closing.load(Ordering::SeqCst)
}
pub fn is_terminated(&self) -> bool {
self.terminated.get()
}
pub fn handle_message(address: TrustedWorkerAddress,
data: StructuredCloneData) {
let worker = address.root();
if worker.is_terminated() {
return;
}
let global = worker.global();
let target = worker.upcast();
let _ac = JSAutoCompartment::new(global.get_cx(), target.reflector().get_jsobject().get());
rooted!(in(global.get_cx()) let mut message = UndefinedValue());
data.read(&global, message.handle_mut());
MessageEvent::dispatch_jsval(target, &global, message.handle());
}
pub fn dispatch_simple_error(address: TrustedWorkerAddress) {
let worker = address.root();
worker.upcast().fire_event(atom!("error"));
}
}
impl WorkerMethods for Worker {
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-worker-postmessage
unsafe fn PostMessage(&self, cx: *mut JSContext, message: HandleValue) -> ErrorResult {
let data = StructuredCloneData::write(cx, message)?;
let address = Trusted::new(self);
// NOTE: step 9 of https://html.spec.whatwg.org/multipage/#dom-messageport-postmessage
// indicates that a nonexistent communication channel should result in a silent error.
let _ = self.sender.send((address, WorkerScriptMsg::DOMMessage(data)));
Ok(())
}
// https://html.spec.whatwg.org/multipage/#terminate-a-worker
fn Terminate(&self) {
// Step 1
if self.closing.swap(true, Ordering::SeqCst) {
return;
}
// Step 2
self.terminated.set(true);
// Step 3
if let Some(runtime) = *self.runtime.lock().unwrap() {
runtime.request_interrupt();
}
}
// https://html.spec.whatwg.org/multipage/#handler-worker-onmessage
event_handler!(message, GetOnmessage, SetOnmessage);
// https://html.spec.whatwg.org/multipage/#handler-workerglobalscope-onerror
event_handler!(error, GetOnerror, SetOnerror);
}
impl TaskOnce for SimpleWorkerErrorHandler<Worker> {
#[allow(unrooted_must_root)]
fn run_once(self) {
Worker::dispatch_simple_error(self.addr);
}
} | random_line_split |
|
interlis_model.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import math
import re
from hub.formats import Format, Formatter
from hub.structures.file import File
from hub.structures.frame import OdhType
class InterlisModelFormat(Format):
name = 'INTERLIS1Model'
label = 'INTERLIS 1 Modell'
description = """
Modell für INTERLIS 1. Dies wird automatisch generiert aus den vorhandenen Daten und sollte von Hand korrigiert
werden
"""
extension = 'ili'
@classmethod
def is_format(cls, input_file, *args, **kwargs):
# ILI is a write-only format for the moment, so identifying it doesn't help us, really.
return False
class InterlisModelFormatter(Formatter):
targets = InterlisModelFormat,
@classmethod
def format(cls, dfs, name, format, *args, **kwargs):
tables = []
for df in dfs:
tables.append(Table(df.name, df))
model = Model(name, [Topic(name, tables)])
return [File.from_string(name + '.ili', model.get_model_definition()).file_group]
class Model(object):
def __init__(self, name, topics):
self.name = sanitize_name(name)
self.topics = topics
def get_model_definition(self):
result = 'TRANSFER {}; \n\n'.format(self.name)
result += '!! ACHTUNG: Dies ist ein automatisch generiertes Modell und sollte nicht ohne Anpassungen \n'
result += '!! verwendet werden.\n\n'
domain = {}
for topic in self.topics:
for table in topic.tables:
domain.update(table.domain)
if len(domain) > 0:
result += 'DOMAIN\n\n'
for k, v in domain.iteritems():
result += '\t{} = {};\n'.format(k, v)
result += '\nMODEL {}\n\n'.format(self.name)
for topic in self.topics:
result += topic.get_topic_definition()
result += '\nEND {}.\n\n'.format(self.name)
result += 'FORMAT FREE;\n\n'
result += '\nCODE\n\tBLANK = DEFAULT, UNDEFINED = DEFAULT, CONTINUE = DEFAULT;\n\t TID = ANY;\n\nEND.'
return result
class Topic(object):
def _ | self, name, tables):
self.name = sanitize_name(name)
self.tables = tables
def get_topic_definition(self):
result = 'TOPIC {} = \n\n'.format(self.name)
for table in self.tables:
result += table.get_table_definition()
result += '\nEND {}.\n'.format(self.name)
return result
class Table(object):
def __init__(self, name, df):
self.name = sanitize_name(name)
self.df = df
self.fields, self.domain = self.get_fields()
def get_table_definition(self):
result = '\tTABLE {} = \n'.format(self.name)
for field in self.fields:
result += '\t\t{}: {};\n'.format(sanitize_name(field[0]), field[1])
result += '\tNO IDENT\n'
result += '\tEND {};\n'.format(self.name)
return result
def next_nines(self, x):
'''
results in the next series of 999...
'''
return int(10 ** (math.floor(math.log10(x) + 1)) - 1)
def get_bounds(self, name):
bounds = self.df[name].geom_op('bounds')
min = bounds.min()
max = bounds.max()
return [min.minx, min.miny, max.maxx, max.maxy]
def get_fields(self):
domain = {}
fields = []
for name in self.df.columns:
type = self.df[name].odh_type
ili_type = '!! Unbekannter Typ'
if type == OdhType.TEXT:
max_length = self.df[name].str.len().max() if self.df[name].any() else 10
ili_type = 'TEXT*{}'.format(int(max_length))
elif type in (OdhType.INTEGER, OdhType.BIGINT, OdhType.SMALLINT):
min = self.df[name].min()
min = -self.next_nines(-min) if min and min < 0 else 0
max = self.df[name].max()
max = self.next_nines(max) if max and max > 0 else 0
ili_type = '[{} .. {}]'.format(min, max)
elif type == OdhType.FLOAT:
max = self.df[name].max()
max = self.next_nines(max) if max and max > 0 else 0
ili_type = '[0.000 .. {}.999]'.format(max)
elif type == OdhType.BOOLEAN:
ili_type = 'BOOLEAN'
domain['BOOLEAN'] = '(True, False)'
elif type == OdhType.DATETIME:
ili_type = 'DATE' # actually, this can't include time in interlis. oh well.
else:
first_valid = self.df[name].first_valid_index()
if type == OdhType.GEOMETRY and first_valid is not None:
import shapely.geometry as shp
value = self.df[name][first_valid]
if isinstance(value, shp.Point):
ili_type = 'POINT'
domain['POINT'] = 'COORD2 {:.3f} {:.3f} {:.3f} {:.3f}'.format(*self.get_bounds(name))
elif isinstance(value, (shp.LineString, shp.LinearRing)):
ili_type = ('POLYLINE WITH (STRAIGHTS) '
'VERTEX COORD2 {:.3f} {:.3f} {:.3f} {:.3f} '
'WITHOUT OVERLAPS > 0.001').format(*self.get_bounds(name))
elif isinstance(value, shp.Polygon):
ili_type = ('AREA WITH (STRAIGHTS) '
'VERTEX COORD2 {:.3f} {:.3f} {:.3f} {:.3f} '
'WITHOUT OVERLAPS > 0.001').format(*self.get_bounds(name))
else:
ili_type = '!! Geometrie-Feld'
optional = 'OPTIONAL ' if self.df[name].isnull().any() else ''
fields.append((name, optional + ili_type))
return fields, domain
def sanitize_name(name):
sanitized = re.sub(r'[^A-Za-z0-9_\s]', '', name)
return ''.join([s.capitalize() for s in re.split(r'\s', sanitized.strip())])
| _init__( | identifier_name |
interlis_model.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import math
import re
from hub.formats import Format, Formatter
from hub.structures.file import File
from hub.structures.frame import OdhType
class InterlisModelFormat(Format):
name = 'INTERLIS1Model'
label = 'INTERLIS 1 Modell'
description = """
Modell für INTERLIS 1. Dies wird automatisch generiert aus den vorhandenen Daten und sollte von Hand korrigiert
werden
"""
extension = 'ili'
@classmethod
def is_format(cls, input_file, *args, **kwargs):
# ILI is a write-only format for the moment, so identifying it doesn't help us, really.
return False
class InterlisModelFormatter(Formatter):
targets = InterlisModelFormat,
@classmethod
def format(cls, dfs, name, format, *args, **kwargs):
tables = []
for df in dfs:
tables.append(Table(df.name, df))
model = Model(name, [Topic(name, tables)])
return [File.from_string(name + '.ili', model.get_model_definition()).file_group]
class Model(object):
def __init__(self, name, topics):
self.name = sanitize_name(name)
self.topics = topics
def get_model_definition(self):
result = 'TRANSFER {}; \n\n'.format(self.name)
result += '!! ACHTUNG: Dies ist ein automatisch generiertes Modell und sollte nicht ohne Anpassungen \n'
result += '!! verwendet werden.\n\n'
domain = {}
for topic in self.topics:
for table in topic.tables:
domain.update(table.domain)
if len(domain) > 0:
r |
result += '\nMODEL {}\n\n'.format(self.name)
for topic in self.topics:
result += topic.get_topic_definition()
result += '\nEND {}.\n\n'.format(self.name)
result += 'FORMAT FREE;\n\n'
result += '\nCODE\n\tBLANK = DEFAULT, UNDEFINED = DEFAULT, CONTINUE = DEFAULT;\n\t TID = ANY;\n\nEND.'
return result
class Topic(object):
def __init__(self, name, tables):
self.name = sanitize_name(name)
self.tables = tables
def get_topic_definition(self):
result = 'TOPIC {} = \n\n'.format(self.name)
for table in self.tables:
result += table.get_table_definition()
result += '\nEND {}.\n'.format(self.name)
return result
class Table(object):
def __init__(self, name, df):
self.name = sanitize_name(name)
self.df = df
self.fields, self.domain = self.get_fields()
def get_table_definition(self):
result = '\tTABLE {} = \n'.format(self.name)
for field in self.fields:
result += '\t\t{}: {};\n'.format(sanitize_name(field[0]), field[1])
result += '\tNO IDENT\n'
result += '\tEND {};\n'.format(self.name)
return result
def next_nines(self, x):
'''
results in the next series of 999...
'''
return int(10 ** (math.floor(math.log10(x) + 1)) - 1)
def get_bounds(self, name):
bounds = self.df[name].geom_op('bounds')
min = bounds.min()
max = bounds.max()
return [min.minx, min.miny, max.maxx, max.maxy]
def get_fields(self):
domain = {}
fields = []
for name in self.df.columns:
type = self.df[name].odh_type
ili_type = '!! Unbekannter Typ'
if type == OdhType.TEXT:
max_length = self.df[name].str.len().max() if self.df[name].any() else 10
ili_type = 'TEXT*{}'.format(int(max_length))
elif type in (OdhType.INTEGER, OdhType.BIGINT, OdhType.SMALLINT):
min = self.df[name].min()
min = -self.next_nines(-min) if min and min < 0 else 0
max = self.df[name].max()
max = self.next_nines(max) if max and max > 0 else 0
ili_type = '[{} .. {}]'.format(min, max)
elif type == OdhType.FLOAT:
max = self.df[name].max()
max = self.next_nines(max) if max and max > 0 else 0
ili_type = '[0.000 .. {}.999]'.format(max)
elif type == OdhType.BOOLEAN:
ili_type = 'BOOLEAN'
domain['BOOLEAN'] = '(True, False)'
elif type == OdhType.DATETIME:
ili_type = 'DATE' # actually, this can't include time in interlis. oh well.
else:
first_valid = self.df[name].first_valid_index()
if type == OdhType.GEOMETRY and first_valid is not None:
import shapely.geometry as shp
value = self.df[name][first_valid]
if isinstance(value, shp.Point):
ili_type = 'POINT'
domain['POINT'] = 'COORD2 {:.3f} {:.3f} {:.3f} {:.3f}'.format(*self.get_bounds(name))
elif isinstance(value, (shp.LineString, shp.LinearRing)):
ili_type = ('POLYLINE WITH (STRAIGHTS) '
'VERTEX COORD2 {:.3f} {:.3f} {:.3f} {:.3f} '
'WITHOUT OVERLAPS > 0.001').format(*self.get_bounds(name))
elif isinstance(value, shp.Polygon):
ili_type = ('AREA WITH (STRAIGHTS) '
'VERTEX COORD2 {:.3f} {:.3f} {:.3f} {:.3f} '
'WITHOUT OVERLAPS > 0.001').format(*self.get_bounds(name))
else:
ili_type = '!! Geometrie-Feld'
optional = 'OPTIONAL ' if self.df[name].isnull().any() else ''
fields.append((name, optional + ili_type))
return fields, domain
def sanitize_name(name):
sanitized = re.sub(r'[^A-Za-z0-9_\s]', '', name)
return ''.join([s.capitalize() for s in re.split(r'\s', sanitized.strip())])
| esult += 'DOMAIN\n\n'
for k, v in domain.iteritems():
result += '\t{} = {};\n'.format(k, v)
| conditional_block |
interlis_model.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import math
import re
from hub.formats import Format, Formatter
from hub.structures.file import File
from hub.structures.frame import OdhType
class InterlisModelFormat(Format):
name = 'INTERLIS1Model'
label = 'INTERLIS 1 Modell'
description = """
Modell für INTERLIS 1. Dies wird automatisch generiert aus den vorhandenen Daten und sollte von Hand korrigiert
werden
"""
extension = 'ili'
@classmethod
def is_format(cls, input_file, *args, **kwargs):
# ILI is a write-only format for the moment, so identifying it doesn't help us, really.
return False
class InterlisModelFormatter(Formatter):
targets = InterlisModelFormat,
@classmethod
def format(cls, dfs, name, format, *args, **kwargs):
tables = []
for df in dfs:
tables.append(Table(df.name, df))
model = Model(name, [Topic(name, tables)])
return [File.from_string(name + '.ili', model.get_model_definition()).file_group]
class Model(object):
def __init__(self, name, topics):
self.name = sanitize_name(name)
self.topics = topics
def get_model_definition(self):
result = 'TRANSFER {}; \n\n'.format(self.name)
result += '!! ACHTUNG: Dies ist ein automatisch generiertes Modell und sollte nicht ohne Anpassungen \n'
result += '!! verwendet werden.\n\n'
domain = {}
for topic in self.topics:
for table in topic.tables:
domain.update(table.domain)
if len(domain) > 0:
result += 'DOMAIN\n\n'
for k, v in domain.iteritems():
result += '\t{} = {};\n'.format(k, v)
result += '\nMODEL {}\n\n'.format(self.name)
for topic in self.topics:
result += topic.get_topic_definition()
result += '\nEND {}.\n\n'.format(self.name)
result += 'FORMAT FREE;\n\n'
result += '\nCODE\n\tBLANK = DEFAULT, UNDEFINED = DEFAULT, CONTINUE = DEFAULT;\n\t TID = ANY;\n\nEND.'
return result
class Topic(object):
d |
class Table(object):
def __init__(self, name, df):
self.name = sanitize_name(name)
self.df = df
self.fields, self.domain = self.get_fields()
def get_table_definition(self):
result = '\tTABLE {} = \n'.format(self.name)
for field in self.fields:
result += '\t\t{}: {};\n'.format(sanitize_name(field[0]), field[1])
result += '\tNO IDENT\n'
result += '\tEND {};\n'.format(self.name)
return result
def next_nines(self, x):
'''
results in the next series of 999...
'''
return int(10 ** (math.floor(math.log10(x) + 1)) - 1)
def get_bounds(self, name):
bounds = self.df[name].geom_op('bounds')
min = bounds.min()
max = bounds.max()
return [min.minx, min.miny, max.maxx, max.maxy]
def get_fields(self):
domain = {}
fields = []
for name in self.df.columns:
type = self.df[name].odh_type
ili_type = '!! Unbekannter Typ'
if type == OdhType.TEXT:
max_length = self.df[name].str.len().max() if self.df[name].any() else 10
ili_type = 'TEXT*{}'.format(int(max_length))
elif type in (OdhType.INTEGER, OdhType.BIGINT, OdhType.SMALLINT):
min = self.df[name].min()
min = -self.next_nines(-min) if min and min < 0 else 0
max = self.df[name].max()
max = self.next_nines(max) if max and max > 0 else 0
ili_type = '[{} .. {}]'.format(min, max)
elif type == OdhType.FLOAT:
max = self.df[name].max()
max = self.next_nines(max) if max and max > 0 else 0
ili_type = '[0.000 .. {}.999]'.format(max)
elif type == OdhType.BOOLEAN:
ili_type = 'BOOLEAN'
domain['BOOLEAN'] = '(True, False)'
elif type == OdhType.DATETIME:
ili_type = 'DATE' # actually, this can't include time in interlis. oh well.
else:
first_valid = self.df[name].first_valid_index()
if type == OdhType.GEOMETRY and first_valid is not None:
import shapely.geometry as shp
value = self.df[name][first_valid]
if isinstance(value, shp.Point):
ili_type = 'POINT'
domain['POINT'] = 'COORD2 {:.3f} {:.3f} {:.3f} {:.3f}'.format(*self.get_bounds(name))
elif isinstance(value, (shp.LineString, shp.LinearRing)):
ili_type = ('POLYLINE WITH (STRAIGHTS) '
'VERTEX COORD2 {:.3f} {:.3f} {:.3f} {:.3f} '
'WITHOUT OVERLAPS > 0.001').format(*self.get_bounds(name))
elif isinstance(value, shp.Polygon):
ili_type = ('AREA WITH (STRAIGHTS) '
'VERTEX COORD2 {:.3f} {:.3f} {:.3f} {:.3f} '
'WITHOUT OVERLAPS > 0.001').format(*self.get_bounds(name))
else:
ili_type = '!! Geometrie-Feld'
optional = 'OPTIONAL ' if self.df[name].isnull().any() else ''
fields.append((name, optional + ili_type))
return fields, domain
def sanitize_name(name):
sanitized = re.sub(r'[^A-Za-z0-9_\s]', '', name)
return ''.join([s.capitalize() for s in re.split(r'\s', sanitized.strip())])
| ef __init__(self, name, tables):
self.name = sanitize_name(name)
self.tables = tables
def get_topic_definition(self):
result = 'TOPIC {} = \n\n'.format(self.name)
for table in self.tables:
result += table.get_table_definition()
result += '\nEND {}.\n'.format(self.name)
return result
| identifier_body |
interlis_model.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import math
import re
from hub.formats import Format, Formatter
from hub.structures.file import File
from hub.structures.frame import OdhType
class InterlisModelFormat(Format):
name = 'INTERLIS1Model'
label = 'INTERLIS 1 Modell'
description = """
Modell für INTERLIS 1. Dies wird automatisch generiert aus den vorhandenen Daten und sollte von Hand korrigiert
werden
"""
extension = 'ili'
@classmethod
def is_format(cls, input_file, *args, **kwargs):
# ILI is a write-only format for the moment, so identifying it doesn't help us, really.
return False
class InterlisModelFormatter(Formatter):
targets = InterlisModelFormat,
@classmethod
def format(cls, dfs, name, format, *args, **kwargs):
tables = []
for df in dfs:
tables.append(Table(df.name, df))
model = Model(name, [Topic(name, tables)])
return [File.from_string(name + '.ili', model.get_model_definition()).file_group]
class Model(object):
def __init__(self, name, topics): | def get_model_definition(self):
result = 'TRANSFER {}; \n\n'.format(self.name)
result += '!! ACHTUNG: Dies ist ein automatisch generiertes Modell und sollte nicht ohne Anpassungen \n'
result += '!! verwendet werden.\n\n'
domain = {}
for topic in self.topics:
for table in topic.tables:
domain.update(table.domain)
if len(domain) > 0:
result += 'DOMAIN\n\n'
for k, v in domain.iteritems():
result += '\t{} = {};\n'.format(k, v)
result += '\nMODEL {}\n\n'.format(self.name)
for topic in self.topics:
result += topic.get_topic_definition()
result += '\nEND {}.\n\n'.format(self.name)
result += 'FORMAT FREE;\n\n'
result += '\nCODE\n\tBLANK = DEFAULT, UNDEFINED = DEFAULT, CONTINUE = DEFAULT;\n\t TID = ANY;\n\nEND.'
return result
class Topic(object):
def __init__(self, name, tables):
self.name = sanitize_name(name)
self.tables = tables
def get_topic_definition(self):
result = 'TOPIC {} = \n\n'.format(self.name)
for table in self.tables:
result += table.get_table_definition()
result += '\nEND {}.\n'.format(self.name)
return result
class Table(object):
def __init__(self, name, df):
self.name = sanitize_name(name)
self.df = df
self.fields, self.domain = self.get_fields()
def get_table_definition(self):
result = '\tTABLE {} = \n'.format(self.name)
for field in self.fields:
result += '\t\t{}: {};\n'.format(sanitize_name(field[0]), field[1])
result += '\tNO IDENT\n'
result += '\tEND {};\n'.format(self.name)
return result
def next_nines(self, x):
'''
results in the next series of 999...
'''
return int(10 ** (math.floor(math.log10(x) + 1)) - 1)
def get_bounds(self, name):
bounds = self.df[name].geom_op('bounds')
min = bounds.min()
max = bounds.max()
return [min.minx, min.miny, max.maxx, max.maxy]
def get_fields(self):
domain = {}
fields = []
for name in self.df.columns:
type = self.df[name].odh_type
ili_type = '!! Unbekannter Typ'
if type == OdhType.TEXT:
max_length = self.df[name].str.len().max() if self.df[name].any() else 10
ili_type = 'TEXT*{}'.format(int(max_length))
elif type in (OdhType.INTEGER, OdhType.BIGINT, OdhType.SMALLINT):
min = self.df[name].min()
min = -self.next_nines(-min) if min and min < 0 else 0
max = self.df[name].max()
max = self.next_nines(max) if max and max > 0 else 0
ili_type = '[{} .. {}]'.format(min, max)
elif type == OdhType.FLOAT:
max = self.df[name].max()
max = self.next_nines(max) if max and max > 0 else 0
ili_type = '[0.000 .. {}.999]'.format(max)
elif type == OdhType.BOOLEAN:
ili_type = 'BOOLEAN'
domain['BOOLEAN'] = '(True, False)'
elif type == OdhType.DATETIME:
ili_type = 'DATE' # actually, this can't include time in interlis. oh well.
else:
first_valid = self.df[name].first_valid_index()
if type == OdhType.GEOMETRY and first_valid is not None:
import shapely.geometry as shp
value = self.df[name][first_valid]
if isinstance(value, shp.Point):
ili_type = 'POINT'
domain['POINT'] = 'COORD2 {:.3f} {:.3f} {:.3f} {:.3f}'.format(*self.get_bounds(name))
elif isinstance(value, (shp.LineString, shp.LinearRing)):
ili_type = ('POLYLINE WITH (STRAIGHTS) '
'VERTEX COORD2 {:.3f} {:.3f} {:.3f} {:.3f} '
'WITHOUT OVERLAPS > 0.001').format(*self.get_bounds(name))
elif isinstance(value, shp.Polygon):
ili_type = ('AREA WITH (STRAIGHTS) '
'VERTEX COORD2 {:.3f} {:.3f} {:.3f} {:.3f} '
'WITHOUT OVERLAPS > 0.001').format(*self.get_bounds(name))
else:
ili_type = '!! Geometrie-Feld'
optional = 'OPTIONAL ' if self.df[name].isnull().any() else ''
fields.append((name, optional + ili_type))
return fields, domain
def sanitize_name(name):
sanitized = re.sub(r'[^A-Za-z0-9_\s]', '', name)
return ''.join([s.capitalize() for s in re.split(r'\s', sanitized.strip())]) | self.name = sanitize_name(name)
self.topics = topics
| random_line_split |
tag_utils.py | #
# Copyright 2003-2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
""" Conversion tools between stream tags and Python objects """
import pmt
try:
from gnuradio import gr
except ImportError:
from runtime_swig import tag_t
class PythonTag(object):
" Python container for tags "
def __init__(self):
self.offset = None | self.srcid = None
def tag_to_python(tag):
""" Convert a stream tag to a Python-readable object """
newtag = PythonTag()
newtag.offset = tag.offset
newtag.key = pmt.to_python(tag.key)
newtag.value = pmt.to_python(tag.value)
newtag.srcid = pmt.to_python(tag.srcid)
return newtag
def tag_to_pmt(tag):
""" Convert a Python-readable object to a stream tag """
newtag = tag_t()
newtag.offset = tag.offset
newtag.key = pmt.to_python(tag.key)
newtag.value = pmt.from_python(tag.value)
newtag.srcid = pmt.from_python(tag.srcid)
return newtag | self.key = None
self.value = None | random_line_split |
tag_utils.py | #
# Copyright 2003-2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
""" Conversion tools between stream tags and Python objects """
import pmt
try:
from gnuradio import gr
except ImportError:
from runtime_swig import tag_t
class | (object):
" Python container for tags "
def __init__(self):
self.offset = None
self.key = None
self.value = None
self.srcid = None
def tag_to_python(tag):
""" Convert a stream tag to a Python-readable object """
newtag = PythonTag()
newtag.offset = tag.offset
newtag.key = pmt.to_python(tag.key)
newtag.value = pmt.to_python(tag.value)
newtag.srcid = pmt.to_python(tag.srcid)
return newtag
def tag_to_pmt(tag):
""" Convert a Python-readable object to a stream tag """
newtag = tag_t()
newtag.offset = tag.offset
newtag.key = pmt.to_python(tag.key)
newtag.value = pmt.from_python(tag.value)
newtag.srcid = pmt.from_python(tag.srcid)
return newtag
| PythonTag | identifier_name |
tag_utils.py | #
# Copyright 2003-2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
""" Conversion tools between stream tags and Python objects """
import pmt
try:
from gnuradio import gr
except ImportError:
from runtime_swig import tag_t
class PythonTag(object):
|
def tag_to_python(tag):
""" Convert a stream tag to a Python-readable object """
newtag = PythonTag()
newtag.offset = tag.offset
newtag.key = pmt.to_python(tag.key)
newtag.value = pmt.to_python(tag.value)
newtag.srcid = pmt.to_python(tag.srcid)
return newtag
def tag_to_pmt(tag):
""" Convert a Python-readable object to a stream tag """
newtag = tag_t()
newtag.offset = tag.offset
newtag.key = pmt.to_python(tag.key)
newtag.value = pmt.from_python(tag.value)
newtag.srcid = pmt.from_python(tag.srcid)
return newtag
| " Python container for tags "
def __init__(self):
self.offset = None
self.key = None
self.value = None
self.srcid = None | identifier_body |
overloaded-deref-count.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
use std::ops::{Deref, DerefMut};
use std::vec::Vec;
struct DerefCounter<T> {
count_imm: Cell<uint>,
count_mut: uint,
value: T
}
impl<T> DerefCounter<T> {
fn new(value: T) -> DerefCounter<T> {
DerefCounter {
count_imm: Cell::new(0),
count_mut: 0,
value: value
}
}
fn counts(&self) -> (uint, uint) {
(self.count_imm.get(), self.count_mut)
}
} | self.count_imm.set(self.count_imm.get() + 1);
&self.value
}
}
impl<T> DerefMut<T> for DerefCounter<T> {
fn deref_mut(&mut self) -> &mut T {
self.count_mut += 1;
&mut self.value
}
}
pub fn main() {
let mut n = DerefCounter::new(0i);
let mut v = DerefCounter::new(Vec::new());
let _ = *n; // Immutable deref + copy a POD.
assert_eq!(n.counts(), (1, 0));
let _ = (&*n, &*v); // Immutable deref + borrow.
assert_eq!(n.counts(), (2, 0)); assert_eq!(v.counts(), (1, 0));
let _ = (&mut *n, &mut *v); // Mutable deref + mutable borrow.
assert_eq!(n.counts(), (2, 1)); assert_eq!(v.counts(), (1, 1));
let mut v2 = Vec::new();
v2.push(1i);
*n = 5; *v = v2; // Mutable deref + assignment.
assert_eq!(n.counts(), (2, 2)); assert_eq!(v.counts(), (1, 2));
*n -= 3; // Mutable deref + assignment with binary operation.
assert_eq!(n.counts(), (2, 3));
// Immutable deref used for calling a method taking &self. (The
// typechecker is smarter now about doing this.)
(*n).to_string();
assert_eq!(n.counts(), (3, 3));
// Mutable deref used for calling a method taking &mut self.
(*v).push(2);
assert_eq!(v.counts(), (1, 3));
// Check the final states.
assert_eq!(*n, 2);
let expected: &[_] = &[1, 2];
assert_eq!((*v).as_slice(), expected);
} |
impl<T> Deref<T> for DerefCounter<T> {
fn deref(&self) -> &T { | random_line_split |
overloaded-deref-count.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
use std::ops::{Deref, DerefMut};
use std::vec::Vec;
struct DerefCounter<T> {
count_imm: Cell<uint>,
count_mut: uint,
value: T
}
impl<T> DerefCounter<T> {
fn new(value: T) -> DerefCounter<T> {
DerefCounter {
count_imm: Cell::new(0),
count_mut: 0,
value: value
}
}
fn | (&self) -> (uint, uint) {
(self.count_imm.get(), self.count_mut)
}
}
impl<T> Deref<T> for DerefCounter<T> {
fn deref(&self) -> &T {
self.count_imm.set(self.count_imm.get() + 1);
&self.value
}
}
impl<T> DerefMut<T> for DerefCounter<T> {
fn deref_mut(&mut self) -> &mut T {
self.count_mut += 1;
&mut self.value
}
}
pub fn main() {
let mut n = DerefCounter::new(0i);
let mut v = DerefCounter::new(Vec::new());
let _ = *n; // Immutable deref + copy a POD.
assert_eq!(n.counts(), (1, 0));
let _ = (&*n, &*v); // Immutable deref + borrow.
assert_eq!(n.counts(), (2, 0)); assert_eq!(v.counts(), (1, 0));
let _ = (&mut *n, &mut *v); // Mutable deref + mutable borrow.
assert_eq!(n.counts(), (2, 1)); assert_eq!(v.counts(), (1, 1));
let mut v2 = Vec::new();
v2.push(1i);
*n = 5; *v = v2; // Mutable deref + assignment.
assert_eq!(n.counts(), (2, 2)); assert_eq!(v.counts(), (1, 2));
*n -= 3; // Mutable deref + assignment with binary operation.
assert_eq!(n.counts(), (2, 3));
// Immutable deref used for calling a method taking &self. (The
// typechecker is smarter now about doing this.)
(*n).to_string();
assert_eq!(n.counts(), (3, 3));
// Mutable deref used for calling a method taking &mut self.
(*v).push(2);
assert_eq!(v.counts(), (1, 3));
// Check the final states.
assert_eq!(*n, 2);
let expected: &[_] = &[1, 2];
assert_eq!((*v).as_slice(), expected);
}
| counts | identifier_name |
overloaded-deref-count.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
use std::ops::{Deref, DerefMut};
use std::vec::Vec;
struct DerefCounter<T> {
count_imm: Cell<uint>,
count_mut: uint,
value: T
}
impl<T> DerefCounter<T> {
fn new(value: T) -> DerefCounter<T> {
DerefCounter {
count_imm: Cell::new(0),
count_mut: 0,
value: value
}
}
fn counts(&self) -> (uint, uint) |
}
impl<T> Deref<T> for DerefCounter<T> {
fn deref(&self) -> &T {
self.count_imm.set(self.count_imm.get() + 1);
&self.value
}
}
impl<T> DerefMut<T> for DerefCounter<T> {
fn deref_mut(&mut self) -> &mut T {
self.count_mut += 1;
&mut self.value
}
}
pub fn main() {
let mut n = DerefCounter::new(0i);
let mut v = DerefCounter::new(Vec::new());
let _ = *n; // Immutable deref + copy a POD.
assert_eq!(n.counts(), (1, 0));
let _ = (&*n, &*v); // Immutable deref + borrow.
assert_eq!(n.counts(), (2, 0)); assert_eq!(v.counts(), (1, 0));
let _ = (&mut *n, &mut *v); // Mutable deref + mutable borrow.
assert_eq!(n.counts(), (2, 1)); assert_eq!(v.counts(), (1, 1));
let mut v2 = Vec::new();
v2.push(1i);
*n = 5; *v = v2; // Mutable deref + assignment.
assert_eq!(n.counts(), (2, 2)); assert_eq!(v.counts(), (1, 2));
*n -= 3; // Mutable deref + assignment with binary operation.
assert_eq!(n.counts(), (2, 3));
// Immutable deref used for calling a method taking &self. (The
// typechecker is smarter now about doing this.)
(*n).to_string();
assert_eq!(n.counts(), (3, 3));
// Mutable deref used for calling a method taking &mut self.
(*v).push(2);
assert_eq!(v.counts(), (1, 3));
// Check the final states.
assert_eq!(*n, 2);
let expected: &[_] = &[1, 2];
assert_eq!((*v).as_slice(), expected);
}
| {
(self.count_imm.get(), self.count_mut)
} | identifier_body |
grql-test.js | const chai = require('chai')
const expect = chai.expect
const path = require('path')
const fs = require('fs')
const os = require('os')
const YAML = require('yamljs')
const pkg = require('../package') | describe('grql', () => {
let config, grql, sampleServer
before(() => {
process.env['NODE_ENV'] = 'test'
config = require('../lib/config')
config.configFile = path.join(os.tmpdir(), `.${pkg.name}.yml`)
grql = require('../lib/grql')
sampleServer = require('./sample-server')
process.stdin.isTTY = true
})
after(() => {
if (!config.configFile) {
return
}
fs.unlinkSync(config.configFile)
})
describe('exec', () => {
it('should return an error if no argument', async () => {
try {
await grql.exec()
throw new Error('should not return a result')
} catch (err) {
expect(err).to.be.an('error')
expect(err).to.have.property('message', grql.__('Error : missing argument (try --help)'))
}
})
it('should show help', async () => {
const stdout = {}
const stderr = {}
await grql.exec({ stdout, stderr, args: ['--nocolor', '--help'] })
expect(stderr).to.have.property('data').that.is.a('string')
})
describe('sample server', () => {
let stdout
let stderr
before(async () => {
await sampleServer.start()
const port = sampleServer.getPort()
await grql.exec({
args: [
'--nocolor',
'-e', 'test',
'-b', `http://localhost:${port}/graphql`,
'-s'
]
})
})
after(() => sampleServer.stop())
beforeEach(() => {
stdout = {}
stderr = {}
})
it('should return schema', async () => {
await grql.exec({ stdout, stderr, args: ['--nocolor', 'schema'] })
expect(stdout).to.have.property('data').that.is.a('string')
const out = JSON.parse(stdout.data)
expect(Object.keys(out)).to.eql(['queryType',
'mutationType',
'subscriptionType',
'types',
'directives'
])
})
it('should return hello', async () => {
await grql.exec({ stdout, stderr, args: ['--nocolor', 'query', '{ hello }'] })
expect(stdout).to.have.property('data').that.is.a('string')
const out = JSON.parse(stdout.data)
expect(out).to.eql({ hello: 'world' })
})
it('should return hello in yaml format', async () => {
await grql.exec({ stdout, stderr, args: ['--nocolor', '-y', 'query', '{ hello }'] })
expect(stdout).to.have.property('data').that.is.a('string')
const out = YAML.parse(stdout.data)
expect(out).to.eql({ hello: 'world' })
})
})
})
}) | random_line_split |
|
grql-test.js | const chai = require('chai')
const expect = chai.expect
const path = require('path')
const fs = require('fs')
const os = require('os')
const YAML = require('yamljs')
const pkg = require('../package')
describe('grql', () => {
let config, grql, sampleServer
before(() => {
process.env['NODE_ENV'] = 'test'
config = require('../lib/config')
config.configFile = path.join(os.tmpdir(), `.${pkg.name}.yml`)
grql = require('../lib/grql')
sampleServer = require('./sample-server')
process.stdin.isTTY = true
})
after(() => {
if (!config.configFile) |
fs.unlinkSync(config.configFile)
})
describe('exec', () => {
it('should return an error if no argument', async () => {
try {
await grql.exec()
throw new Error('should not return a result')
} catch (err) {
expect(err).to.be.an('error')
expect(err).to.have.property('message', grql.__('Error : missing argument (try --help)'))
}
})
it('should show help', async () => {
const stdout = {}
const stderr = {}
await grql.exec({ stdout, stderr, args: ['--nocolor', '--help'] })
expect(stderr).to.have.property('data').that.is.a('string')
})
describe('sample server', () => {
let stdout
let stderr
before(async () => {
await sampleServer.start()
const port = sampleServer.getPort()
await grql.exec({
args: [
'--nocolor',
'-e', 'test',
'-b', `http://localhost:${port}/graphql`,
'-s'
]
})
})
after(() => sampleServer.stop())
beforeEach(() => {
stdout = {}
stderr = {}
})
it('should return schema', async () => {
await grql.exec({ stdout, stderr, args: ['--nocolor', 'schema'] })
expect(stdout).to.have.property('data').that.is.a('string')
const out = JSON.parse(stdout.data)
expect(Object.keys(out)).to.eql(['queryType',
'mutationType',
'subscriptionType',
'types',
'directives'
])
})
it('should return hello', async () => {
await grql.exec({ stdout, stderr, args: ['--nocolor', 'query', '{ hello }'] })
expect(stdout).to.have.property('data').that.is.a('string')
const out = JSON.parse(stdout.data)
expect(out).to.eql({ hello: 'world' })
})
it('should return hello in yaml format', async () => {
await grql.exec({ stdout, stderr, args: ['--nocolor', '-y', 'query', '{ hello }'] })
expect(stdout).to.have.property('data').that.is.a('string')
const out = YAML.parse(stdout.data)
expect(out).to.eql({ hello: 'world' })
})
})
})
})
| {
return
} | conditional_block |
iter.rs | use ::NbitsVec;
use num::PrimInt;
use typenum::NonZero;
use typenum::uint::Unsigned;
pub struct Iter<'a, N:'a, Block: 'a> where N: Unsigned + NonZero, Block: PrimInt {
vec: &'a NbitsVec<N, Block>,
pos: usize,
}
impl<'a, N:'a, Block: 'a> Iter<'a, N, Block> where N: Unsigned + NonZero, Block: PrimInt {
}
impl<'a, N:'a, Block: 'a> Iterator for Iter<'a, N, Block> where N: Unsigned + NonZero, Block: PrimInt {
type Item = Block;
#[inline]
fn next(&mut self) -> Option<Block> {
self.pos += 1;
if self.vec.len() > self.pos | else {
None
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match self.vec.len() {
len if len > self.pos => {
let diff = len - self.pos;
(diff, Some(diff))
},
_ => (0, None),
}
}
#[inline]
fn count(self) -> usize {
self.size_hint().0
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Block> {
self.pos += n;
if self.vec.len() > self.pos {
Some(self.vec.get(self.pos))
} else {
None
}
}
}
impl<'a, N:'a, Block: 'a> IntoIterator for &'a NbitsVec<N, Block> where N: Unsigned + NonZero, Block: PrimInt {
type Item = Block;
type IntoIter = Iter<'a, N, Block>;
fn into_iter(self) -> Iter<'a, N, Block> {
Iter {
vec: self,
pos: 0,
}
}
}
#[cfg(test)]
mod tests {
use ::{NbitsVec, N2};
type NV = NbitsVec<N2, usize>;
#[test]
fn into_iter() {
let vec = NV::new();
for val in vec.into_iter() {
let _ = val;
}
}
}
| {
Some(self.vec.get(self.pos))
} | conditional_block |
iter.rs | use ::NbitsVec;
use num::PrimInt;
use typenum::NonZero;
use typenum::uint::Unsigned;
pub struct Iter<'a, N:'a, Block: 'a> where N: Unsigned + NonZero, Block: PrimInt {
vec: &'a NbitsVec<N, Block>,
pos: usize,
}
impl<'a, N:'a, Block: 'a> Iter<'a, N, Block> where N: Unsigned + NonZero, Block: PrimInt {
}
impl<'a, N:'a, Block: 'a> Iterator for Iter<'a, N, Block> where N: Unsigned + NonZero, Block: PrimInt {
type Item = Block;
#[inline]
fn next(&mut self) -> Option<Block> {
self.pos += 1;
if self.vec.len() > self.pos {
Some(self.vec.get(self.pos))
} else {
None
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match self.vec.len() {
len if len > self.pos => {
let diff = len - self.pos;
(diff, Some(diff))
},
_ => (0, None),
}
}
#[inline]
fn | (self) -> usize {
self.size_hint().0
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Block> {
self.pos += n;
if self.vec.len() > self.pos {
Some(self.vec.get(self.pos))
} else {
None
}
}
}
impl<'a, N:'a, Block: 'a> IntoIterator for &'a NbitsVec<N, Block> where N: Unsigned + NonZero, Block: PrimInt {
type Item = Block;
type IntoIter = Iter<'a, N, Block>;
fn into_iter(self) -> Iter<'a, N, Block> {
Iter {
vec: self,
pos: 0,
}
}
}
#[cfg(test)]
mod tests {
use ::{NbitsVec, N2};
type NV = NbitsVec<N2, usize>;
#[test]
fn into_iter() {
let vec = NV::new();
for val in vec.into_iter() {
let _ = val;
}
}
}
| count | identifier_name |
iter.rs | use ::NbitsVec;
use num::PrimInt;
use typenum::NonZero;
use typenum::uint::Unsigned;
pub struct Iter<'a, N:'a, Block: 'a> where N: Unsigned + NonZero, Block: PrimInt {
vec: &'a NbitsVec<N, Block>,
pos: usize,
}
impl<'a, N:'a, Block: 'a> Iter<'a, N, Block> where N: Unsigned + NonZero, Block: PrimInt {
}
impl<'a, N:'a, Block: 'a> Iterator for Iter<'a, N, Block> where N: Unsigned + NonZero, Block: PrimInt {
type Item = Block;
#[inline]
fn next(&mut self) -> Option<Block> {
self.pos += 1;
if self.vec.len() > self.pos {
Some(self.vec.get(self.pos))
} else {
None
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match self.vec.len() {
len if len > self.pos => {
let diff = len - self.pos;
(diff, Some(diff))
},
_ => (0, None),
}
}
#[inline]
fn count(self) -> usize {
self.size_hint().0
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Block> |
}
impl<'a, N:'a, Block: 'a> IntoIterator for &'a NbitsVec<N, Block> where N: Unsigned + NonZero, Block: PrimInt {
type Item = Block;
type IntoIter = Iter<'a, N, Block>;
fn into_iter(self) -> Iter<'a, N, Block> {
Iter {
vec: self,
pos: 0,
}
}
}
#[cfg(test)]
mod tests {
use ::{NbitsVec, N2};
type NV = NbitsVec<N2, usize>;
#[test]
fn into_iter() {
let vec = NV::new();
for val in vec.into_iter() {
let _ = val;
}
}
}
| {
self.pos += n;
if self.vec.len() > self.pos {
Some(self.vec.get(self.pos))
} else {
None
}
} | identifier_body |
iter.rs | use ::NbitsVec;
use num::PrimInt;
use typenum::NonZero;
use typenum::uint::Unsigned;
pub struct Iter<'a, N:'a, Block: 'a> where N: Unsigned + NonZero, Block: PrimInt {
vec: &'a NbitsVec<N, Block>,
pos: usize,
}
impl<'a, N:'a, Block: 'a> Iter<'a, N, Block> where N: Unsigned + NonZero, Block: PrimInt {
}
impl<'a, N:'a, Block: 'a> Iterator for Iter<'a, N, Block> where N: Unsigned + NonZero, Block: PrimInt {
type Item = Block;
#[inline]
fn next(&mut self) -> Option<Block> {
self.pos += 1;
if self.vec.len() > self.pos {
Some(self.vec.get(self.pos))
} else {
None
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match self.vec.len() {
len if len > self.pos => {
let diff = len - self.pos;
(diff, Some(diff))
},
_ => (0, None),
}
}
| #[inline]
fn nth(&mut self, n: usize) -> Option<Block> {
self.pos += n;
if self.vec.len() > self.pos {
Some(self.vec.get(self.pos))
} else {
None
}
}
}
impl<'a, N:'a, Block: 'a> IntoIterator for &'a NbitsVec<N, Block> where N: Unsigned + NonZero, Block: PrimInt {
type Item = Block;
type IntoIter = Iter<'a, N, Block>;
fn into_iter(self) -> Iter<'a, N, Block> {
Iter {
vec: self,
pos: 0,
}
}
}
#[cfg(test)]
mod tests {
use ::{NbitsVec, N2};
type NV = NbitsVec<N2, usize>;
#[test]
fn into_iter() {
let vec = NV::new();
for val in vec.into_iter() {
let _ = val;
}
}
} | #[inline]
fn count(self) -> usize {
self.size_hint().0
}
| random_line_split |
varinput.js | // varinput.js
// Javascript routines to handle variable rendering
// $Id: //dev/EPS/js/varinput.js#48 $
function inspect()
|
// global variable used for items that submit on change or selection
var autosubmit = '';
var autopublish = '';
var workspaceByPass = false;
function valueChanged(ident) {
var theField = document.getElementById('form1').variablechanged;
if (theField != null) {
theField.value = (theField.value == '') ? ',' + ident + ',' : (theField.value.indexOf(',' + ident + ',') > -1) ? theField.value : theField.value + ident + ',';
if (autopublish.indexOf(',' + ident + ',') > -1 && autosubmitting == 0) {
bypass_comment = 1; autosubmitting = 1;
document.form1.pubPress.value = 'pubPress';
document.form1.autopubvar.value = ident;
PreSubmit('autopublish');
}
else if (autosubmit.indexOf(',' + ident + ',') > -1 && autosubmitting == 0) {
bypass_comment = 1; autosubmitting = 1;
document.form1.subPress.value = 'subPress';
if (workspaceByPass == true) {
document.form1.bypassPress.value = 'bypassPress';
}
PreSubmit();
}
}
}
function valueUnChanged(ident) {
var theField = document.getElementById('form1').variablechanged;
if (theField != null) {
if (theField.value.indexOf(ident + ',') > -1) {
theField.value = theField.value.replace(ident + ',', '');
}
}
}
function processCmt(obj, ident) {
obj.value = obj.value.substring(0, 1999);
var theField = document.getElementById('form1').commentchanged;
theField.value = (theField.value == '') ? ',' + ident + ',' : (theField.value.indexOf(',' + ident + ',') > -1) ? theField.value : theField.value + ident + ',';
valueChanged(ident);
}
function addAutosubmit(ident) {
autosubmit = ((autosubmit == '') ? ',' + ident + ',' : autosubmit + ident + ',');
}
function hasAutoSubmit(ident) {
return (autosubmit.indexOf(',' + ident + ',') > -1);
}
function addAutoPublish(ident) {
autopublish = ((autopublish == '') ? ',' + ident + ',' : autopublish + ident + ',');
}
function hasVariableChanged() { return (document.getElementById('form1').variablechanged.value.length > 1); }
function hasCommentChanged() { return (document.getElementById('form1').commentchanged.value.length > 1); }
function keyPressed(f,e, acceptEnter) // can use as form or field handler
{
var keycode;
if (window.event) keycode = window.event.keyCode;
else if (e) keycode = e.which;
else return true;
if (keycode == 13)
return acceptEnter;
else
return true;
}
function SubmitFormSpecial(s, s2) {
var form1 = document.getElementById('form1');
if (s2 != null) {
if (s2 == 'table_image')
form1.table_image.value = 'true';
}
form1.act.value = s;
form1.submit();
}
function SubmitFormSpecialLarge(s,v) {
var form1 = document.getElementById('form1');
form1.act.value = s;
form1.sview.value = v;
form1.submit();
form1.act.value = '';
form1.sview.value = '';
}
function SubmitFormAndExcel() {
var form1 = document.getElementById('form1');
form1.act.value = 'Excel';
form1.submit();
form1.act.value = '';
form1.sview.value = '';
}
function ExcelandRestore(s, isPortForm, varMode) {
var form1 = document.getElementById('form1');
var formType = 'proj';
if (isPortForm == 'true') { formType = 'port'; }
if (varMode != 'output') { varMode = 'input'; }
var aspxPage = formType + varMode + '.aspx?var=';
form1.target = 'excel';
var oldact = form1.action;
form1.action = aspxPage + s + oldact.substring(oldact.indexOf('&')).replace(/var=(\w+)&/, '');
form1.act.value = 'Excel';
form1.submit();
form1.target = '_self';
form1.act.value = '';
form1.action = oldact;
}
function SubmitFormSpecialWithMenus(s) {
var d = new Date(), form1 = document.getElementById('form1');
var wName = d.getUTCSeconds() + '_' + d.getUTCMinutes() + '_'; //Create a unique name for the window
window.open('about:blank', wName, 'toolbar=yes,location=no,directories=yes,status=yes,menubar=yes,scrollbars=yes,resizable=yes,copyhistory=no,width=850,height=700');
form1.target = wName;
form1.act.value = s;
form1.submit();
form1.target = '_self';
form1.act.value = '';
}
function ReturnComment() {
PostSubmit();
}
function SubmitFormSpecialOnValue(field,s) {
if (field.value.length > 0) {
SubmitFormSpecial(s);
field.selectedIndex = 0;
}
}
function currTime() {
var now = new Date();
var hours = now.getHours();
var minutes = now.getMinutes();
var seconds = now.getSeconds();
var timeValue = '' + ((hours > 12) ? hours - 12 : hours);
if (timeValue == '0') timeValue = 12;
timeValue += ((minutes < 10) ? ':0' : ':') + minutes;
timeValue += ((seconds < 10) ? ':0' : ':') + seconds;
timeValue += (hours >= 12) ? ' PM' : ' AM';
return timeValue;
}
function showHandle() {
if (showHandleFlag == 0) {
showHandleFlag = 1;
$('img.frameHandle').show();
setTimeout("$('img.frameHandle').hide(); showHandleFlag=0;", 2600);
}
}
function frameResize() {
if (window.parent.document.getElementById('fs2').getAttribute('cols') == '200,*') {
window.parent.document.getElementById('fs2').setAttribute('cols', '0,*');
$('img.frameHandle').attr('src', 'images/dbl_r.gif').attr('alt', 'Restore the navigation bar').attr('title', 'Restore the navigation bar');
} else {
window.parent.document.getElementById('fs2').setAttribute('cols', '200,*');
$('img.frameHandle').attr('src', 'images/dbl_l.gif').attr('alt', 'Minimize the navigation bar').attr('title', 'Minimize the navigation bar');
}
}
function bindEvents() {
var justFocused;
$('.epsgrid input').mouseup(function(e) {if (justFocused == 1) {e.preventDefault(); justFocused = 0;} });
$('.epsgrid input[type!="checkbox"]').focus(function() {
origVals[this.name] = this.value;
this.select();
justFocused = 1;
setTimeout('justFocused = 0', 50);
}).blur(function() {
resizeCol(this);
}).keydown(function(e) {
return InputKeyPress(this, e);
}).keypress(function(e) {
return keyPressed(this, e, false);
}).change(function() {
fch(this, true, this.id.substring(0, this.id.lastIndexOf('_id')));
});
}
// LEAVE AT BOTTOM OF JS FILE //
// special jQuery to paint events onto inputs //
var showHandleFlag;
showHandleFlag = 0;
if (typeof jQuery != 'undefined') {
$(document).ready(function() {
setTimeout('bindEvents()', 1);
$('.epsvar').change(function() {
var cmdStr;
cmdStr = 'validate_' + $(this).attr('vname') + "($('#valueField_" + $(this).attr('vname') + "').val(),'" + $(this).attr('orig_value') + "',true)";
setTimeout(cmdStr, 1);
});
var textValue;
$('textarea.expanding').each(function() {
textValue = $(this).text();
while (textValue.indexOf('**br**') != -1) {
textValue = textValue.replace('**br**', '\n');
}
$(this).val(textValue);
});
$('textarea.expanding').autogrow();
if (window.parent.document.getElementById('fs2')) {
$('body').mousemove(function(e) {
if (e.pageX < 50 && e.pageY < 50) showHandle();
}).append('<img class="frameHandle" src="images/dbl_l.gif"/>');
$('img.frameHandle').bind('click', function() {frameResize()});
if (window.parent.document.getElementById('fs2').getAttribute('cols') == '0,*') {
$('img.frameHandle').attr('src', 'images/dbl_r.gif');
$('img.frameHandle').attr('alt', 'Restore the navigation bar');
$('img.frameHandle').attr('title', 'Restore the navigation bar');
} else {
$('img.frameHandle').attr('src', 'images/dbl_l.gif').attr('alt', 'Minimize the navigation bar').attr('title', 'Minimize the navigation bar');
}
}
});
}
function openShortcutNode(aspxfile, sc_tid, sc_pid, sc_var, formcls)
{
var urlStr;
urlStr = aspxfile + '?' + 'tempid=' + sc_tid + '\&modelid=' + sc_pid + '\&var=' + sc_var + '\&form_class=' + formcls;
urlStr = urlStr + '\&shortcut_popup=true';
window.open(urlStr, 'shortcut_popup', 'toolbar=no,location=no,directories=no,status=no,menubar=no,scrollbars=yes,resizable=yes,copyhistory=no,width=650,height=550');
/*
// TODO: thread this into comment and form submit logic
// fixup close button not working
$("#shortcutdiv").load(urlStr).dialog(
{ autoOpen: false ,
modal:true,
height: 650,
width: 800,
buttons: [ {text: "Cancel",
click: function() { $(this).dialog("close"); alert('closing'); }
},
{ text: "Submit",
click: function() { var thisForm = $("#shortcutdiv form"); thisForm.submit(); }
}
]
}
);
$("#shortcutdiv").dialog("open");
*/
}
| {
form1.xml.value = '1';
SubmitFormSpecial('table');
} | identifier_body |
varinput.js | // varinput.js
// Javascript routines to handle variable rendering
// $Id: //dev/EPS/js/varinput.js#48 $
function inspect()
{
form1.xml.value = '1';
SubmitFormSpecial('table');
}
// global variable used for items that submit on change or selection
var autosubmit = '';
var autopublish = '';
var workspaceByPass = false;
function valueChanged(ident) {
var theField = document.getElementById('form1').variablechanged;
if (theField != null) {
theField.value = (theField.value == '') ? ',' + ident + ',' : (theField.value.indexOf(',' + ident + ',') > -1) ? theField.value : theField.value + ident + ',';
if (autopublish.indexOf(',' + ident + ',') > -1 && autosubmitting == 0) {
bypass_comment = 1; autosubmitting = 1;
document.form1.pubPress.value = 'pubPress';
document.form1.autopubvar.value = ident;
PreSubmit('autopublish');
}
else if (autosubmit.indexOf(',' + ident + ',') > -1 && autosubmitting == 0) {
bypass_comment = 1; autosubmitting = 1;
document.form1.subPress.value = 'subPress';
if (workspaceByPass == true) {
document.form1.bypassPress.value = 'bypassPress';
}
PreSubmit();
}
}
}
function | (ident) {
var theField = document.getElementById('form1').variablechanged;
if (theField != null) {
if (theField.value.indexOf(ident + ',') > -1) {
theField.value = theField.value.replace(ident + ',', '');
}
}
}
function processCmt(obj, ident) {
obj.value = obj.value.substring(0, 1999);
var theField = document.getElementById('form1').commentchanged;
theField.value = (theField.value == '') ? ',' + ident + ',' : (theField.value.indexOf(',' + ident + ',') > -1) ? theField.value : theField.value + ident + ',';
valueChanged(ident);
}
function addAutosubmit(ident) {
autosubmit = ((autosubmit == '') ? ',' + ident + ',' : autosubmit + ident + ',');
}
function hasAutoSubmit(ident) {
return (autosubmit.indexOf(',' + ident + ',') > -1);
}
function addAutoPublish(ident) {
autopublish = ((autopublish == '') ? ',' + ident + ',' : autopublish + ident + ',');
}
function hasVariableChanged() { return (document.getElementById('form1').variablechanged.value.length > 1); }
function hasCommentChanged() { return (document.getElementById('form1').commentchanged.value.length > 1); }
function keyPressed(f,e, acceptEnter) // can use as form or field handler
{
var keycode;
if (window.event) keycode = window.event.keyCode;
else if (e) keycode = e.which;
else return true;
if (keycode == 13)
return acceptEnter;
else
return true;
}
function SubmitFormSpecial(s, s2) {
var form1 = document.getElementById('form1');
if (s2 != null) {
if (s2 == 'table_image')
form1.table_image.value = 'true';
}
form1.act.value = s;
form1.submit();
}
function SubmitFormSpecialLarge(s,v) {
var form1 = document.getElementById('form1');
form1.act.value = s;
form1.sview.value = v;
form1.submit();
form1.act.value = '';
form1.sview.value = '';
}
function SubmitFormAndExcel() {
var form1 = document.getElementById('form1');
form1.act.value = 'Excel';
form1.submit();
form1.act.value = '';
form1.sview.value = '';
}
function ExcelandRestore(s, isPortForm, varMode) {
var form1 = document.getElementById('form1');
var formType = 'proj';
if (isPortForm == 'true') { formType = 'port'; }
if (varMode != 'output') { varMode = 'input'; }
var aspxPage = formType + varMode + '.aspx?var=';
form1.target = 'excel';
var oldact = form1.action;
form1.action = aspxPage + s + oldact.substring(oldact.indexOf('&')).replace(/var=(\w+)&/, '');
form1.act.value = 'Excel';
form1.submit();
form1.target = '_self';
form1.act.value = '';
form1.action = oldact;
}
function SubmitFormSpecialWithMenus(s) {
var d = new Date(), form1 = document.getElementById('form1');
var wName = d.getUTCSeconds() + '_' + d.getUTCMinutes() + '_'; //Create a unique name for the window
window.open('about:blank', wName, 'toolbar=yes,location=no,directories=yes,status=yes,menubar=yes,scrollbars=yes,resizable=yes,copyhistory=no,width=850,height=700');
form1.target = wName;
form1.act.value = s;
form1.submit();
form1.target = '_self';
form1.act.value = '';
}
function ReturnComment() {
PostSubmit();
}
function SubmitFormSpecialOnValue(field,s) {
if (field.value.length > 0) {
SubmitFormSpecial(s);
field.selectedIndex = 0;
}
}
function currTime() {
var now = new Date();
var hours = now.getHours();
var minutes = now.getMinutes();
var seconds = now.getSeconds();
var timeValue = '' + ((hours > 12) ? hours - 12 : hours);
if (timeValue == '0') timeValue = 12;
timeValue += ((minutes < 10) ? ':0' : ':') + minutes;
timeValue += ((seconds < 10) ? ':0' : ':') + seconds;
timeValue += (hours >= 12) ? ' PM' : ' AM';
return timeValue;
}
function showHandle() {
if (showHandleFlag == 0) {
showHandleFlag = 1;
$('img.frameHandle').show();
setTimeout("$('img.frameHandle').hide(); showHandleFlag=0;", 2600);
}
}
function frameResize() {
if (window.parent.document.getElementById('fs2').getAttribute('cols') == '200,*') {
window.parent.document.getElementById('fs2').setAttribute('cols', '0,*');
$('img.frameHandle').attr('src', 'images/dbl_r.gif').attr('alt', 'Restore the navigation bar').attr('title', 'Restore the navigation bar');
} else {
window.parent.document.getElementById('fs2').setAttribute('cols', '200,*');
$('img.frameHandle').attr('src', 'images/dbl_l.gif').attr('alt', 'Minimize the navigation bar').attr('title', 'Minimize the navigation bar');
}
}
function bindEvents() {
var justFocused;
$('.epsgrid input').mouseup(function(e) {if (justFocused == 1) {e.preventDefault(); justFocused = 0;} });
$('.epsgrid input[type!="checkbox"]').focus(function() {
origVals[this.name] = this.value;
this.select();
justFocused = 1;
setTimeout('justFocused = 0', 50);
}).blur(function() {
resizeCol(this);
}).keydown(function(e) {
return InputKeyPress(this, e);
}).keypress(function(e) {
return keyPressed(this, e, false);
}).change(function() {
fch(this, true, this.id.substring(0, this.id.lastIndexOf('_id')));
});
}
// LEAVE AT BOTTOM OF JS FILE //
// special jQuery to paint events onto inputs //
var showHandleFlag;
showHandleFlag = 0;
if (typeof jQuery != 'undefined') {
$(document).ready(function() {
setTimeout('bindEvents()', 1);
$('.epsvar').change(function() {
var cmdStr;
cmdStr = 'validate_' + $(this).attr('vname') + "($('#valueField_" + $(this).attr('vname') + "').val(),'" + $(this).attr('orig_value') + "',true)";
setTimeout(cmdStr, 1);
});
var textValue;
$('textarea.expanding').each(function() {
textValue = $(this).text();
while (textValue.indexOf('**br**') != -1) {
textValue = textValue.replace('**br**', '\n');
}
$(this).val(textValue);
});
$('textarea.expanding').autogrow();
if (window.parent.document.getElementById('fs2')) {
$('body').mousemove(function(e) {
if (e.pageX < 50 && e.pageY < 50) showHandle();
}).append('<img class="frameHandle" src="images/dbl_l.gif"/>');
$('img.frameHandle').bind('click', function() {frameResize()});
if (window.parent.document.getElementById('fs2').getAttribute('cols') == '0,*') {
$('img.frameHandle').attr('src', 'images/dbl_r.gif');
$('img.frameHandle').attr('alt', 'Restore the navigation bar');
$('img.frameHandle').attr('title', 'Restore the navigation bar');
} else {
$('img.frameHandle').attr('src', 'images/dbl_l.gif').attr('alt', 'Minimize the navigation bar').attr('title', 'Minimize the navigation bar');
}
}
});
}
function openShortcutNode(aspxfile, sc_tid, sc_pid, sc_var, formcls)
{
var urlStr;
urlStr = aspxfile + '?' + 'tempid=' + sc_tid + '\&modelid=' + sc_pid + '\&var=' + sc_var + '\&form_class=' + formcls;
urlStr = urlStr + '\&shortcut_popup=true';
window.open(urlStr, 'shortcut_popup', 'toolbar=no,location=no,directories=no,status=no,menubar=no,scrollbars=yes,resizable=yes,copyhistory=no,width=650,height=550');
/*
// TODO: thread this into comment and form submit logic
// fixup close button not working
$("#shortcutdiv").load(urlStr).dialog(
{ autoOpen: false ,
modal:true,
height: 650,
width: 800,
buttons: [ {text: "Cancel",
click: function() { $(this).dialog("close"); alert('closing'); }
},
{ text: "Submit",
click: function() { var thisForm = $("#shortcutdiv form"); thisForm.submit(); }
}
]
}
);
$("#shortcutdiv").dialog("open");
*/
}
| valueUnChanged | identifier_name |
varinput.js | // varinput.js
// Javascript routines to handle variable rendering
// $Id: //dev/EPS/js/varinput.js#48 $
// Debug helper: flags the form for XML output and posts it with the
// 'table' action so the server echoes the current table state.
function inspect()
{
    var theForm = form1;
    theForm.xml.value = '1';
    SubmitFormSpecial('table');
}
// global variable used for items that submit on change or selection
// Comma-delimited registries (format ",id1,id2,") of variables that trigger
// an automatic submit/publish when changed; populated by addAutosubmit() /
// addAutoPublish() and consulted by valueChanged().
var autosubmit = '';
var autopublish = '';
// When true, auto-submits also press the workspace-bypass button.
var workspaceByPass = false;
// Records that variable *ident* changed by appending it to
// form1.variablechanged (a comma-delimited list ",id1,id2,"), then starts an
// automatic publish or submit when ident is registered in autopublish /
// autosubmit and no auto-submission is already in flight (autosubmitting,
// bypass_comment and PreSubmit are defined elsewhere in this file).
function valueChanged(ident) {
    var theField = document.getElementById('form1').variablechanged;
    if (theField != null) {
        // Seed the list with ",ident," or append "ident," if not already present.
        theField.value = (theField.value == '') ? ',' + ident + ',' : (theField.value.indexOf(',' + ident + ',') > -1) ? theField.value : theField.value + ident + ',';
        if (autopublish.indexOf(',' + ident + ',') > -1 && autosubmitting == 0) {
            // Auto-publish takes precedence over auto-submit.
            bypass_comment = 1; autosubmitting = 1;
            document.form1.pubPress.value = 'pubPress';
            document.form1.autopubvar.value = ident;
            PreSubmit('autopublish');
        }
        else if (autosubmit.indexOf(',' + ident + ',') > -1 && autosubmitting == 0) {
            bypass_comment = 1; autosubmitting = 1;
            document.form1.subPress.value = 'subPress';
            if (workspaceByPass == true) {
                document.form1.bypassPress.value = 'bypassPress';
            }
            PreSubmit();
        }
    }
}
// Removes *ident* from form1.variablechanged's comma-delimited change list
// (entries are stored as ",id1,id2,"; see valueChanged()).
// Fix: match on ','+ident+',' so a short ident can no longer strip the tail
// of a longer one — previously removing "b" would corrupt an ",ab," entry
// because indexOf/replace matched the bare suffix "b,".
function valueUnChanged(ident) {
    var theField = document.getElementById('form1').variablechanged;
    if (theField != null) {
        var token = ',' + ident + ',';
        if (theField.value.indexOf(token) > -1) {
            // Collapse ",ident," back to a single separator, preserving
            // the ",a,b," shape of the remaining list.
            theField.value = theField.value.replace(token, ',');
        }
    }
}
// Truncates the comment field *obj* and records that the comment for *ident*
// changed (in form1.commentchanged, same ",id1,id2," format as
// variablechanged), also marking the variable itself dirty.
function processCmt(obj, ident) {
    obj.value = obj.value.substring(0, 1999); // NOTE(review): caps at 1999 chars; confirm whether the intended limit is 2000.
    var theField = document.getElementById('form1').commentchanged;
    theField.value = (theField.value == '') ? ',' + ident + ',' : (theField.value.indexOf(',' + ident + ',') > -1) ? theField.value : theField.value + ident + ',';
    valueChanged(ident);
}
// Registers *ident* in the global comma-delimited autosubmit list
// (",id1,id2,"); valueChanged() consults this list.
function addAutosubmit(ident) {
    if (autosubmit == '') {
        autosubmit = ',' + ident + ',';
    } else {
        autosubmit = autosubmit + ident + ',';
    }
}
// True when *ident* was registered via addAutosubmit().
function hasAutoSubmit(ident) {
    var needle = ',' + ident + ',';
    return autosubmit.indexOf(needle) > -1;
}
// Registers *ident* in the global comma-delimited autopublish list;
// valueChanged() gives autopublish precedence over autosubmit.
function addAutoPublish(ident) {
    if (autopublish == '') {
        autopublish = ',' + ident + ',';
    } else {
        autopublish = autopublish + ident + ',';
    }
}
// True when any variable / any comment has been recorded as changed: the
// tracking fields hold at least one ",id," entry once anything is dirty.
function hasVariableChanged() { return (document.getElementById('form1').variablechanged.value.length > 1); }
function hasCommentChanged() { return (document.getElementById('form1').commentchanged.value.length > 1); }
// Shared key handler for forms and fields.  Returns acceptEnter for the
// Enter key (13) and true for everything else, so Enter only submits where
// a caller explicitly allows it.  Supports both the legacy IE window.event
// model and the standard event argument.
function keyPressed(f,e, acceptEnter) // can use as form or field handler
{
    var keycode;
    if (window.event) {
        keycode = window.event.keyCode;
    } else if (e) {
        keycode = e.which;
    } else {
        return true;
    }
    return (keycode == 13) ? acceptEnter : true;
}
// Submits form1 with action *s*.  The optional second argument selects
// extra behavior; 'table_image' additionally sets the table_image flag.
function SubmitFormSpecial(s, s2) {
    var theForm = document.getElementById('form1');
    if (s2 == 'table_image') {
        theForm.table_image.value = 'true';
    }
    theForm.act.value = s;
    theForm.submit();
}
// Submits form1 with action *s* and sub-view *v*, then clears both fields
// so a later plain submit is not misinterpreted as this action.
function SubmitFormSpecialLarge(s,v) {
    var form1 = document.getElementById('form1');
    form1.act.value = s;
    form1.sview.value = v;
    form1.submit();
    // Reset after the submit has been dispatched.
    form1.act.value = '';
    form1.sview.value = '';
}
// Submits form1 with the 'Excel' action (server-side export), then resets
// the action and sub-view fields for subsequent submissions.
function SubmitFormAndExcel() {
    var theForm = document.getElementById('form1');
    theForm.act.value = 'Excel';
    theForm.submit();
    theForm.act.value = '';
    theForm.sview.value = '';
}
// Exports the form for variable *s* to Excel by temporarily retargeting
// form1's action at the matching proj/port input/output page, submitting
// into the 'excel' frame, then restoring the original action and target.
function ExcelandRestore(s, isPortForm, varMode) {
    var form1 = document.getElementById('form1');
    var formType = 'proj';
    if (isPortForm == 'true') { formType = 'port'; }
    if (varMode != 'output') { varMode = 'input'; }
    var aspxPage = formType + varMode + '.aspx?var=';
    form1.target = 'excel';
    var oldact = form1.action;
    // Rebuild the action URL: new page + var=s, keeping the old query string
    // from its first '&' but stripping the original var= parameter.
    form1.action = aspxPage + s + oldact.substring(oldact.indexOf('&')).replace(/var=(\w+)&/, '');
    form1.act.value = 'Excel';
    form1.submit();
    // Restore so later submits go back to the original page.
    form1.target = '_self';
    form1.act.value = '';
    form1.action = oldact;
}
// Submits form1 with action *s* into a newly opened popup window that keeps
// the browser menus/toolbars enabled, then restores the form's target and
// action.  Repaired: a dataset-corruption marker had fused the
// "form1.target = wName;" and "form1.act.value = s;" statements onto one line.
function SubmitFormSpecialWithMenus(s) {
    var d = new Date(), form1 = document.getElementById('form1');
    // NOTE(review): seconds+minutes is only unique-ish as a window name;
    // two calls in the same second reuse the window.
    var wName = d.getUTCSeconds() + '_' + d.getUTCMinutes() + '_'; //Create a unique name for the window
    window.open('about:blank', wName, 'toolbar=yes,location=no,directories=yes,status=yes,menubar=yes,scrollbars=yes,resizable=yes,copyhistory=no,width=850,height=700');
    form1.target = wName;
    form1.act.value = s;
    form1.submit();
    form1.target = '_self';
    form1.act.value = '';
}
// Callback invoked when the comment dialog returns; completes the pending
// submission via PostSubmit() (defined elsewhere).
function ReturnComment() {
    PostSubmit();
}
// Submits with action *s* only when the select *field* has a non-empty
// value, then resets the selection to its first entry.
function SubmitFormSpecialOnValue(field,s) {
    if (field.value.length == 0) {
        return;
    }
    SubmitFormSpecial(s);
    field.selectedIndex = 0;
}
// Formats the current local time as "h:mm:ss AM/PM" (12-hour clock,
// zero-padded minutes and seconds, hour 0 rendered as 12).
function currTime() {
    var now = new Date();
    var hours = now.getHours();
    var pad = function(v) { return (v < 10 ? ':0' : ':') + v; };
    var hour12 = hours % 12;
    if (hour12 == 0) hour12 = 12;
    var timeValue = '' + hour12 + pad(now.getMinutes()) + pad(now.getSeconds());
    timeValue += (hours >= 12) ? ' PM' : ' AM';
    return timeValue;
}
// Briefly reveals the frameset-resize handle, hiding it again after 2.6s;
// showHandleFlag guards against stacking show/hide timers.
function showHandle() {
    if (showHandleFlag != 0) {
        return;
    }
    showHandleFlag = 1;
    $('img.frameHandle').show();
    setTimeout("$('img.frameHandle').hide(); showHandleFlag=0;", 2600);
}
// Toggles the navigation-bar column of the parent frameset ('fs2') between
// collapsed ('0,*') and its normal 200px width, swapping the handle icon
// and tooltip text to match the new state.
function frameResize() {
    if (window.parent.document.getElementById('fs2').getAttribute('cols') == '200,*') {
        window.parent.document.getElementById('fs2').setAttribute('cols', '0,*');
        $('img.frameHandle').attr('src', 'images/dbl_r.gif').attr('alt', 'Restore the navigation bar').attr('title', 'Restore the navigation bar');
    } else {
        window.parent.document.getElementById('fs2').setAttribute('cols', '200,*');
        $('img.frameHandle').attr('src', 'images/dbl_l.gif').attr('alt', 'Minimize the navigation bar').attr('title', 'Minimize the navigation bar');
    }
}
// Wires grid-input behaviors once the DOM is ready: select-all on focus,
// column resize on blur, Enter suppression, and change tracking via fch().
// origVals, resizeCol, InputKeyPress and fch are defined elsewhere.
function bindEvents() {
    var justFocused;
    // Swallow the mouseup that immediately follows focus so the select-all
    // performed in the focus handler is not undone by the click.
    $('.epsgrid input').mouseup(function(e) {if (justFocused == 1) {e.preventDefault(); justFocused = 0;} });
    $('.epsgrid input[type!="checkbox"]').focus(function() {
        origVals[this.name] = this.value; // remember the pre-edit value
        this.select();
        justFocused = 1;
        setTimeout('justFocused = 0', 50);
    }).blur(function() {
        resizeCol(this);
    }).keydown(function(e) {
        return InputKeyPress(this, e);
    }).keypress(function(e) {
        return keyPressed(this, e, false); // never submit on Enter inside the grid
    }).change(function() {
        // Field ids look like "<varname>_id"; pass the bare varname to fch().
        fch(this, true, this.id.substring(0, this.id.lastIndexOf('_id')));
    });
}
// LEAVE AT BOTTOM OF JS FILE //
// special jQuery to paint events onto inputs //
var showHandleFlag;
showHandleFlag = 0;
if (typeof jQuery != 'undefined') {
    $(document).ready(function() {
        setTimeout('bindEvents()', 1);
        // Re-run the per-variable validator shortly after an .epsvar changes.
        $('.epsvar').change(function() {
            var cmdStr;
            cmdStr = 'validate_' + $(this).attr('vname') + "($('#valueField_" + $(this).attr('vname') + "').val(),'" + $(this).attr('orig_value') + "',true)";
            setTimeout(cmdStr, 1);
        });
        // Expand the '**br**' markers stored in textareas back into newlines.
        var textValue;
        $('textarea.expanding').each(function() {
            textValue = $(this).text();
            while (textValue.indexOf('**br**') != -1) {
                textValue = textValue.replace('**br**', '\n');
            }
            $(this).val(textValue);
        });
        $('textarea.expanding').autogrow();
        // When hosted inside the 'fs2' frameset, install the nav-bar
        // collapse/restore handle (see showHandle()/frameResize()).
        if (window.parent.document.getElementById('fs2')) {
            $('body').mousemove(function(e) {
                if (e.pageX < 50 && e.pageY < 50) showHandle();
            }).append('<img class="frameHandle" src="images/dbl_l.gif"/>');
            $('img.frameHandle').bind('click', function() {frameResize()});
            if (window.parent.document.getElementById('fs2').getAttribute('cols') == '0,*') {
                $('img.frameHandle').attr('src', 'images/dbl_r.gif');
                $('img.frameHandle').attr('alt', 'Restore the navigation bar');
                $('img.frameHandle').attr('title', 'Restore the navigation bar');
            } else {
                $('img.frameHandle').attr('src', 'images/dbl_l.gif').attr('alt', 'Minimize the navigation bar').attr('title', 'Minimize the navigation bar');
            }
        }
    });
}
// Opens the shortcut-node editor for variable *sc_var* in a named popup
// window.  The jQuery-UI dialog variant below is intentionally left disabled
// (see the TODO).  Fix: removed the redundant '\&' escapes — '&' needs no
// escaping inside a JS string literal, so they evaluated to plain '&' anyway.
function openShortcutNode(aspxfile, sc_tid, sc_pid, sc_var, formcls)
{
    var urlStr;
    urlStr = aspxfile + '?' + 'tempid=' + sc_tid + '&modelid=' + sc_pid + '&var=' + sc_var + '&form_class=' + formcls;
    urlStr = urlStr + '&shortcut_popup=true';
    window.open(urlStr, 'shortcut_popup', 'toolbar=no,location=no,directories=no,status=no,menubar=no,scrollbars=yes,resizable=yes,copyhistory=no,width=650,height=550');
    /*
    // TODO: thread this into comment and form submit logic
    // fixup close button not working
    $("#shortcutdiv").load(urlStr).dialog(
        { autoOpen: false ,
          modal:true,
          height: 650,
          width: 800,
          buttons: [ {text: "Cancel",
                      click: function() { $(this).dialog("close"); alert('closing'); }
                     },
                     { text: "Submit",
                       click: function() { var thisForm = $("#shortcutdiv form"); thisForm.submit(); }
                     }
                   ]
        }
    );
    $("#shortcutdiv").dialog("open");
    */
}
|
rec-align-u64.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #2303
mod rusti {
#[abi = "rust-intrinsic"]
pub extern "rust-intrinsic" {
pub fn pref_align_of<T>() -> uint;
pub fn min_align_of<T>() -> uint;
}
}
// This is the type with the questionable alignment
struct Inner {
c64: u64
}
// This is the type that contains the type with the
// questionable alignment, for testing
struct Outer {
c8: u8,
t: Inner
}
#[cfg(target_os = "linux")]
#[cfg(target_os = "macos")]
#[cfg(target_os = "freebsd")]
mod m {
#[cfg(target_arch = "x86")]
pub mod m {
pub fn align() -> uint { 4u }
pub fn size() -> uint { 12u }
}
#[cfg(target_arch = "x86_64")]
mod m {
pub fn align() -> uint { 8u }
pub fn size() -> uint { 16u }
}
}
#[cfg(target_os = "win32")]
mod m {
#[cfg(target_arch = "x86")]
pub mod m {
pub fn align() -> uint { 8u }
pub fn size() -> uint { 16u }
}
}
#[cfg(target_os = "android")]
mod m {
#[cfg(target_arch = "arm")]
pub mod m {
pub fn align() -> uint { 4u }
pub fn size() -> uint { 12u } | pub fn main() {
unsafe {
let x = Outer {c8: 22u8, t: Inner {c64: 44u64}};
// Send it through the shape code
let y = fmt!("%?", x);
debug!("align inner = %?", rusti::min_align_of::<Inner>());
debug!("size outer = %?", sys::size_of::<Outer>());
debug!("y = %s", y);
// per clang/gcc the alignment of `Inner` is 4 on x86.
assert!(rusti::min_align_of::<Inner>() == m::m::align());
// per clang/gcc the size of `Outer` should be 12
// because `Inner`s alignment was 4.
assert!(sys::size_of::<Outer>() == m::m::size());
assert!(y == ~"{c8: 22, t: {c64: 44}}");
}
} | }
}
| random_line_split |
rec-align-u64.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #2303
// Compiler intrinsics used to query the preferred / minimum alignment the
// compiler assigns to a type.
mod rusti {
    #[abi = "rust-intrinsic"]
    pub extern "rust-intrinsic" {
        pub fn pref_align_of<T>() -> uint;
        pub fn min_align_of<T>() -> uint;
    }
}
// This is the type with the questionable alignment
// (a lone u64 field whose required alignment differs across targets).
struct Inner {
    c64: u64
}

// This is the type that contains the type with the
// questionable alignment, for testing
// (the leading u8 forces padding before `t` that depends on Inner's alignment).
struct Outer {
    c8: u8,
    t: Inner
}
// Expected alignment/size of Inner/Outer per target, asserted by main().
#[cfg(target_os = "linux")]
#[cfg(target_os = "macos")]
#[cfg(target_os = "freebsd")]
mod m {
    #[cfg(target_arch = "x86")]
    pub mod m {
        pub fn align() -> uint { 4u }
        pub fn size() -> uint { 12u }
    }
    // `pub` added for consistency with the x86 variant: main() reaches in
    // through m::m::align()/size(), so the inner module must be visible.
    #[cfg(target_arch = "x86_64")]
    pub mod m {
        pub fn align() -> uint { 8u }
        pub fn size() -> uint { 16u }
    }
}
// Expected values on win32 x86 (the test asserts align 8 / size 16 there,
// unlike linux x86).  Repaired: a dataset identifier hole had erased this
// function's name; restored to `align`, matching the displaced fragment and
// the sibling target variants.
#[cfg(target_os = "win32")]
mod m {
    #[cfg(target_arch = "x86")]
    pub mod m {
        pub fn align() -> uint { 8u }
        pub fn size() -> uint { 16u }
    }
}
// Expected values on ARM Android (the test asserts align 4 / size 12 there,
// matching the 32-bit x86 case above).
#[cfg(target_os = "android")]
mod m {
    #[cfg(target_arch = "arm")]
    pub mod m {
        pub fn align() -> uint { 4u }
        pub fn size() -> uint { 12u }
    }
}
// Exercises issue #2303: checks that the computed alignment of `Inner` and
// size of `Outer` match the per-target expectations in `mod m`, and that
// formatting `Outer` through the shape code yields the expected text.
pub fn main() {
    unsafe {
        let x = Outer {c8: 22u8, t: Inner {c64: 44u64}};
        // Send it through the shape code
        let y = fmt!("%?", x);
        debug!("align inner = %?", rusti::min_align_of::<Inner>());
        debug!("size outer = %?", sys::size_of::<Outer>());
        debug!("y = %s", y);
        // per clang/gcc the alignment of `Inner` is 4 on x86.
        assert!(rusti::min_align_of::<Inner>() == m::m::align());
        // per clang/gcc the size of `Outer` should be 12
        // because `Inner`s alignment was 4.
        assert!(sys::size_of::<Outer>() == m::m::size());
        assert!(y == ~"{c8: 22, t: {c64: 44}}");
    }
}
| align | identifier_name |
rec-align-u64.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #2303
mod rusti {
#[abi = "rust-intrinsic"]
pub extern "rust-intrinsic" {
pub fn pref_align_of<T>() -> uint;
pub fn min_align_of<T>() -> uint;
}
}
// This is the type with the questionable alignment
struct Inner {
c64: u64
}
// This is the type that contains the type with the
// questionable alignment, for testing
struct Outer {
c8: u8,
t: Inner
}
// Expected alignment/size of Inner/Outer per target, asserted by main().
// Repaired: a dataset body hole had erased align()'s `{ 4u }` body; restored
// from the displaced fragment at the end of this dataset row.
#[cfg(target_os = "linux")]
#[cfg(target_os = "macos")]
#[cfg(target_os = "freebsd")]
mod m {
    #[cfg(target_arch = "x86")]
    pub mod m {
        pub fn align() -> uint { 4u }
        pub fn size() -> uint { 12u }
    }
    // `pub` added for consistency with the x86 variant: main() reaches in
    // through m::m::align()/size(), so the inner module must be visible.
    #[cfg(target_arch = "x86_64")]
    pub mod m {
        pub fn align() -> uint { 8u }
        pub fn size() -> uint { 16u }
    }
}
#[cfg(target_os = "win32")]
mod m {
#[cfg(target_arch = "x86")]
pub mod m {
pub fn align() -> uint { 8u }
pub fn size() -> uint { 16u }
}
}
#[cfg(target_os = "android")]
mod m {
#[cfg(target_arch = "arm")]
pub mod m {
pub fn align() -> uint { 4u }
pub fn size() -> uint { 12u }
}
}
pub fn main() {
unsafe {
let x = Outer {c8: 22u8, t: Inner {c64: 44u64}};
// Send it through the shape code
let y = fmt!("%?", x);
debug!("align inner = %?", rusti::min_align_of::<Inner>());
debug!("size outer = %?", sys::size_of::<Outer>());
debug!("y = %s", y);
// per clang/gcc the alignment of `Inner` is 4 on x86.
assert!(rusti::min_align_of::<Inner>() == m::m::align());
// per clang/gcc the size of `Outer` should be 12
// because `Inner`s alignment was 4.
assert!(sys::size_of::<Outer>() == m::m::size());
assert!(y == ~"{c8: 22, t: {c64: 44}}");
}
}
| { 4u } | identifier_body |
skia_gold_properties.py | # Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Class for storing Skia Gold comparison properties.
Examples:
* git revision being tested
* Whether the test is being run locally or on a bot
* What the continuous integration system is
"""
import logging
import os
class SkiaGoldProperties(object):
  def __init__(self, args):
    """Abstract class to validate and store properties related to Skia Gold.

    Args:
      args: The parsed arguments from an argparse.ArgumentParser.
    """
    # Every property defaults to None and is filled in from |args| (some are
    # instead determined lazily later) by _InitializeProperties().
    self._git_revision = None
    self._issue = None
    self._patchset = None
    self._job_id = None
    self._local_pixel_tests = None
    self._no_luci_auth = None
    self._bypass_skia_gold_functionality = None
    self._code_review_system = None
    self._continuous_integration_system = None
    self._local_png_directory = None
    self._InitializeProperties(args)
  def IsTryjobRun(self):
    # A Gerrit issue is only present for tryjob runs (see
    # _InitializeProperties).
    return self.issue is not None

  @property
  def continuous_integration_system(self):
    # Defaults to 'buildbucket' when not supplied on the command line.
    return self._continuous_integration_system or 'buildbucket'

  @property
  def code_review_system(self):
    # Defaults to 'gerrit' when not supplied on the command line.
    return self._code_review_system or 'gerrit'

  @property
  def git_revision(self):
    # Determined lazily; see _GetGitRevision().
    return self._GetGitRevision()

  @property
  def issue(self):
    return self._issue

  @property
  def job_id(self):
    return self._job_id

  @property
  def local_pixel_tests(self):
    # Determined lazily; see _IsLocalRun().
    return self._IsLocalRun()

  @property
  def local_png_directory(self):
    return self._local_png_directory

  @property
  def no_luci_auth(self):
    return self._no_luci_auth
@property
def patchset(self):
|
  @property
  def bypass_skia_gold_functionality(self):
    # Flag set from --bypass-skia-gold-functionality; presumably callers skip
    # Gold comparisons when it is truthy — confirm against the session code.
    return self._bypass_skia_gold_functionality

  @staticmethod
  def _GetGitOriginMainHeadSha1():
    # Subclasses must override this to return the sha1 of origin/main HEAD;
    # used by _GetGitRevision() for local (workstation) runs.
    raise NotImplementedError()
  def _GetGitRevision(self):
    """Returns the git revision under test, resolving it lazily.

    Raises:
      RuntimeError: if running on a bot without --git-revision, or if the
          revision cannot be determined from the local checkout.
    """
    if not self._git_revision:
      # Automated tests should always pass the revision, so assume we're on
      # a workstation and try to get the local origin/master HEAD.
      if not self._IsLocalRun():
        raise RuntimeError(
            '--git-revision was not passed when running on a bot')
      revision = self._GetGitOriginMainHeadSha1()
      # A full git sha1 is exactly 40 hex characters.
      if not revision or len(revision) != 40:
        raise RuntimeError(
            '--git-revision not passed and unable to determine from git')
      self._git_revision = revision
    return self._git_revision
  def _IsLocalRun(self):
    """Returns whether the test appears to be running on a workstation.

    The result is cached in self._local_pixel_tests; when that is unset, it
    is derived from the environment and the determination is logged.
    """
    if self._local_pixel_tests is None:
      # Look for the presence of the SWARMING_SERVER environment variable as a
      # heuristic to determine whether we're running on a workstation or a bot.
      # This should always be set on swarming, but would be strange to be set on
      # a workstation.
      self._local_pixel_tests = 'SWARMING_SERVER' not in os.environ
      if self._local_pixel_tests:
        logging.warning(
            'Automatically determined that test is running on a workstation')
      else:
        logging.warning(
            'Automatically determined that test is running on a bot')
    return self._local_pixel_tests
  def _InitializeProperties(self, args):
    """Copies the Skia-Gold-relevant attributes off the parsed |args|.

    Every attribute is optional (checked via hasattr), but once a Gerrit
    issue is given the other tryjob arguments become mandatory.

    Raises:
      RuntimeError: if --gerrit-issue is passed without --gerrit-patchset
          or without --buildbucket-id.
    """
    if hasattr(args, 'local_pixel_tests'):
      # If not set, will be automatically determined later if needed.
      self._local_pixel_tests = args.local_pixel_tests
    if hasattr(args, 'skia_gold_local_png_write_directory'):
      self._local_png_directory = args.skia_gold_local_png_write_directory
    if hasattr(args, 'no_luci_auth'):
      self._no_luci_auth = args.no_luci_auth
    if hasattr(args, 'bypass_skia_gold_functionality'):
      self._bypass_skia_gold_functionality = args.bypass_skia_gold_functionality
    if hasattr(args, 'code_review_system'):
      self._code_review_system = args.code_review_system
    if hasattr(args, 'continuous_integration_system'):
      self._continuous_integration_system = args.continuous_integration_system
    # Will be automatically determined later if needed.
    if not hasattr(args, 'git_revision') or not args.git_revision:
      return
    self._git_revision = args.git_revision
    # Only expected on tryjob runs.
    if not hasattr(args, 'gerrit_issue') or not args.gerrit_issue:
      return
    self._issue = args.gerrit_issue
    if not hasattr(args, 'gerrit_patchset') or not args.gerrit_patchset:
      raise RuntimeError(
          '--gerrit-issue passed, but --gerrit-patchset not passed.')
    self._patchset = args.gerrit_patchset
    if not hasattr(args, 'buildbucket_id') or not args.buildbucket_id:
      raise RuntimeError(
          '--gerrit-issue passed, but --buildbucket-id not passed.')
    self._job_id = args.buildbucket_id
| return self._patchset | identifier_body |
skia_gold_properties.py | # Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Class for storing Skia Gold comparison properties.
Examples:
* git revision being tested
* Whether the test is being run locally or on a bot
* What the continuous integration system is
"""
import logging
import os
class SkiaGoldProperties(object):
def __init__(self, args):
"""Abstract class to validate and store properties related to Skia Gold.
Args:
args: The parsed arguments from an argparse.ArgumentParser.
"""
self._git_revision = None
self._issue = None
self._patchset = None
self._job_id = None
self._local_pixel_tests = None
self._no_luci_auth = None
self._bypass_skia_gold_functionality = None
self._code_review_system = None
self._continuous_integration_system = None
self._local_png_directory = None
self._InitializeProperties(args)
def IsTryjobRun(self):
return self.issue is not None
@property
def continuous_integration_system(self):
return self._continuous_integration_system or 'buildbucket'
@property
def code_review_system(self):
return self._code_review_system or 'gerrit'
@property
def git_revision(self):
return self._GetGitRevision()
@property
def issue(self):
return self._issue
@property
def job_id(self):
return self._job_id
@property
def local_pixel_tests(self):
return self._IsLocalRun()
@property
def local_png_directory(self):
return self._local_png_directory
@property
def no_luci_auth(self):
return self._no_luci_auth
@property
def patchset(self):
return self._patchset
@property
def bypass_skia_gold_functionality(self):
return self._bypass_skia_gold_functionality
@staticmethod
def _GetGitOriginMainHeadSha1():
raise NotImplementedError()
def _GetGitRevision(self):
if not self._git_revision:
# Automated tests should always pass the revision, so assume we're on
# a workstation and try to get the local origin/master HEAD.
if not self._IsLocalRun():
raise RuntimeError(
'--git-revision was not passed when running on a bot')
revision = self._GetGitOriginMainHeadSha1()
if not revision or len(revision) != 40:
raise RuntimeError(
'--git-revision not passed and unable to determine from git')
self._git_revision = revision
return self._git_revision
def | (self):
if self._local_pixel_tests is None:
# Look for the presence of the SWARMING_SERVER environment variable as a
# heuristic to determine whether we're running on a workstation or a bot.
# This should always be set on swarming, but would be strange to be set on
# a workstation.
self._local_pixel_tests = 'SWARMING_SERVER' not in os.environ
if self._local_pixel_tests:
logging.warning(
'Automatically determined that test is running on a workstation')
else:
logging.warning(
'Automatically determined that test is running on a bot')
return self._local_pixel_tests
def _InitializeProperties(self, args):
if hasattr(args, 'local_pixel_tests'):
# If not set, will be automatically determined later if needed.
self._local_pixel_tests = args.local_pixel_tests
if hasattr(args, 'skia_gold_local_png_write_directory'):
self._local_png_directory = args.skia_gold_local_png_write_directory
if hasattr(args, 'no_luci_auth'):
self._no_luci_auth = args.no_luci_auth
if hasattr(args, 'bypass_skia_gold_functionality'):
self._bypass_skia_gold_functionality = args.bypass_skia_gold_functionality
if hasattr(args, 'code_review_system'):
self._code_review_system = args.code_review_system
if hasattr(args, 'continuous_integration_system'):
self._continuous_integration_system = args.continuous_integration_system
# Will be automatically determined later if needed.
if not hasattr(args, 'git_revision') or not args.git_revision:
return
self._git_revision = args.git_revision
# Only expected on tryjob runs.
if not hasattr(args, 'gerrit_issue') or not args.gerrit_issue:
return
self._issue = args.gerrit_issue
if not hasattr(args, 'gerrit_patchset') or not args.gerrit_patchset:
raise RuntimeError(
'--gerrit-issue passed, but --gerrit-patchset not passed.')
self._patchset = args.gerrit_patchset
if not hasattr(args, 'buildbucket_id') or not args.buildbucket_id:
raise RuntimeError(
'--gerrit-issue passed, but --buildbucket-id not passed.')
self._job_id = args.buildbucket_id
| _IsLocalRun | identifier_name |
skia_gold_properties.py | # Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Class for storing Skia Gold comparison properties.
Examples:
* git revision being tested
* Whether the test is being run locally or on a bot
* What the continuous integration system is
"""
import logging
import os
class SkiaGoldProperties(object):
def __init__(self, args):
"""Abstract class to validate and store properties related to Skia Gold.
Args:
args: The parsed arguments from an argparse.ArgumentParser.
"""
self._git_revision = None
self._issue = None
self._patchset = None
self._job_id = None
self._local_pixel_tests = None
self._no_luci_auth = None
self._bypass_skia_gold_functionality = None
self._code_review_system = None
self._continuous_integration_system = None
self._local_png_directory = None
self._InitializeProperties(args)
def IsTryjobRun(self):
return self.issue is not None
@property
def continuous_integration_system(self):
return self._continuous_integration_system or 'buildbucket'
@property
def code_review_system(self):
return self._code_review_system or 'gerrit'
@property
def git_revision(self):
return self._GetGitRevision()
@property
def issue(self):
return self._issue
@property
def job_id(self):
return self._job_id
@property
def local_pixel_tests(self):
return self._IsLocalRun()
@property
def local_png_directory(self):
return self._local_png_directory
@property
def no_luci_auth(self):
return self._no_luci_auth
@property
def patchset(self):
return self._patchset
@property
def bypass_skia_gold_functionality(self):
return self._bypass_skia_gold_functionality
@staticmethod
def _GetGitOriginMainHeadSha1():
raise NotImplementedError()
def _GetGitRevision(self):
if not self._git_revision:
# Automated tests should always pass the revision, so assume we're on
# a workstation and try to get the local origin/master HEAD.
if not self._IsLocalRun():
raise RuntimeError(
'--git-revision was not passed when running on a bot')
revision = self._GetGitOriginMainHeadSha1()
if not revision or len(revision) != 40:
raise RuntimeError(
'--git-revision not passed and unable to determine from git')
self._git_revision = revision
return self._git_revision
def _IsLocalRun(self):
if self._local_pixel_tests is None:
# Look for the presence of the SWARMING_SERVER environment variable as a
# heuristic to determine whether we're running on a workstation or a bot.
# This should always be set on swarming, but would be strange to be set on
# a workstation.
self._local_pixel_tests = 'SWARMING_SERVER' not in os.environ
if self._local_pixel_tests:
logging.warning(
'Automatically determined that test is running on a workstation')
else:
logging.warning(
'Automatically determined that test is running on a bot')
return self._local_pixel_tests
def _InitializeProperties(self, args):
if hasattr(args, 'local_pixel_tests'):
# If not set, will be automatically determined later if needed.
self._local_pixel_tests = args.local_pixel_tests
if hasattr(args, 'skia_gold_local_png_write_directory'):
self._local_png_directory = args.skia_gold_local_png_write_directory
if hasattr(args, 'no_luci_auth'):
self._no_luci_auth = args.no_luci_auth | if hasattr(args, 'bypass_skia_gold_functionality'):
self._bypass_skia_gold_functionality = args.bypass_skia_gold_functionality
if hasattr(args, 'code_review_system'):
self._code_review_system = args.code_review_system
if hasattr(args, 'continuous_integration_system'):
self._continuous_integration_system = args.continuous_integration_system
# Will be automatically determined later if needed.
if not hasattr(args, 'git_revision') or not args.git_revision:
return
self._git_revision = args.git_revision
# Only expected on tryjob runs.
if not hasattr(args, 'gerrit_issue') or not args.gerrit_issue:
return
self._issue = args.gerrit_issue
if not hasattr(args, 'gerrit_patchset') or not args.gerrit_patchset:
raise RuntimeError(
'--gerrit-issue passed, but --gerrit-patchset not passed.')
self._patchset = args.gerrit_patchset
if not hasattr(args, 'buildbucket_id') or not args.buildbucket_id:
raise RuntimeError(
'--gerrit-issue passed, but --buildbucket-id not passed.')
self._job_id = args.buildbucket_id | random_line_split |
|
skia_gold_properties.py | # Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Class for storing Skia Gold comparison properties.
Examples:
* git revision being tested
* Whether the test is being run locally or on a bot
* What the continuous integration system is
"""
import logging
import os
class SkiaGoldProperties(object):
def __init__(self, args):
"""Abstract class to validate and store properties related to Skia Gold.
Args:
args: The parsed arguments from an argparse.ArgumentParser.
"""
self._git_revision = None
self._issue = None
self._patchset = None
self._job_id = None
self._local_pixel_tests = None
self._no_luci_auth = None
self._bypass_skia_gold_functionality = None
self._code_review_system = None
self._continuous_integration_system = None
self._local_png_directory = None
self._InitializeProperties(args)
def IsTryjobRun(self):
return self.issue is not None
@property
def continuous_integration_system(self):
return self._continuous_integration_system or 'buildbucket'
@property
def code_review_system(self):
return self._code_review_system or 'gerrit'
@property
def git_revision(self):
return self._GetGitRevision()
@property
def issue(self):
return self._issue
@property
def job_id(self):
return self._job_id
@property
def local_pixel_tests(self):
return self._IsLocalRun()
@property
def local_png_directory(self):
return self._local_png_directory
@property
def no_luci_auth(self):
return self._no_luci_auth
@property
def patchset(self):
return self._patchset
@property
def bypass_skia_gold_functionality(self):
return self._bypass_skia_gold_functionality
@staticmethod
def _GetGitOriginMainHeadSha1():
raise NotImplementedError()
def _GetGitRevision(self):
if not self._git_revision:
# Automated tests should always pass the revision, so assume we're on
# a workstation and try to get the local origin/master HEAD.
if not self._IsLocalRun():
|
revision = self._GetGitOriginMainHeadSha1()
if not revision or len(revision) != 40:
raise RuntimeError(
'--git-revision not passed and unable to determine from git')
self._git_revision = revision
return self._git_revision
def _IsLocalRun(self):
if self._local_pixel_tests is None:
# Look for the presence of the SWARMING_SERVER environment variable as a
# heuristic to determine whether we're running on a workstation or a bot.
# This should always be set on swarming, but would be strange to be set on
# a workstation.
self._local_pixel_tests = 'SWARMING_SERVER' not in os.environ
if self._local_pixel_tests:
logging.warning(
'Automatically determined that test is running on a workstation')
else:
logging.warning(
'Automatically determined that test is running on a bot')
return self._local_pixel_tests
def _InitializeProperties(self, args):
if hasattr(args, 'local_pixel_tests'):
# If not set, will be automatically determined later if needed.
self._local_pixel_tests = args.local_pixel_tests
if hasattr(args, 'skia_gold_local_png_write_directory'):
self._local_png_directory = args.skia_gold_local_png_write_directory
if hasattr(args, 'no_luci_auth'):
self._no_luci_auth = args.no_luci_auth
if hasattr(args, 'bypass_skia_gold_functionality'):
self._bypass_skia_gold_functionality = args.bypass_skia_gold_functionality
if hasattr(args, 'code_review_system'):
self._code_review_system = args.code_review_system
if hasattr(args, 'continuous_integration_system'):
self._continuous_integration_system = args.continuous_integration_system
# Will be automatically determined later if needed.
if not hasattr(args, 'git_revision') or not args.git_revision:
return
self._git_revision = args.git_revision
# Only expected on tryjob runs.
if not hasattr(args, 'gerrit_issue') or not args.gerrit_issue:
return
self._issue = args.gerrit_issue
if not hasattr(args, 'gerrit_patchset') or not args.gerrit_patchset:
raise RuntimeError(
'--gerrit-issue passed, but --gerrit-patchset not passed.')
self._patchset = args.gerrit_patchset
if not hasattr(args, 'buildbucket_id') or not args.buildbucket_id:
raise RuntimeError(
'--gerrit-issue passed, but --buildbucket-id not passed.')
self._job_id = args.buildbucket_id
| raise RuntimeError(
'--git-revision was not passed when running on a bot') | conditional_block |
0004_unique_together_sort.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-29 19:01
from __future__ import unicode_literals
from django.db import migrations
class | (migrations.Migration):
dependencies = [
('books', '0003_initial_subjects_languages_creatortypes'),
]
operations = [
migrations.AlterModelOptions(
name='creatortype',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='language',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='owninginstitution',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='personbookrelationshiptype',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='publisher',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='subject',
options={'ordering': ['name']},
),
migrations.AlterUniqueTogether(
name='booklanguage',
unique_together=set([('book', 'language')]),
),
migrations.AlterUniqueTogether(
name='booksubject',
unique_together=set([('subject', 'book')]),
),
]
| Migration | identifier_name |
0004_unique_together_sort.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-29 19:01
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
| dependencies = [
('books', '0003_initial_subjects_languages_creatortypes'),
]
operations = [
migrations.AlterModelOptions(
name='creatortype',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='language',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='owninginstitution',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='personbookrelationshiptype',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='publisher',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='subject',
options={'ordering': ['name']},
),
migrations.AlterUniqueTogether(
name='booklanguage',
unique_together=set([('book', 'language')]),
),
migrations.AlterUniqueTogether(
name='booksubject',
unique_together=set([('subject', 'book')]),
),
] | identifier_body |
|
0004_unique_together_sort.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-29 19:01
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('books', '0003_initial_subjects_languages_creatortypes'),
]
operations = [
migrations.AlterModelOptions(
name='creatortype',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='language', | options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='owninginstitution',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='personbookrelationshiptype',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='publisher',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='subject',
options={'ordering': ['name']},
),
migrations.AlterUniqueTogether(
name='booklanguage',
unique_together=set([('book', 'language')]),
),
migrations.AlterUniqueTogether(
name='booksubject',
unique_together=set([('subject', 'book')]),
),
] | random_line_split |
|
test-fb-hgext-diff-since-last-submit-t.py | # Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
from __future__ import absolute_import
from testutil.dott import feature, sh, testtmp # noqa: F401
# Load extensions
(
sh % "cat"
<< r"""
[extensions]
arcconfig=$TESTDIR/../edenscm/hgext/extlib/phabricator/arcconfig.py
arcdiff=
"""
>> "$HGRCPATH"
)
# Diff with no revision
sh % "hg init repo"
sh % "cd repo"
sh % "touch foo"
sh % "hg add foo"
sh % "hg ci -qm 'No rev'" | sh % "hg diff --since-last-submit" == r"""
abort: local changeset is not associated with a differential revision
[255]"""
sh % "hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == r"""
abort: local changeset is not associated with a differential revision
[255]"""
# Fake a diff
sh % "echo bleet" > "foo"
sh % "hg ci -qm 'Differential Revision: https://phabricator.fb.com/D1'"
sh % "hg diff --since-last-submit" == r"""
abort: no .arcconfig found
[255]"""
sh % "hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == r"""
abort: no .arcconfig found
[255]"""
# Prep configuration
sh % "echo '{}'" > ".arcrc"
sh % 'echo \'{"config" : {"default" : "https://a.com/api"}, "hosts" : {"https://a.com/api/" : { "user" : "testuser", "oauth" : "garbage_cert"}}}\'' > ".arcconfig"
# Now progressively test the response handling for variations of missing data
sh % "cat" << r"""
[{}]
""" > "$TESTTMP/mockduit"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit" == r"""
Error calling graphql: Unexpected graphql response format
abort: unable to determine previous changeset hash
[255]"""
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == r"""
Error calling graphql: Unexpected graphql response format
abort: unable to determine previous changeset hash
[255]"""
sh % "cat" << r"""
[{"data": {"query": [{"results": {"nodes": [{
"number": 1,
"diff_status_name": "Needs Review",
"differential_diffs": {"count": 3},
"is_landing": false,
"land_job_status": "NO_LAND_RUNNING",
"needs_final_review_status": "NOT_NEEDED",
"created_time": 123,
"updated_time": 222
}]}}]}}]
""" > "$TESTTMP/mockduit"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit" == r"""
abort: unable to determine previous changeset hash
[255]"""
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == r"""
abort: unable to determine previous changeset hash
[255]"""
sh % "cat" << r"""
[{"data": {"query": [{"results": {"nodes": [{
"number": 1,
"diff_status_name": "Needs Review",
"is_landing": false,
"land_job_status": "NO_LAND_RUNNING",
"needs_final_review_status": "NOT_NEEDED",
"created_time": 123,
"updated_time": 222
}]}}]}}]
""" > "$TESTTMP/mockduit"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit" == r"""
abort: unable to determine previous changeset hash
[255]"""
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == r"""
abort: unable to determine previous changeset hash
[255]"""
# This is the case when the diff is up to date with the current commit;
# there is no diff since what was landed.
sh % "cat" << r"""
[{"data": {"query": [{"results": {"nodes": [{
"number": 1,
"diff_status_name": "Needs Review",
"latest_active_diff": {
"local_commit_info": {
"nodes": [
{"property_value": "{\"lolwut\": {\"time\": 0, \"commit\": \"2e6531b7dada2a3e5638e136de05f51e94a427f4\"}}"}
]
}
},
"differential_diffs": {"count": 1},
"is_landing": false,
"land_job_status": "NO_LAND_RUNNING",
"needs_final_review_status": "NOT_NEEDED",
"created_time": 123,
"updated_time": 222
}]}}]}}]
""" > "$TESTTMP/mockduit"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == "2e6531b7dada2a3e5638e136de05f51e94a427f4 Differential Revision: https://phabricator.fb.com/D1"
# This is the case when the diff points at our parent commit, we expect to
# see the bleet text show up. There's a fake hash that I've injected into
# the commit list returned from our mocked phabricator; it is present to
# assert that we order the commits consistently based on the time field.
sh % "cat" << r"""
[{"data": {"query": [{"results": {"nodes": [{
"number": 1,
"diff_status_name": "Needs Review",
"latest_active_diff": {
"local_commit_info": {
"nodes": [
{"property_value": "{\"lolwut\": {\"time\": 0, \"commit\": \"88dd5a13bf28b99853a24bddfc93d4c44e07c6bd\"}}"}
]
}
},
"differential_diffs": {"count": 1},
"is_landing": false,
"land_job_status": "NO_LAND_RUNNING",
"needs_final_review_status": "NOT_NEEDED",
"created_time": 123,
"updated_time": 222
}]}}]}}]
""" > "$TESTTMP/mockduit"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit --nodates" == r"""
diff -r 88dd5a13bf28 -r 2e6531b7dada foo
--- a/foo
+++ b/foo
@@ -0,0 +1,1 @@
+bleet"""
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == "88dd5a13bf28b99853a24bddfc93d4c44e07c6bd No rev"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit-2o" == r"""
Phabricator rev: 88dd5a13bf28b99853a24bddfc93d4c44e07c6bd
Local rev: 2e6531b7dada2a3e5638e136de05f51e94a427f4 (.)
Changed: foo
| ...
| +bleet"""
# Make a new commit on top, and then use -r to look at the previous commit
sh % "echo other" > "foo"
sh % "hg commit -m 'Other commmit'"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit --nodates -r 2e6531b" == r"""
diff -r 88dd5a13bf28 -r 2e6531b7dada foo
--- a/foo
+++ b/foo
@@ -0,0 +1,1 @@
+bleet"""
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg log -r 'lastsubmitted(2e6531b)' -T '{node} {desc}\\n'" == "88dd5a13bf28b99853a24bddfc93d4c44e07c6bd No rev" | random_line_split |
|
main.rs | //Test File gtk_test
extern crate gtk;
//Custom mods
mod system_io;
mod gtk_converter;
pub mod m_config;
//Os interaction
use std::process::Command;
use std::process::ChildStdout;
use std::io;
use std::io::prelude::*;
use gtk::Builder;
use gtk::prelude::*;
// make moving clones into closures more convenient
//shameless copied from the examples
macro_rules! clone {
(@param _) => ( _ );
(@param $x:ident) => ( $x );
($($n:ident),+ => move || $body:expr) => (
{
$( let $n = $n.clone(); )+
move || $body
}
);
($($n:ident),+ => move |$($p:tt),+| $body:expr) => (
{
$( let $n = $n.clone(); )+
move |$(clone!(@param $p),)+| $body
}
);
}
fn execute_command(location: &String, command: &String, arguments: &String) |
fn convert_to_str(x: &str) -> &str{
x
}
fn main() {
if gtk::init().is_err() {
println!("Failed to initialize GTK.");
return;
}
let glade_src = include_str!("shipload.glade");
let builder = Builder::new();
builder.add_from_string(glade_src).unwrap();
//**********************************************
//Crucial
let configuration = m_config::create_config();
//Main
//Get Window
let window: gtk::Window = builder.get_object("window").unwrap();
//Close Button
let close_button: gtk::Button = builder.get_object("B_Close").unwrap();
//Set Header bar information
let header: gtk::HeaderBar = builder.get_object("Header").unwrap();
let pref_window: gtk::Window = builder.get_object("W_Preferences").unwrap();
let pref_button: gtk::Button = builder.get_object("B_Preferences").unwrap();
let pref_close: gtk::Button = builder.get_object("Pref_Close").unwrap();
let pref_save: gtk::Button = builder.get_object("Pref_Save").unwrap();
//Cargo
let cargo_build: gtk::Button = builder.get_object("B_Cargo_Build").unwrap();
let cargo_build_folder: gtk::FileChooserButton = builder.get_object("Cargo_Build_FolderChooser").unwrap();
let cargo_build_arguments: gtk::Entry = builder.get_object("Cargo_Build_ExtraOptions_Entry").unwrap();
let cargo_run_run: gtk::Button = builder.get_object("B_Cargo_Run").unwrap();
let cargo_run_arguments: gtk::Entry = builder.get_object("Cargo_Run_ExtraOptions_Entry").unwrap();
//RustUp
let ru_install_Button: gtk::Button = builder.get_object("B_NT_Install").unwrap();
let ru_install_channel: gtk::ComboBoxText = builder.get_object("RU_New_Channel").unwrap();
let ru_activate_channel_chooser: gtk::ComboBoxText = builder.get_object("RU_Active_Channel").unwrap();
let ru_activate_channel_button: gtk::Button = builder.get_object("B_NT_Activate").unwrap();
let ru_update_button: gtk::Button = builder.get_object("B_RU_Update").unwrap();
//Crates.io
let text_buffer: gtk::TextBuffer = builder.get_object("CratesTextBuffer").unwrap();
let search_button: gtk::Button = builder.get_object("CratesSearch").unwrap();
let search_entry: gtk::Entry = builder.get_object("CratesSearch_Entry").unwrap();
let level_bar: gtk::LevelBar = builder.get_object("SearchLevel").unwrap();
//**********************************************
//Main
header.set_title("Teddy");
header.set_subtitle("Rolf");
//Close event
close_button.connect_clicked(move |_| {
println!("Closing normal!");
gtk::main_quit();
Inhibit(false);
});
//Window Close event
window.connect_delete_event(|_,_| {
gtk::main_quit();
Inhibit(false)
});
//Preferences show event
pref_button.connect_clicked(clone!(pref_window => move |_| {
pref_window.show_all();
}));
//Hide, without save
pref_close.connect_clicked(clone!(pref_window => move |_| {
pref_window.hide();
}));
//Hide, with save
pref_save.connect_clicked(clone!(pref_window => move |_| {
pref_window.hide();
}));
//Cargo
cargo_build.connect_clicked(clone!(cargo_build_folder, cargo_build_arguments => move |_|{
let argument_string: String = gtk_converter::text_from_entry(&cargo_build_arguments);
let locationstr: String = gtk_converter::path_from_filechooser(&cargo_build_folder);
execute_command(&locationstr, &"cargo build".to_string(), &argument_string.to_string());
}));
cargo_run_run.connect_clicked(clone!(cargo_run_arguments, cargo_build_folder => move |_|{
let argument_string: String = gtk_converter::text_from_entry(&cargo_run_arguments);
let locationstr: String = gtk_converter::path_from_filechooser(&cargo_build_folder);
system_io::execute_command(&locationstr, &"cargo run".to_string(), &argument_string.to_string());
}));
//RustUp
//Install new toolchain
ru_install_Button.connect_clicked(clone!(ru_install_channel => move |_| {
//Sort output
let entry = ru_install_channel.get_active_text();
let mut to_install: String =String::from("NoContent");
match entry {
Some(e) => to_install = e,
None => {}
}
//Join install command/argument
let execute_string: String = String::from("toolchain install ") + to_install.as_str();
//INstall
system_io::execute_command(&String::from("~/"), &String::from("rustup"), &execute_string);
println!("Installed: {}", to_install);
}));
//Activate channel
ru_activate_channel_button.connect_clicked(clone!(ru_activate_channel_chooser => move |_|{
//Sort output
let entry = ru_install_channel.get_active_text();
let mut to_activate: String =String::from("NoContent");
match entry {
Some(e) => to_activate = e,
None => {}
}
let activate_arg: String = String::from("default ") + to_activate.as_str();
system_io::execute_command(&String::from("~/"), &String::from("rustup"), &activate_arg);
}));
//Update everything
ru_update_button.connect_clicked(|_| {
system_io::execute_command(&String::from("~/"), &String::from("rustup"), &String::from("update"));
});
//Crates.io
search_button.connect_clicked(clone!(text_buffer, search_entry => move |_| {
let entry: String = gtk_converter::text_from_entry(&search_entry);
while level_bar.get_value() != 0.2 {
level_bar.set_value(0.2);
}
println!("Outside: {}", entry);
level_bar.set_value(0.5);
let output = Command::new("cargo").arg("search")
.arg(entry)
.arg("--limit")
.arg("40")
.output()
.expect("Failed to ls");
let out: String = String::from_utf8(output.stdout).expect("Not UTF-8");
level_bar.set_value(0.75);
let last: &str = convert_to_str(&out);
text_buffer.set_text(last);
level_bar.set_value(1.0);
}));
window.show_all();
gtk::main();
}
| {
Command::new("xterm")
.arg("-hold")
.arg("-e")
.arg("cd ".to_string() + location + " && " + command + " " + arguments)
.spawn()
.expect("Failed to run command");
} | identifier_body |
main.rs | //Test File gtk_test
extern crate gtk;
//Custom mods
mod system_io;
mod gtk_converter;
pub mod m_config;
//Os interaction
use std::process::Command;
use std::process::ChildStdout;
use std::io;
use std::io::prelude::*;
use gtk::Builder;
use gtk::prelude::*;
// make moving clones into closures more convenient
//shameless copied from the examples
macro_rules! clone {
(@param _) => ( _ );
(@param $x:ident) => ( $x );
($($n:ident),+ => move || $body:expr) => (
{
$( let $n = $n.clone(); )+
move || $body
}
);
($($n:ident),+ => move |$($p:tt),+| $body:expr) => (
{
$( let $n = $n.clone(); )+
move |$(clone!(@param $p),)+| $body
}
);
}
fn | (location: &String, command: &String, arguments: &String){
Command::new("xterm")
.arg("-hold")
.arg("-e")
.arg("cd ".to_string() + location + " && " + command + " " + arguments)
.spawn()
.expect("Failed to run command");
}
fn convert_to_str(x: &str) -> &str{
x
}
fn main() {
if gtk::init().is_err() {
println!("Failed to initialize GTK.");
return;
}
let glade_src = include_str!("shipload.glade");
let builder = Builder::new();
builder.add_from_string(glade_src).unwrap();
//**********************************************
//Crucial
let configuration = m_config::create_config();
//Main
//Get Window
let window: gtk::Window = builder.get_object("window").unwrap();
//Close Button
let close_button: gtk::Button = builder.get_object("B_Close").unwrap();
//Set Header bar information
let header: gtk::HeaderBar = builder.get_object("Header").unwrap();
let pref_window: gtk::Window = builder.get_object("W_Preferences").unwrap();
let pref_button: gtk::Button = builder.get_object("B_Preferences").unwrap();
let pref_close: gtk::Button = builder.get_object("Pref_Close").unwrap();
let pref_save: gtk::Button = builder.get_object("Pref_Save").unwrap();
//Cargo
let cargo_build: gtk::Button = builder.get_object("B_Cargo_Build").unwrap();
let cargo_build_folder: gtk::FileChooserButton = builder.get_object("Cargo_Build_FolderChooser").unwrap();
let cargo_build_arguments: gtk::Entry = builder.get_object("Cargo_Build_ExtraOptions_Entry").unwrap();
let cargo_run_run: gtk::Button = builder.get_object("B_Cargo_Run").unwrap();
let cargo_run_arguments: gtk::Entry = builder.get_object("Cargo_Run_ExtraOptions_Entry").unwrap();
//RustUp
let ru_install_Button: gtk::Button = builder.get_object("B_NT_Install").unwrap();
let ru_install_channel: gtk::ComboBoxText = builder.get_object("RU_New_Channel").unwrap();
let ru_activate_channel_chooser: gtk::ComboBoxText = builder.get_object("RU_Active_Channel").unwrap();
let ru_activate_channel_button: gtk::Button = builder.get_object("B_NT_Activate").unwrap();
let ru_update_button: gtk::Button = builder.get_object("B_RU_Update").unwrap();
//Crates.io
let text_buffer: gtk::TextBuffer = builder.get_object("CratesTextBuffer").unwrap();
let search_button: gtk::Button = builder.get_object("CratesSearch").unwrap();
let search_entry: gtk::Entry = builder.get_object("CratesSearch_Entry").unwrap();
let level_bar: gtk::LevelBar = builder.get_object("SearchLevel").unwrap();
//**********************************************
//Main
header.set_title("Teddy");
header.set_subtitle("Rolf");
//Close event
close_button.connect_clicked(move |_| {
println!("Closing normal!");
gtk::main_quit();
Inhibit(false);
});
//Window Close event
window.connect_delete_event(|_,_| {
gtk::main_quit();
Inhibit(false)
});
//Preferences show event
pref_button.connect_clicked(clone!(pref_window => move |_| {
pref_window.show_all();
}));
//Hide, without save
pref_close.connect_clicked(clone!(pref_window => move |_| {
pref_window.hide();
}));
//Hide, with save
pref_save.connect_clicked(clone!(pref_window => move |_| {
pref_window.hide();
}));
//Cargo
cargo_build.connect_clicked(clone!(cargo_build_folder, cargo_build_arguments => move |_|{
let argument_string: String = gtk_converter::text_from_entry(&cargo_build_arguments);
let locationstr: String = gtk_converter::path_from_filechooser(&cargo_build_folder);
execute_command(&locationstr, &"cargo build".to_string(), &argument_string.to_string());
}));
cargo_run_run.connect_clicked(clone!(cargo_run_arguments, cargo_build_folder => move |_|{
let argument_string: String = gtk_converter::text_from_entry(&cargo_run_arguments);
let locationstr: String = gtk_converter::path_from_filechooser(&cargo_build_folder);
system_io::execute_command(&locationstr, &"cargo run".to_string(), &argument_string.to_string());
}));
//RustUp
//Install new toolchain
ru_install_Button.connect_clicked(clone!(ru_install_channel => move |_| {
//Sort output
let entry = ru_install_channel.get_active_text();
let mut to_install: String =String::from("NoContent");
match entry {
Some(e) => to_install = e,
None => {}
}
//Join install command/argument
let execute_string: String = String::from("toolchain install ") + to_install.as_str();
//INstall
system_io::execute_command(&String::from("~/"), &String::from("rustup"), &execute_string);
println!("Installed: {}", to_install);
}));
//Activate channel
ru_activate_channel_button.connect_clicked(clone!(ru_activate_channel_chooser => move |_|{
//Sort output
let entry = ru_install_channel.get_active_text();
let mut to_activate: String =String::from("NoContent");
match entry {
Some(e) => to_activate = e,
None => {}
}
let activate_arg: String = String::from("default ") + to_activate.as_str();
system_io::execute_command(&String::from("~/"), &String::from("rustup"), &activate_arg);
}));
//Update everything
ru_update_button.connect_clicked(|_| {
system_io::execute_command(&String::from("~/"), &String::from("rustup"), &String::from("update"));
});
//Crates.io
search_button.connect_clicked(clone!(text_buffer, search_entry => move |_| {
let entry: String = gtk_converter::text_from_entry(&search_entry);
while level_bar.get_value() != 0.2 {
level_bar.set_value(0.2);
}
println!("Outside: {}", entry);
level_bar.set_value(0.5);
let output = Command::new("cargo").arg("search")
.arg(entry)
.arg("--limit")
.arg("40")
.output()
.expect("Failed to ls");
let out: String = String::from_utf8(output.stdout).expect("Not UTF-8");
level_bar.set_value(0.75);
let last: &str = convert_to_str(&out);
text_buffer.set_text(last);
level_bar.set_value(1.0);
}));
window.show_all();
gtk::main();
}
| execute_command | identifier_name |
main.rs | //Test File gtk_test
extern crate gtk;
//Custom mods
mod system_io;
mod gtk_converter;
pub mod m_config;
//Os interaction
use std::process::Command;
use std::process::ChildStdout;
use std::io;
use std::io::prelude::*;
use gtk::Builder;
use gtk::prelude::*;
// make moving clones into closures more convenient
//shameless copied from the examples
macro_rules! clone {
(@param _) => ( _ );
(@param $x:ident) => ( $x );
($($n:ident),+ => move || $body:expr) => (
{
$( let $n = $n.clone(); )+
move || $body
}
);
($($n:ident),+ => move |$($p:tt),+| $body:expr) => (
{
$( let $n = $n.clone(); )+
move |$(clone!(@param $p),)+| $body
}
);
}
fn execute_command(location: &String, command: &String, arguments: &String){
Command::new("xterm")
.arg("-hold")
.arg("-e")
.arg("cd ".to_string() + location + " && " + command + " " + arguments)
.spawn()
.expect("Failed to run command");
}
fn convert_to_str(x: &str) -> &str{
x
}
fn main() {
if gtk::init().is_err() {
println!("Failed to initialize GTK.");
return;
}
let glade_src = include_str!("shipload.glade");
let builder = Builder::new();
builder.add_from_string(glade_src).unwrap();
//**********************************************
//Crucial
let configuration = m_config::create_config();
//Main
//Get Window
let window: gtk::Window = builder.get_object("window").unwrap();
//Close Button
let close_button: gtk::Button = builder.get_object("B_Close").unwrap();
//Set Header bar information
let header: gtk::HeaderBar = builder.get_object("Header").unwrap();
let pref_window: gtk::Window = builder.get_object("W_Preferences").unwrap();
let pref_button: gtk::Button = builder.get_object("B_Preferences").unwrap();
let pref_close: gtk::Button = builder.get_object("Pref_Close").unwrap();
let pref_save: gtk::Button = builder.get_object("Pref_Save").unwrap();
//Cargo
let cargo_build: gtk::Button = builder.get_object("B_Cargo_Build").unwrap();
let cargo_build_folder: gtk::FileChooserButton = builder.get_object("Cargo_Build_FolderChooser").unwrap();
let cargo_build_arguments: gtk::Entry = builder.get_object("Cargo_Build_ExtraOptions_Entry").unwrap();
let cargo_run_run: gtk::Button = builder.get_object("B_Cargo_Run").unwrap();
let cargo_run_arguments: gtk::Entry = builder.get_object("Cargo_Run_ExtraOptions_Entry").unwrap();
//RustUp
let ru_install_Button: gtk::Button = builder.get_object("B_NT_Install").unwrap();
let ru_install_channel: gtk::ComboBoxText = builder.get_object("RU_New_Channel").unwrap();
let ru_activate_channel_chooser: gtk::ComboBoxText = builder.get_object("RU_Active_Channel").unwrap();
let ru_activate_channel_button: gtk::Button = builder.get_object("B_NT_Activate").unwrap();
let ru_update_button: gtk::Button = builder.get_object("B_RU_Update").unwrap();
//Crates.io
let text_buffer: gtk::TextBuffer = builder.get_object("CratesTextBuffer").unwrap();
let search_button: gtk::Button = builder.get_object("CratesSearch").unwrap();
let search_entry: gtk::Entry = builder.get_object("CratesSearch_Entry").unwrap();
let level_bar: gtk::LevelBar = builder.get_object("SearchLevel").unwrap();
//**********************************************
//Main
header.set_title("Teddy");
header.set_subtitle("Rolf");
//Close event
close_button.connect_clicked(move |_| {
println!("Closing normal!");
gtk::main_quit();
Inhibit(false);
});
//Window Close event
window.connect_delete_event(|_,_| {
gtk::main_quit();
Inhibit(false)
});
//Preferences show event
pref_button.connect_clicked(clone!(pref_window => move |_| {
pref_window.show_all();
}));
//Hide, without save
pref_close.connect_clicked(clone!(pref_window => move |_| {
pref_window.hide();
}));
| pref_window.hide();
}));
//Cargo
cargo_build.connect_clicked(clone!(cargo_build_folder, cargo_build_arguments => move |_|{
let argument_string: String = gtk_converter::text_from_entry(&cargo_build_arguments);
let locationstr: String = gtk_converter::path_from_filechooser(&cargo_build_folder);
execute_command(&locationstr, &"cargo build".to_string(), &argument_string.to_string());
}));
cargo_run_run.connect_clicked(clone!(cargo_run_arguments, cargo_build_folder => move |_|{
let argument_string: String = gtk_converter::text_from_entry(&cargo_run_arguments);
let locationstr: String = gtk_converter::path_from_filechooser(&cargo_build_folder);
system_io::execute_command(&locationstr, &"cargo run".to_string(), &argument_string.to_string());
}));
//RustUp
//Install new toolchain
ru_install_Button.connect_clicked(clone!(ru_install_channel => move |_| {
//Sort output
let entry = ru_install_channel.get_active_text();
let mut to_install: String =String::from("NoContent");
match entry {
Some(e) => to_install = e,
None => {}
}
//Join install command/argument
let execute_string: String = String::from("toolchain install ") + to_install.as_str();
//INstall
system_io::execute_command(&String::from("~/"), &String::from("rustup"), &execute_string);
println!("Installed: {}", to_install);
}));
//Activate channel
ru_activate_channel_button.connect_clicked(clone!(ru_activate_channel_chooser => move |_|{
//Sort output
let entry = ru_install_channel.get_active_text();
let mut to_activate: String =String::from("NoContent");
match entry {
Some(e) => to_activate = e,
None => {}
}
let activate_arg: String = String::from("default ") + to_activate.as_str();
system_io::execute_command(&String::from("~/"), &String::from("rustup"), &activate_arg);
}));
//Update everything
ru_update_button.connect_clicked(|_| {
system_io::execute_command(&String::from("~/"), &String::from("rustup"), &String::from("update"));
});
//Crates.io
search_button.connect_clicked(clone!(text_buffer, search_entry => move |_| {
let entry: String = gtk_converter::text_from_entry(&search_entry);
while level_bar.get_value() != 0.2 {
level_bar.set_value(0.2);
}
println!("Outside: {}", entry);
level_bar.set_value(0.5);
let output = Command::new("cargo").arg("search")
.arg(entry)
.arg("--limit")
.arg("40")
.output()
.expect("Failed to ls");
let out: String = String::from_utf8(output.stdout).expect("Not UTF-8");
level_bar.set_value(0.75);
let last: &str = convert_to_str(&out);
text_buffer.set_text(last);
level_bar.set_value(1.0);
}));
window.show_all();
gtk::main();
} | //Hide, with save
pref_save.connect_clicked(clone!(pref_window => move |_| { | random_line_split |
main.rs | //Test File gtk_test
extern crate gtk;
//Custom mods
mod system_io;
mod gtk_converter;
pub mod m_config;
//Os interaction
use std::process::Command;
use std::process::ChildStdout;
use std::io;
use std::io::prelude::*;
use gtk::Builder;
use gtk::prelude::*;
// make moving clones into closures more convenient
//shameless copied from the examples
macro_rules! clone {
(@param _) => ( _ );
(@param $x:ident) => ( $x );
($($n:ident),+ => move || $body:expr) => (
{
$( let $n = $n.clone(); )+
move || $body
}
);
($($n:ident),+ => move |$($p:tt),+| $body:expr) => (
{
$( let $n = $n.clone(); )+
move |$(clone!(@param $p),)+| $body
}
);
}
fn execute_command(location: &String, command: &String, arguments: &String){
Command::new("xterm")
.arg("-hold")
.arg("-e")
.arg("cd ".to_string() + location + " && " + command + " " + arguments)
.spawn()
.expect("Failed to run command");
}
fn convert_to_str(x: &str) -> &str{
x
}
fn main() {
if gtk::init().is_err() |
let glade_src = include_str!("shipload.glade");
let builder = Builder::new();
builder.add_from_string(glade_src).unwrap();
//**********************************************
//Crucial
let configuration = m_config::create_config();
//Main
//Get Window
let window: gtk::Window = builder.get_object("window").unwrap();
//Close Button
let close_button: gtk::Button = builder.get_object("B_Close").unwrap();
//Set Header bar information
let header: gtk::HeaderBar = builder.get_object("Header").unwrap();
let pref_window: gtk::Window = builder.get_object("W_Preferences").unwrap();
let pref_button: gtk::Button = builder.get_object("B_Preferences").unwrap();
let pref_close: gtk::Button = builder.get_object("Pref_Close").unwrap();
let pref_save: gtk::Button = builder.get_object("Pref_Save").unwrap();
//Cargo
let cargo_build: gtk::Button = builder.get_object("B_Cargo_Build").unwrap();
let cargo_build_folder: gtk::FileChooserButton = builder.get_object("Cargo_Build_FolderChooser").unwrap();
let cargo_build_arguments: gtk::Entry = builder.get_object("Cargo_Build_ExtraOptions_Entry").unwrap();
let cargo_run_run: gtk::Button = builder.get_object("B_Cargo_Run").unwrap();
let cargo_run_arguments: gtk::Entry = builder.get_object("Cargo_Run_ExtraOptions_Entry").unwrap();
//RustUp
let ru_install_Button: gtk::Button = builder.get_object("B_NT_Install").unwrap();
let ru_install_channel: gtk::ComboBoxText = builder.get_object("RU_New_Channel").unwrap();
let ru_activate_channel_chooser: gtk::ComboBoxText = builder.get_object("RU_Active_Channel").unwrap();
let ru_activate_channel_button: gtk::Button = builder.get_object("B_NT_Activate").unwrap();
let ru_update_button: gtk::Button = builder.get_object("B_RU_Update").unwrap();
//Crates.io
let text_buffer: gtk::TextBuffer = builder.get_object("CratesTextBuffer").unwrap();
let search_button: gtk::Button = builder.get_object("CratesSearch").unwrap();
let search_entry: gtk::Entry = builder.get_object("CratesSearch_Entry").unwrap();
let level_bar: gtk::LevelBar = builder.get_object("SearchLevel").unwrap();
//**********************************************
//Main
header.set_title("Teddy");
header.set_subtitle("Rolf");
//Close event
close_button.connect_clicked(move |_| {
println!("Closing normal!");
gtk::main_quit();
Inhibit(false);
});
//Window Close event
window.connect_delete_event(|_,_| {
gtk::main_quit();
Inhibit(false)
});
//Preferences show event
pref_button.connect_clicked(clone!(pref_window => move |_| {
pref_window.show_all();
}));
//Hide, without save
pref_close.connect_clicked(clone!(pref_window => move |_| {
pref_window.hide();
}));
//Hide, with save
pref_save.connect_clicked(clone!(pref_window => move |_| {
pref_window.hide();
}));
//Cargo
cargo_build.connect_clicked(clone!(cargo_build_folder, cargo_build_arguments => move |_|{
let argument_string: String = gtk_converter::text_from_entry(&cargo_build_arguments);
let locationstr: String = gtk_converter::path_from_filechooser(&cargo_build_folder);
execute_command(&locationstr, &"cargo build".to_string(), &argument_string.to_string());
}));
cargo_run_run.connect_clicked(clone!(cargo_run_arguments, cargo_build_folder => move |_|{
let argument_string: String = gtk_converter::text_from_entry(&cargo_run_arguments);
let locationstr: String = gtk_converter::path_from_filechooser(&cargo_build_folder);
system_io::execute_command(&locationstr, &"cargo run".to_string(), &argument_string.to_string());
}));
//RustUp
//Install new toolchain
ru_install_Button.connect_clicked(clone!(ru_install_channel => move |_| {
//Sort output
let entry = ru_install_channel.get_active_text();
let mut to_install: String =String::from("NoContent");
match entry {
Some(e) => to_install = e,
None => {}
}
//Join install command/argument
let execute_string: String = String::from("toolchain install ") + to_install.as_str();
//INstall
system_io::execute_command(&String::from("~/"), &String::from("rustup"), &execute_string);
println!("Installed: {}", to_install);
}));
//Activate channel
ru_activate_channel_button.connect_clicked(clone!(ru_activate_channel_chooser => move |_|{
//Sort output
let entry = ru_install_channel.get_active_text();
let mut to_activate: String =String::from("NoContent");
match entry {
Some(e) => to_activate = e,
None => {}
}
let activate_arg: String = String::from("default ") + to_activate.as_str();
system_io::execute_command(&String::from("~/"), &String::from("rustup"), &activate_arg);
}));
//Update everything
ru_update_button.connect_clicked(|_| {
system_io::execute_command(&String::from("~/"), &String::from("rustup"), &String::from("update"));
});
//Crates.io
search_button.connect_clicked(clone!(text_buffer, search_entry => move |_| {
let entry: String = gtk_converter::text_from_entry(&search_entry);
while level_bar.get_value() != 0.2 {
level_bar.set_value(0.2);
}
println!("Outside: {}", entry);
level_bar.set_value(0.5);
let output = Command::new("cargo").arg("search")
.arg(entry)
.arg("--limit")
.arg("40")
.output()
.expect("Failed to ls");
let out: String = String::from_utf8(output.stdout).expect("Not UTF-8");
level_bar.set_value(0.75);
let last: &str = convert_to_str(&out);
text_buffer.set_text(last);
level_bar.set_value(1.0);
}));
window.show_all();
gtk::main();
}
| {
println!("Failed to initialize GTK.");
return;
} | conditional_block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.