file_name (large_string, length 4-140) | prefix (large_string, length 0-39k) | suffix (large_string, length 0-36.1k) | middle (large_string, length 0-29.4k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
VirtualTimeScheduler.ts | import { AsyncAction } from './AsyncAction';
import { Subscription } from '../Subscription';
import { AsyncScheduler } from './AsyncScheduler';
export class VirtualTimeScheduler extends AsyncScheduler {
protected static frameTimeFactor: number = 10;
public frame: number = 0;
public index: number = -1;
constructor(SchedulerAction: typeof AsyncAction = VirtualAction,
public maxFrames: number = Number.POSITIVE_INFINITY) {
super(SchedulerAction, () => this.frame);
}
/**
* Prompt the Scheduler to execute all of its queued actions, therefore
* clearing its queue.
* @return {void}
*/
public flush(): void {
const {actions, maxFrames} = this;
let error: any, action: AsyncAction<any>;
while ((action = actions.shift()) && (this.frame = action.delay) <= maxFrames) {
if (error = action.execute(action.state, action.delay)) {
break;
}
}
if (error) {
while (action = actions.shift()) {
action.unsubscribe();
}
throw error;
}
}
}
/**
* We need this JSDoc comment for affecting ESDoc.
* @ignore
* @extends {Ignored}
*/
export class | <T> extends AsyncAction<T> {
constructor(protected scheduler: VirtualTimeScheduler,
protected work: (this: VirtualAction<T>, state?: T) => void,
protected index: number = scheduler.index += 1) {
super(scheduler, work);
this.index = scheduler.index = index;
}
public schedule(state?: T, delay: number = 0): Subscription {
return !this.id ?
super.schedule(state, delay) : (
// If an action is rescheduled, we save allocations by mutating its state,
// pushing it to the end of the scheduler queue, and recycling the action.
// But since the VirtualTimeScheduler is used for testing, VirtualActions
// must be immutable so they can be inspected later.
<VirtualAction<T>> this.add(
new VirtualAction<T>(this.scheduler, this.work))
).schedule(state, delay);
}
protected requestAsyncId(scheduler: VirtualTimeScheduler, id?: any, delay: number = 0): any {
this.delay = scheduler.frame + delay;
const {actions} = scheduler;
actions.push(this);
actions.sort(VirtualAction.sortActions);
return true;
}
protected recycleAsyncId(scheduler: VirtualTimeScheduler, id?: any, delay: number = 0): any {
return undefined;
}
public static sortActions<T>(a: VirtualAction<T>, b: VirtualAction<T>) {
if (a.delay === b.delay) {
if (a.index === b.index) {
return 0;
} else if (a.index > b.index) {
return 1;
} else {
return -1;
}
} else if (a.delay > b.delay) {
return 1;
} else {
return -1;
}
}
}
| VirtualAction | identifier_name |
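The row above is RxJS's virtual-time scheduler, where `flush()` drains the queue in delay order. A minimal test-style sketch of driving it, assuming the RxJS 5 `schedule(work, delay, state)` signature inherited from `AsyncScheduler`:

```ts
import { VirtualTimeScheduler } from './VirtualTimeScheduler';

const scheduler = new VirtualTimeScheduler();
const order: number[] = [];

// Queue work at virtual delays; nothing runs until flush().
scheduler.schedule(() => order.push(scheduler.frame), 30);
scheduler.schedule(() => order.push(scheduler.frame), 10);
scheduler.schedule(() => order.push(scheduler.frame), 20);

scheduler.flush();
console.log(order); // [10, 20, 30] - delay order, not insertion order
```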
index.js | var DB = require('./lib/db.js');
function SQLContext(options) {
this.readOnly = options.isReadOnly;
this.db = options.db;
}
function _put(db, key, value, callback) {
db.createOrUpdate(key, value, function(err) {
if(err) {
return callback(err);
}
callback();
});
}
SQLContext.prototype.putObject = function(key, value, callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
var json = JSON.stringify(value);
var buf = new Buffer(json, 'utf8');
_put(this.db, key, buf, callback);
};
SQLContext.prototype.putBuffer = function(key, value, callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
_put(this.db, key, value, callback);
};
SQLContext.prototype.delete = function (key, callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
this.db.remove(key, function(err) {
if(err) {
return callback(err);
}
callback();
});
};
SQLContext.prototype.clear = function (callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
this.db.clearAll(callback);
};
function _get(db, key, callback) {
db.find(key, callback);
}
SQLContext.prototype.getObject = function(key, callback) {
_get(this.db, key, function(err, data) {
if(err) {
return callback(err);
}
if(data) {
try {
data = JSON.parse(data.toString('utf8'));
} catch(e) {
return callback(e);
}
}
callback(null, data);
});
};
SQLContext.prototype.getBuffer = function(key, callback) {
_get(this.db, key, callback);
};
function SQLProvider(options) {
this.options = options || {};
this.user = options.user;
}
| SQLProvider.prototype.open = function(callback) {
if(!this.user) {
return callback(new Error('missing user'));
}
this.db = new DB(this.options, function(err) {
if (err) {
return callback(err);
}
callback();
});
};
SQLProvider.prototype.getReadOnlyContext = function() {
return new SQLContext({isReadOnly: true, db: this.db});
};
SQLProvider.prototype.getReadWriteContext = function() {
return new SQLContext({isReadOnly: false, db: this.db});
};
// Forward db type constants
SQLProvider.MYSQL = DB.MYSQL;
SQLProvider.SQLITE = DB.SQLITE;
SQLProvider.POSTGRES = DB.POSTGRES;
SQLProvider.MARIADB = DB.MARIADB;
module.exports = SQLProvider; | SQLProvider.isSupported = function() {
return (typeof module !== 'undefined' && module.exports);
};
| random_line_split |
index.js | var DB = require('./lib/db.js');
function SQLContext(options) {
this.readOnly = options.isReadOnly;
this.db = options.db;
}
function _put(db, key, value, callback) {
db.createOrUpdate(key, value, function(err) {
if(err) {
return callback(err);
}
callback();
});
}
SQLContext.prototype.putObject = function(key, value, callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
var json = JSON.stringify(value);
var buf = new Buffer(json, 'utf8');
_put(this.db, key, buf, callback);
};
SQLContext.prototype.putBuffer = function(key, value, callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
_put(this.db, key, value, callback);
};
SQLContext.prototype.delete = function (key, callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
this.db.remove(key, function(err) {
if(err) {
return callback(err);
}
callback();
});
};
SQLContext.prototype.clear = function (callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
this.db.clearAll(callback);
};
function _get(db, key, callback) {
db.find(key, callback);
}
SQLContext.prototype.getObject = function(key, callback) {
_get(this.db, key, function(err, data) {
if(err) {
return callback(err);
}
if(data) {
try {
data = JSON.parse(data.toString('utf8'));
} catch(e) {
return callback(e);
}
}
callback(null, data);
});
};
SQLContext.prototype.getBuffer = function(key, callback) {
_get(this.db, key, callback);
};
function SQLProvider(options) |
SQLProvider.isSupported = function() {
return (typeof module !== 'undefined' && module.exports);
};
SQLProvider.prototype.open = function(callback) {
if(!this.user) {
return callback(new Error('missing user'));
}
this.db = new DB(this.options, function(err) {
if (err) {
return callback(err);
}
callback();
});
};
SQLProvider.prototype.getReadOnlyContext = function() {
return new SQLContext({isReadOnly: true, db: this.db});
};
SQLProvider.prototype.getReadWriteContext = function() {
return new SQLContext({isReadOnly: false, db: this.db});
};
// Forward db type constants
SQLProvider.MYSQL = DB.MYSQL;
SQLProvider.SQLITE = DB.SQLITE;
SQLProvider.POSTGRES = DB.POSTGRES;
SQLProvider.MARIADB = DB.MARIADB;
module.exports = SQLProvider;
| {
this.options = options || {};
this.user = options.user;
} | identifier_body |
index.js | var DB = require('./lib/db.js');
function SQLContext(options) {
this.readOnly = options.isReadOnly;
this.db = options.db;
}
function _put(db, key, value, callback) {
db.createOrUpdate(key, value, function(err) {
if(err) {
return callback(err);
}
callback();
});
}
SQLContext.prototype.putObject = function(key, value, callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
var json = JSON.stringify(value);
var buf = new Buffer(json, 'utf8');
_put(this.db, key, buf, callback);
};
SQLContext.prototype.putBuffer = function(key, value, callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
_put(this.db, key, value, callback);
};
SQLContext.prototype.delete = function (key, callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
this.db.remove(key, function(err) {
if(err) {
return callback(err);
}
callback();
});
};
SQLContext.prototype.clear = function (callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
this.db.clearAll(callback);
};
function _get(db, key, callback) {
db.find(key, callback);
}
SQLContext.prototype.getObject = function(key, callback) {
_get(this.db, key, function(err, data) {
if(err) |
if(data) {
try {
data = JSON.parse(data.toString('utf8'));
} catch(e) {
return callback(e);
}
}
callback(null, data);
});
};
SQLContext.prototype.getBuffer = function(key, callback) {
_get(this.db, key, callback);
};
function SQLProvider(options) {
this.options = options || {};
this.user = options.user;
}
SQLProvider.isSupported = function() {
return (typeof module !== 'undefined' && module.exports);
};
SQLProvider.prototype.open = function(callback) {
if(!this.user) {
return callback(new Error('missing user'));
}
this.db = new DB(this.options, function(err) {
if (err) {
return callback(err);
}
callback();
});
};
SQLProvider.prototype.getReadOnlyContext = function() {
return new SQLContext({isReadOnly: true, db: this.db});
};
SQLProvider.prototype.getReadWriteContext = function() {
return new SQLContext({isReadOnly: false, db: this.db});
};
// Forward db type constants
SQLProvider.MYSQL = DB.MYSQL;
SQLProvider.SQLITE = DB.SQLITE;
SQLProvider.POSTGRES = DB.POSTGRES;
SQLProvider.MARIADB = DB.MARIADB;
module.exports = SQLProvider;
| {
return callback(err);
} | conditional_block |
index.js | var DB = require('./lib/db.js');
function SQLContext(options) {
this.readOnly = options.isReadOnly;
this.db = options.db;
}
function _put(db, key, value, callback) {
db.createOrUpdate(key, value, function(err) {
if(err) {
return callback(err);
}
callback();
});
}
SQLContext.prototype.putObject = function(key, value, callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
var json = JSON.stringify(value);
var buf = new Buffer(json, 'utf8');
_put(this.db, key, buf, callback);
};
SQLContext.prototype.putBuffer = function(key, value, callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
_put(this.db, key, value, callback);
};
SQLContext.prototype.delete = function (key, callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
this.db.remove(key, function(err) {
if(err) {
return callback(err);
}
callback();
});
};
SQLContext.prototype.clear = function (callback) {
if(this.readOnly) {
return callback(new Error('write operation on read-only context.'));
}
this.db.clearAll(callback);
};
function _get(db, key, callback) {
db.find(key, callback);
}
SQLContext.prototype.getObject = function(key, callback) {
_get(this.db, key, function(err, data) {
if(err) {
return callback(err);
}
if(data) {
try {
data = JSON.parse(data.toString('utf8'));
} catch(e) {
return callback(e);
}
}
callback(null, data);
});
};
SQLContext.prototype.getBuffer = function(key, callback) {
_get(this.db, key, callback);
};
function | (options) {
this.options = options || {};
this.user = options.user;
}
SQLProvider.isSupported = function() {
return (typeof module !== 'undefined' && module.exports);
};
SQLProvider.prototype.open = function(callback) {
if(!this.user) {
return callback(new Error('missing user'));
}
this.db = new DB(this.options, function(err) {
if (err) {
return callback(err);
}
callback();
});
};
SQLProvider.prototype.getReadOnlyContext = function() {
return new SQLContext({isReadOnly: true, db: this.db});
};
SQLProvider.prototype.getReadWriteContext = function() {
return new SQLContext({isReadOnly: false, db: this.db});
};
// Forward db type constants
SQLProvider.MYSQL = DB.MYSQL;
SQLProvider.SQLITE = DB.SQLITE;
SQLProvider.POSTGRES = DB.POSTGRES;
SQLProvider.MARIADB = DB.MARIADB;
module.exports = SQLProvider;
| SQLProvider | identifier_name |
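A hedged usage sketch for the provider defined in the index.js rows above. Only `user` is actually checked by `open()`; the `type` option is an illustrative assumption, since `./lib/db.js` is not part of these rows:

```js
var SQLProvider = require('./index.js');

// 'type' is illustrative; only 'user' is validated by open() above.
var provider = new SQLProvider({ user: 'alice', type: SQLProvider.SQLITE });

provider.open(function(err) {
  if (err) throw err;
  var rw = provider.getReadWriteContext();
  rw.putObject('/settings', { theme: 'dark' }, function(err) {
    if (err) throw err;
    // Read back through a read-only context; writes on it would fail.
    provider.getReadOnlyContext().getObject('/settings', function(err, value) {
      console.log(err, value); // null { theme: 'dark' }
    });
  });
});
```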
color_space_10_8.py | """
HLS and Color Threshold
-----------------------
You've now seen that various color thresholds can be applied to find the lane lines in images. Here we'll explore
this a bit further and look at a couple examples to see why a color space like HLS can be more robust.
"""
import numpy as np
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
def | ():
"""
Run different HLS and its thresholds.
"""
image = mpimg.imread('test6.jpg')
# Converting original to gray
gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
# Threshold for original image
thresh = (180, 255)
binary = np.zeros_like(gray)
binary[(gray > thresh[0]) & (gray <= thresh[1])] = 1
red = image[:, :, 0]
green = image[:, :, 1]
blue = image[:, :, 2]
thresh_2 = (200, 255)
binary_2 = np.zeros_like(red)
binary_2[(red > thresh_2[0]) & (red <= thresh_2[1])] = 1
# Converting image to HLS
hls = cv2.cvtColor(image, cv2.COLOR_RGB2HLS)
# Splitting HLS
hue = hls[:, :, 0]
lightness = hls[:, :, 1]
saturation = hls[:, :, 2]
# Threshold for saturation
thresh_3 = (90, 255)
binary_3 = np.zeros_like(saturation)
binary_3[(saturation > thresh_3[0]) & (saturation <= thresh_3[1])] = 1
# Threshold for Hue
thresh_4 = (15, 100)
binary_4 = np.zeros_like(hue)
binary_4[(hue > thresh_4[0]) & (hue <= thresh_4[1])] = 1
# -------------------- Figure -----------------------
f = plt.figure()
size_x, size_y = (4, 4)
f.add_subplot(size_x, size_y, 1)
plt.imshow(image)
plt.title("Original")
f.add_subplot(size_x, size_y, 2)
plt.imshow(gray, cmap='gray')
plt.title("Gray")
f.add_subplot(size_x, size_y, 3)
plt.imshow(binary, cmap='gray')
plt.title("Threshold of ({}, {})".format(thresh[0], thresh[1]))
f.add_subplot(size_x, size_y, 4)
plt.imshow(red, cmap='gray')
plt.title("Red")
f.add_subplot(size_x, size_y, 5)
plt.imshow(green, cmap='gray')
plt.title("Green")
f.add_subplot(size_x, size_y, 6)
plt.imshow(blue, cmap='gray')
plt.title("Blue")
f.add_subplot(size_x, size_y, 7)
plt.imshow(binary_2, cmap='gray')
plt.title("Threshold of Red color")
f.add_subplot(size_x, size_y, 8)
plt.imshow(hue, cmap='gray')
plt.title("Hue")
f.add_subplot(size_x, size_y, 9)
plt.imshow(lightness, cmap='gray')
plt.title("Lightness")
f.add_subplot(size_x, size_y, 10)
plt.imshow(saturation, cmap='gray')
plt.title("Saturation")
f.add_subplot(size_x, size_y, 11)
plt.imshow(binary_3, cmap='gray')
plt.title("Threshold of saturation")
f.add_subplot(size_x, size_y, 12)
plt.imshow(binary_4, cmap='gray')
plt.title("Threshold of hue")
plt.show()
if __name__ == '__main__':
run()
| run | identifier_name |
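Building on the docstring's point about HLS robustness, here is a short sketch (using the same illustrative thresholds as the code above) that combines the saturation and hue masks into a single binary lane mask:

```python
import cv2
import numpy as np

def lane_mask(rgb_image, s_thresh=(90, 255), h_thresh=(15, 100)):
    """Combine saturation and hue thresholds into one binary mask."""
    hls = cv2.cvtColor(rgb_image, cv2.COLOR_RGB2HLS)
    hue, saturation = hls[:, :, 0], hls[:, :, 2]
    mask = np.zeros_like(saturation)
    # A pixel survives only if it passes both channel tests.
    mask[(saturation > s_thresh[0]) & (saturation <= s_thresh[1]) &
         (hue > h_thresh[0]) & (hue <= h_thresh[1])] = 1
    return mask
```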
color_space_10_8.py | """
HLS and Color Threshold
-----------------------
You've now seen that various color thresholds can be applied to find the lane lines in images. Here we'll explore
this a bit further and look at a couple examples to see why a color space like HLS can be more robust.
"""
import numpy as np
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
def run():
"""
Run different HLS and its thresholds.
"""
image = mpimg.imread('test6.jpg')
# Converting original to gray
gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
# Threshold for original image
thresh = (180, 255)
binary = np.zeros_like(gray)
binary[(gray > thresh[0]) & (gray <= thresh[1])] = 1
red = image[:, :, 0]
green = image[:, :, 1]
blue = image[:, :, 2]
thresh_2 = (200, 255)
binary_2 = np.zeros_like(red)
binary_2[(red > thresh_2[0]) & (red <= thresh_2[1])] = 1
# Converting image to HLS
hls = cv2.cvtColor(image, cv2.COLOR_RGB2HLS)
# Splitting HLS
hue = hls[:, :, 0]
lightness = hls[:, :, 1]
saturation = hls[:, :, 2]
# Threshold for saturation
thresh_3 = (90, 255)
binary_3 = np.zeros_like(saturation)
binary_3[(saturation > thresh_3[0]) & (saturation <= thresh_3[1])] = 1
# Threshold for Hue
thresh_4 = (15, 100)
binary_4 = np.zeros_like(hue)
binary_4[(hue > thresh_4[0]) & (hue <= thresh_4[1])] = 1
# -------------------- Figure -----------------------
f = plt.figure()
size_x, size_y = (4, 4)
f.add_subplot(size_x, size_y, 1)
plt.imshow(image)
plt.title("Original")
f.add_subplot(size_x, size_y, 2)
plt.imshow(gray, cmap='gray')
plt.title("Gray")
f.add_subplot(size_x, size_y, 3)
plt.imshow(binary, cmap='gray')
plt.title("Threshold of ({}, {})".format(thresh[0], thresh[1]))
f.add_subplot(size_x, size_y, 4)
plt.imshow(red, cmap='gray')
plt.title("Red")
f.add_subplot(size_x, size_y, 5)
plt.imshow(green, cmap='gray')
plt.title("Green")
f.add_subplot(size_x, size_y, 6)
plt.imshow(blue, cmap='gray')
plt.title("Blue")
f.add_subplot(size_x, size_y, 7)
plt.imshow(binary_2, cmap='gray')
plt.title("Threshold of Red color")
f.add_subplot(size_x, size_y, 8)
plt.imshow(hue, cmap='gray')
plt.title("Hue")
f.add_subplot(size_x, size_y, 9)
plt.imshow(lightness, cmap='gray')
plt.title("Lightness")
f.add_subplot(size_x, size_y, 10)
plt.imshow(saturation, cmap='gray')
plt.title("Saturation")
f.add_subplot(size_x, size_y, 11)
plt.imshow(binary_3, cmap='gray')
plt.title("Threshold of saturation")
f.add_subplot(size_x, size_y, 12)
plt.imshow(binary_4, cmap='gray') | plt.title("Threshold of hue")
plt.show()
if __name__ == '__main__':
run() | random_line_split |
|
color_space_10_8.py | """
HLS and Color Threshold
-----------------------
You've now seen that various color thresholds can be applied to find the lane lines in images. Here we'll explore
this a bit further and look at a couple examples to see why a color space like HLS can be more robust.
"""
import numpy as np
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
def run():
|
if __name__ == '__main__':
run()
| """
Run different HLS and its thresholds.
"""
image = mpimg.imread('test6.jpg')
# Converting original to gray
gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
# Threshold for original image
thresh = (180, 255)
binary = np.zeros_like(gray)
binary[(gray > thresh[0]) & (gray <= thresh[1])] = 1
red = image[:, :, 0]
green = image[:, :, 1]
blue = image[:, :, 2]
thresh_2 = (200, 255)
binary_2 = np.zeros_like(red)
binary_2[(red > thresh_2[0]) & (red <= thresh_2[1])] = 1
# Converting image to HLS
hls = cv2.cvtColor(image, cv2.COLOR_RGB2HLS)
# Splitting HLS
hue = hls[:, :, 0]
lightness = hls[:, :, 1]
saturation = hls[:, :, 2]
# Threshold for saturation
thresh_3 = (90, 255)
binary_3 = np.zeros_like(saturation)
binary_3[(saturation > thresh_3[0]) & (saturation <= thresh_3[1])] = 1
# Threshold for Hue
thresh_4 = (15, 100)
binary_4 = np.zeros_like(hue)
binary_4[(hue > thresh_4[0]) & (hue <= thresh_4[1])] = 1
# -------------------- Figure -----------------------
f = plt.figure()
size_x, size_y = (4, 4)
f.add_subplot(size_x, size_y, 1)
plt.imshow(image)
plt.title("Original")
f.add_subplot(size_x, size_y, 2)
plt.imshow(gray, cmap='gray')
plt.title("Gray")
f.add_subplot(size_x, size_y, 3)
plt.imshow(binary, cmap='gray')
plt.title("Threshold of ({}, {})".format(thresh[0], thresh[1]))
f.add_subplot(size_x, size_y, 4)
plt.imshow(red, cmap='gray')
plt.title("Red")
f.add_subplot(size_x, size_y, 5)
plt.imshow(green, cmap='gray')
plt.title("Green")
f.add_subplot(size_x, size_y, 6)
plt.imshow(blue, cmap='gray')
plt.title("Blue")
f.add_subplot(size_x, size_y, 7)
plt.imshow(binary_2, cmap='gray')
plt.title("Threshold of Red color")
f.add_subplot(size_x, size_y, 8)
plt.imshow(hue, cmap='gray')
plt.title("Hue")
f.add_subplot(size_x, size_y, 9)
plt.imshow(lightness, cmap='gray')
plt.title("Lightness")
f.add_subplot(size_x, size_y, 10)
plt.imshow(saturation, cmap='gray')
plt.title("Saturation")
f.add_subplot(size_x, size_y, 11)
plt.imshow(binary_3, cmap='gray')
plt.title("Threshold of saturation")
f.add_subplot(size_x, size_y, 12)
plt.imshow(binary_4, cmap='gray')
plt.title("Threshold of hue")
plt.show() | identifier_body |
color_space_10_8.py | """
HLS and Color Threshold
-----------------------
You've now seen that various color thresholds can be applied to find the lane lines in images. Here we'll explore
this a bit further and look at a couple examples to see why a color space like HLS can be more robust.
"""
import numpy as np
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
def run():
"""
Run different HLS and its thresholds.
"""
image = mpimg.imread('test6.jpg')
# Converting original to gray
gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
# Threshold for original image
thresh = (180, 255)
binary = np.zeros_like(gray)
binary[(gray > thresh[0]) & (gray <= thresh[1])] = 1
red = image[:, :, 0]
green = image[:, :, 1]
blue = image[:, :, 2]
thresh_2 = (200, 255)
binary_2 = np.zeros_like(red)
binary_2[(red > thresh_2[0]) & (red <= thresh_2[1])] = 1
# Converting image to HLS
hls = cv2.cvtColor(image, cv2.COLOR_RGB2HLS)
# Splitting HLS
hue = hls[:, :, 0]
lightness = hls[:, :, 1]
saturation = hls[:, :, 2]
# Threshold for saturation
thresh_3 = (90, 255)
binary_3 = np.zeros_like(saturation)
binary_3[(saturation > thresh_3[0]) & (saturation <= thresh_3[1])] = 1
# Threshold for Hue
thresh_4 = (15, 100)
binary_4 = np.zeros_like(hue)
binary_4[(hue > thresh_4[0]) & (hue <= thresh_4[1])] = 1
# -------------------- Figure -----------------------
f = plt.figure()
size_x, size_y = (4, 4)
f.add_subplot(size_x, size_y, 1)
plt.imshow(image)
plt.title("Original")
f.add_subplot(size_x, size_y, 2)
plt.imshow(gray, cmap='gray')
plt.title("Gray")
f.add_subplot(size_x, size_y, 3)
plt.imshow(binary, cmap='gray')
plt.title("Threshold of ({}, {})".format(thresh[0], thresh[1]))
f.add_subplot(size_x, size_y, 4)
plt.imshow(red, cmap='gray')
plt.title("Red")
f.add_subplot(size_x, size_y, 5)
plt.imshow(green, cmap='gray')
plt.title("Green")
f.add_subplot(size_x, size_y, 6)
plt.imshow(blue, cmap='gray')
plt.title("Blue")
f.add_subplot(size_x, size_y, 7)
plt.imshow(binary_2, cmap='gray')
plt.title("Threshold of Red color")
f.add_subplot(size_x, size_y, 8)
plt.imshow(hue, cmap='gray')
plt.title("Hue")
f.add_subplot(size_x, size_y, 9)
plt.imshow(lightness, cmap='gray')
plt.title("Lightness")
f.add_subplot(size_x, size_y, 10)
plt.imshow(saturation, cmap='gray')
plt.title("Saturation")
f.add_subplot(size_x, size_y, 11)
plt.imshow(binary_3, cmap='gray')
plt.title("Threshold of saturation")
f.add_subplot(size_x, size_y, 12)
plt.imshow(binary_4, cmap='gray')
plt.title("Threshold of hue")
plt.show()
if __name__ == '__main__':
| run() | conditional_block |
|
_npm.js | var exec = require('child_process').exec; // needed by set_timezone() below
module.exports = {
build_dll: function() {
return new Promise(function (resolve, reject) {
console.log("_postinstall > NPM: DLL build started. Please wait..");
require('child_process').execSync('npm run build:dll', {stdio:[0,1,2]});
console.log("_postinstall > NPM: DLL build completed");
resolve();
});
},
compile_aot: function() {
return new Promise(function (resolve, reject) {
console.log("_postinstall > NPM: AOT compilation has started. Please wait..");
require('child_process').execSync('npm run compile', {stdio:[0,1,2]});
console.log("_postinstall > NPM: AOT compilation completed");
resolve();
});
},
build_web: function() {
return new Promise(function (resolve, reject) {
console.log("_postinstall > NPM: Building web (heroku) distro. Please wait..");
require('child_process').execSync('npm run build:aot -- heroku', {stdio:[0,1,2]});
console.log("_postinstall > NPM: Web distro build completed. Ready to serve");
resolve();
});
},
build_ionic_resources: function() {
return new Promise(function (resolve, reject) {
console.log("_postinstall > NPM: Building Ionic resources. Please wait..");
require('child_process').execSync('npm run utils:ionic:resources', {stdio:[0,1,2]});
console.log("_postinstall > NPM: Ionic resources built.");
resolve();
});
},
set_timezone: function() {
return new Promise(function (resolve, reject) {
console.log("_postinstall > NPM: Set timezone to UTC");
exec(`export TZ=UTC`, (error, stdout, stderr) => {
if (error) reject(error);
console.log(`_postinstall > NPM: Timezone has been set to UTC`);
resolve(); | }; | });
});
},
| random_line_split |
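Because every step above returns a Promise, a postinstall driver could presumably chain them in order; this sketch is not part of the original file:

```js
var npm = require('./_npm.js');

npm.set_timezone()
  .then(npm.build_dll)
  .then(npm.compile_aot)
  .then(npm.build_web)
  .then(function() { console.log('postinstall finished'); })
  .catch(function(err) { console.error('postinstall failed:', err); });
```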
proxy.ts | import observable = require("data/observable");
import bindable = require("ui/core/bindable");
import dependencyObservable = require("ui/core/dependency-observable");
import types = require("utils/types");
import definition = require("ui/core/proxy");
export class PropertyMetadata extends dependencyObservable.PropertyMetadata implements definition.PropertyMetadata {
private _onSetNativeValue: dependencyObservable.PropertyChangedCallback;
co | defaultValue: any,
options?: number,
onChanged?: dependencyObservable.PropertyChangedCallback,
onValidateValue?: dependencyObservable.PropertyValidationCallback,
onSetNativeValue?: dependencyObservable.PropertyChangedCallback) {
super(defaultValue, options, onChanged, onValidateValue);
this._onSetNativeValue = onSetNativeValue;
}
get onSetNativeValue(): dependencyObservable.PropertyChangedCallback {
return this._onSetNativeValue;
}
set onSetNativeValue(value: dependencyObservable.PropertyChangedCallback) {
this._onSetNativeValue = value;
}
}
export class ProxyObject extends bindable.Bindable implements definition.ProxyObject {
private _updatingJSPropertiesDict = {};
/**
* Gets the android-specific native instance that lies behind this proxy. Will be available if running on an Android platform.
*/
get android(): any {
return undefined;
}
/**
* Gets the ios-specific native instance that lies behind this proxy. Will be available if running on an iOS platform.
*/
get ios(): any {
return undefined;
}
public _onPropertyChanged(property: dependencyObservable.Property, oldValue: any, newValue: any) {
super._onPropertyChanged(property, oldValue, newValue);
this._trySetNativeValue(property, oldValue, newValue);
}
/**
* A property has changed on the native side directly - e.g. the user types in a TextField.
*/
public _onPropertyChangedFromNative(property: dependencyObservable.Property, newValue: any) {
if (this._updatingJSPropertiesDict[property.name]) {
return;
}
this._updatingJSPropertiesDict[property.name] = true;
this._setValue(property, newValue);
delete this._updatingJSPropertiesDict[property.name];
}
public _syncNativeProperties() {
var that = this;
var eachPropertyCallback = function (property: dependencyObservable.Property): boolean {
that._trySetNativeValue(property);
return true;
}
this._eachSetProperty(eachPropertyCallback);
}
private _trySetNativeValue(property: dependencyObservable.Property, oldValue?:any, newValue?: any) {
if (this._updatingJSPropertiesDict[property.name]) {
// This is the case when a property has changed from the native side directly and we have received the "_onPropertyChanged" event while synchronizing our local cache
return;
}
if (global.android && !this.android) {
// in android we have lazy loading and we do not have a native widget created yet, do not call the onSetNativeValue callback
// properties will be synced when the widget is created
return;
}
var metadata = property.metadata;
if (!(metadata instanceof PropertyMetadata)) {
return;
}
var proxyMetadata = <PropertyMetadata>metadata;
if (proxyMetadata.onSetNativeValue) {
if (types.isUndefined(newValue)) {
newValue = this._getValue(property);
}
proxyMetadata.onSetNativeValue({
object: this,
property: property,
eventName: observable.Observable.propertyChangeEvent,
newValue: newValue,
oldValue: oldValue
});
}
}
} | nstructor(
| identifier_name |
proxy.ts | import observable = require("data/observable");
import bindable = require("ui/core/bindable");
import dependencyObservable = require("ui/core/dependency-observable");
import types = require("utils/types");
import definition = require("ui/core/proxy");
export class PropertyMetadata extends dependencyObservable.PropertyMetadata implements definition.PropertyMetadata {
private _onSetNativeValue: dependencyObservable.PropertyChangedCallback;
constructor(
defaultValue: any,
options?: number,
onChanged?: dependencyObservable.PropertyChangedCallback,
onValidateValue?: dependencyObservable.PropertyValidationCallback,
onSetNativeValue?: dependencyObservable.PropertyChangedCallback) {
super(defaultValue, options, onChanged, onValidateValue);
this._onSetNativeValue = onSetNativeValue;
}
get onSetNativeValue(): dependencyObservable.PropertyChangedCallback {
return this._onSetNativeValue;
}
set onSetNativeValue(value: dependencyObservable.PropertyChangedCallback) {
this._onSetNativeValue = value;
}
}
export class ProxyObject extends bindable.Bindable implements definition.ProxyObject {
private _updatingJSPropertiesDict = {};
/**
* Gets the android-specific native instance that lies behind this proxy. Will be available if running on an Android platform. | get android(): any {
return undefined;
}
/**
* Gets the ios-specific native instance that lies behind this proxy. Will be available if running on an iOS platform.
*/
get ios(): any {
return undefined;
}
public _onPropertyChanged(property: dependencyObservable.Property, oldValue: any, newValue: any) {
super._onPropertyChanged(property, oldValue, newValue);
this._trySetNativeValue(property, oldValue, newValue);
}
/**
* A property has changed on the native side directly - e.g. the user types in a TextField.
*/
public _onPropertyChangedFromNative(property: dependencyObservable.Property, newValue: any) {
if (this._updatingJSPropertiesDict[property.name]) {
return;
}
this._updatingJSPropertiesDict[property.name] = true;
this._setValue(property, newValue);
delete this._updatingJSPropertiesDict[property.name];
}
public _syncNativeProperties() {
var that = this;
var eachPropertyCallback = function (property: dependencyObservable.Property): boolean {
that._trySetNativeValue(property);
return true;
}
this._eachSetProperty(eachPropertyCallback);
}
private _trySetNativeValue(property: dependencyObservable.Property, oldValue?:any, newValue?: any) {
if (this._updatingJSPropertiesDict[property.name]) {
// This is the case when a property has changed from the native side directly and we have received the "_onPropertyChanged" event while synchronizing our local cache
return;
}
if (global.android && !this.android) {
// in android we have lazy loading and we do not have a native widget created yet, do not call the onSetNativeValue callback
// properties will be synced when the widget is created
return;
}
var metadata = property.metadata;
if (!(metadata instanceof PropertyMetadata)) {
return;
}
var proxyMetadata = <PropertyMetadata>metadata;
if (proxyMetadata.onSetNativeValue) {
if (types.isUndefined(newValue)) {
newValue = this._getValue(property);
}
proxyMetadata.onSetNativeValue({
object: this,
property: property,
eventName: observable.Observable.propertyChangeEvent,
newValue: newValue,
oldValue: oldValue
});
}
}
} | */ | random_line_split |
proxy.ts | import observable = require("data/observable");
import bindable = require("ui/core/bindable");
import dependencyObservable = require("ui/core/dependency-observable");
import types = require("utils/types");
import definition = require("ui/core/proxy");
export class PropertyMetadata extends dependencyObservable.PropertyMetadata implements definition.PropertyMetadata {
private _onSetNativeValue: dependencyObservable.PropertyChangedCallback;
constructor(
defaultValue: any,
options?: number,
onChanged?: dependencyObservable.PropertyChangedCallback,
onValidateValue?: dependencyObservable.PropertyValidationCallback,
onSetNativeValue?: dependencyObservable.PropertyChangedCallback) {
super(defaultValue, options, onChanged, onValidateValue);
this._onSetNativeValue = onSetNativeValue;
}
get onSetNativeValue(): dependencyObservable.PropertyChangedCallback {
return this._onSetNativeValue;
}
set onSetNativeValue(value: dependencyObservable.PropertyChangedCallback) {
this._onSetNativeValue = value;
}
}
export class ProxyObject extends bindable.Bindable implements definition.ProxyObject {
private _updatingJSPropertiesDict = {};
/**
* Gets the android-specific native instance that lies behind this proxy. Will be available if running on an Android platform.
*/
get android(): any {
return undefined;
}
/**
* Gets the ios-specific native instance that lies behind this proxy. Will be available if running on an iOS platform.
*/
get ios(): any {
return undefined;
}
public _onPropertyChanged(property: dependencyObservable.Property, oldValue: any, newValue: any) {
super._onPropertyChanged(property, oldValue, newValue);
this._trySetNativeValue(property, oldValue, newValue);
}
/**
* A property has changed on the native side directly - e.g. the user types in a TextField.
*/
public _onPropertyChangedFromNative(property: dependencyObservable.Property, newValue: any) {
if (this._updatingJSPropertiesDict[property.name]) {
return;
}
this._updatingJSPropertiesDict[property.name] = true;
this._setValue(property, newValue);
delete this._updatingJSPropertiesDict[property.name];
}
public _syncNativeProperties() {
var that = this;
var eachPropertyCallback = function (property: dependencyObservable.Property): boolean {
that._trySetNativeValue(property);
return true;
}
this._eachSetProperty(eachPropertyCallback);
}
private _trySetNativeValue(property: dependencyObservable.Property, oldValue?:any, newValue?: any) {
if (this._updatingJSPropertiesDict[property.name]) {
| if (global.android && !this.android) {
// in android we have lazy loading and we do not have a native widget created yet, do not call the onSetNativeValue callback
// properties will be synced when the widget is created
return;
}
var metadata = property.metadata;
if (!(metadata instanceof PropertyMetadata)) {
return;
}
var proxyMetadata = <PropertyMetadata>metadata;
if (proxyMetadata.onSetNativeValue) {
if (types.isUndefined(newValue)) {
newValue = this._getValue(property);
}
proxyMetadata.onSetNativeValue({
object: this,
property: property,
eventName: observable.Observable.propertyChangeEvent,
newValue: newValue,
oldValue: oldValue
});
}
}
} | // This is the case when a property has changed from the native side directly and we have received the "_onPropertyChanged" event while synchronizing our local cache
return;
}
| conditional_block |
proxy.ts | import observable = require("data/observable");
import bindable = require("ui/core/bindable");
import dependencyObservable = require("ui/core/dependency-observable");
import types = require("utils/types");
import definition = require("ui/core/proxy");
export class PropertyMetadata extends dependencyObservable.PropertyMetadata implements definition.PropertyMetadata {
private _onSetNativeValue: dependencyObservable.PropertyChangedCallback;
constructor(
defaultValue: any,
options?: number,
onChanged?: dependencyObservable.PropertyChangedCallback,
onValidateValue?: dependencyObservable.PropertyValidationCallback,
onSetNativeValue?: dependencyObservable.PropertyChangedCallback) {
super(defaultValue, options, onChanged, onValidateValue);
this._onSetNativeValue = onSetNativeValue;
}
get onSetNativeValue(): dependencyObservable.PropertyChangedCallback {
return this._onSetNativeValue;
}
set onSetNativeValue(value: dependencyObservable.PropertyChangedCallback) {
this._onSetNativeValue = value;
}
}
export class ProxyObject extends bindable.Bindable implements definition.ProxyObject {
private _updatingJSPropertiesDict = {};
/**
* Gets the android-specific native instance that lies behind this proxy. Will be available if running on an Android platform.
*/
get android(): any {
return undefined;
}
/**
* Gets the ios-specific native instance that lies behind this proxy. Will be available if running on an iOS platform.
*/
get ios(): any {
return undefined;
}
public _onPropertyChanged(property: dependencyObservable.Property, oldValue: any, newValue: any) {
super._onPropertyChanged(property, oldValue, newValue);
this._trySetNativeValue(property, oldValue, newValue);
}
/**
* A property has changed on the native side directly - e.g. the user types in a TextField.
*/
public _onPropertyChangedFromNative(property: dependencyObservable.Property, newValue: any) {
if (this._updatingJSPropertiesDict[property.name]) {
return;
}
this._updatingJSPropertiesDict[property.name] = true;
this._setValue(property, newValue);
delete this._updatingJSPropertiesDict[property.name];
}
public _syncNativeProperties() {
| private _trySetNativeValue(property: dependencyObservable.Property, oldValue?:any, newValue?: any) {
if (this._updatingJSPropertiesDict[property.name]) {
// This is the case when a property has changed from the native side directly and we have received the "_onPropertyChanged" event while synchronizing our local cache
return;
}
if (global.android && !this.android) {
// in android we have lazy loading and we do not have a native widget created yet, do not call the onSetNativeValue callback
// properties will be synced when the widget is created
return;
}
var metadata = property.metadata;
if (!(metadata instanceof PropertyMetadata)) {
return;
}
var proxyMetadata = <PropertyMetadata>metadata;
if (proxyMetadata.onSetNativeValue) {
if (types.isUndefined(newValue)) {
newValue = this._getValue(property);
}
proxyMetadata.onSetNativeValue({
object: this,
property: property,
eventName: observable.Observable.propertyChangeEvent,
newValue: newValue,
oldValue: oldValue
});
}
}
} | var that = this;
var eachPropertyCallback = function (property: dependencyObservable.Property): boolean {
that._trySetNativeValue(property);
return true;
}
this._eachSetProperty(eachPropertyCallback);
}
| identifier_body |
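A sketch of how a NativeScript-style property might be wired to this metadata so that JS-side changes reach the native widget via `_trySetNativeValue`. The `Property` constructor signature is an assumption based on the 2.x-era API, not something shown in these rows:

```ts
import dependencyObservable = require("ui/core/dependency-observable");
import proxy = require("ui/core/proxy");

// Hypothetical "text" property owned by a MyLabel view.
function onTextSetNative(data: dependencyObservable.PropertyChangeData) {
    // Push the new JS value down to the native widget here.
    console.log("native text <- " + data.newValue);
}

export var textProperty = new dependencyObservable.Property(
    "text",     // property name
    "MyLabel",  // owner type name
    new proxy.PropertyMetadata(
        "",                // defaultValue
        0,                 // options (PropertyMetadataSettings.None)
        undefined,         // onChanged
        undefined,         // onValidateValue
        onTextSetNative    // onSetNativeValue, invoked by _trySetNativeValue above
    )
);
```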
playlist_rename_spec.py | # -*- coding: utf-8 -*-
from expects import expect
from mamba import describe, context, before
from spec.ui._ipod_helpers import *
from spec.ui._fixture import update_environment
with describe('ipodio playlist create') as _:
@before.all
def setup_all():
_.new_name = 'leño'
_.playlist_name = 'playlist'
_.existing_name = 'roña'
update_environment(_)
bootstrap_ipod(_.mountpoint_path)
create_playlist(_.mountpoint_path, _.playlist_name)
create_playlist(_.mountpoint_path, _.existing_name)
def should_print_an_error():
execution = _.env.run(*_.cmd + ['playlist', 'rename'], expect_error=True)
expect(execution.stderr).to.have('Usage:')
with context('given a non existing playlist name'):
def should_print_an_error_():
execution = _.env.run(*_.cmd + ['playlist', 'rename', _.new_name, _.playlist_name])
expect(execution.stdout).to.have('does not exist')
with context('given an existing playlist name'):
def should_print_an_error__():
execution = _.env.run(*_.cmd + ['playlist', 'rename'], expect_error=True)
expect(execution.stderr).to.have('Usage:')
with context('given an existing playlist name'):
def should_print_an_error___():
execution = _.env.run(*_.cmd + ['playlist', 'rename', _.playlist_name, _.existing_name])
expect(execution.stdout).to.have('already exists')
with context('and another valid playlist name'):
def should_rename_that_playlist():
ex | ecution = _.env.run(*_.cmd + ['playlist', 'rename', _.playlist_name, _.new_name])
playlists = get_ipod_playlists_by_name(_.mountpoint_path)
expect(playlists).to.have(_.new_name)
expect(playlists).not_to.have(_.playlist_name)
expect(execution.stdout).to.have('renamed to')
| identifier_body |
|
playlist_rename_spec.py | # -*- coding: utf-8 -*-
from expects import expect
from mamba import describe, context, before
from spec.ui._ipod_helpers import *
from spec.ui._fixture import update_environment
with describe('ipodio playlist create') as _:
@before.all
def setup_all():
_.new_name = 'leño'
_.playlist_name = 'playlist'
_.existing_name = 'roña'
update_environment(_)
bootstrap_ipod(_.mountpoint_path)
create_playlist(_.mountpoint_path, _.playlist_name)
create_playlist(_.mountpoint_path, _.existing_name)
def should_print_an_error():
execution = _.env.run(*_.cmd + ['playlist', 'rename'], expect_error=True)
expect(execution.stderr).to.have('Usage:')
with context('given a non existing playlist name'):
def should_print_an_error_(): | with context('given an existing playlist name'):
def should_print_an_error__():
execution = _.env.run(*_.cmd + ['playlist', 'rename'], expect_error=True)
expect(execution.stderr).to.have('Usage:')
with context('given an existing playlist name'):
def should_print_an_error___():
execution = _.env.run(*_.cmd + ['playlist', 'rename', _.playlist_name, _.existing_name])
expect(execution.stdout).to.have('already exists')
with context('and another valid playlist name'):
def should_rename_that_playlist():
execution = _.env.run(*_.cmd + ['playlist', 'rename', _.playlist_name, _.new_name])
playlists = get_ipod_playlists_by_name(_.mountpoint_path)
expect(playlists).to.have(_.new_name)
expect(playlists).not_to.have(_.playlist_name)
expect(execution.stdout).to.have('renamed to') | execution = _.env.run(*_.cmd + ['playlist', 'rename', _.new_name, _.playlist_name])
expect(execution.stdout).to.have('does not exist')
| random_line_split |
playlist_rename_spec.py | # -*- coding: utf-8 -*-
from expects import expect
from mamba import describe, context, before
from spec.ui._ipod_helpers import *
from spec.ui._fixture import update_environment
with describe('ipodio playlist create') as _:
@before.all
def setup_all():
_.new_name = 'leño'
_.playlist_name = 'playlist'
_.existing_name = 'roña'
update_environment(_)
bootstrap_ipod(_.mountpoint_path)
create_playlist(_.mountpoint_path, _.playlist_name)
create_playlist(_.mountpoint_path, _.existing_name)
def should_print_an_error():
execution = _.env.run(*_.cmd + ['playlist', 'rename'], expect_error=True)
expect(execution.stderr).to.have('Usage:')
with context('given a non existing playlist name'):
def should_print_an_error_():
execution = _.env.run(*_.cmd + ['playlist', 'rename', _.new_name, _.playlist_name])
expect(execution.stdout).to.have('does not exist')
with context('given an existing playlist name'):
def should_print_an_error__():
execution = _.env.run(*_.cmd + ['playlist', 'rename'], expect_error=True)
expect(execution.stderr).to.have('Usage:')
with context('given an existing playlist name'):
def should_print_an_error___():
execution = _.env.run(*_.cmd + ['playlist', 'rename', _.playlist_name, _.existing_name])
expect(execution.stdout).to.have('already exists')
with context('and another valid playlist name'):
def sh | :
execution = _.env.run(*_.cmd + ['playlist', 'rename', _.playlist_name, _.new_name])
playlists = get_ipod_playlists_by_name(_.mountpoint_path)
expect(playlists).to.have(_.new_name)
expect(playlists).not_to.have(_.playlist_name)
expect(execution.stdout).to.have('renamed to')
| ould_rename_that_playlist() | identifier_name |
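For readers unfamiliar with mamba/expects, this is the bare skeleton the specs above follow, reduced to a self-contained sketch with no iPod fixtures assumed:

```python
# -*- coding: utf-8 -*-
from expects import expect
from mamba import describe, context, before

with describe('ipodio-style output matching') as _:
    @before.all
    def setup_all():
        _.output = 'Usage: ipodio playlist rename <old> <new>'

    def should_mention_usage():
        expect(_.output).to.have('Usage:')

    with context('given a completed rename'):
        def should_mention_renamed_to():
            expect('playlist renamed to mix').to.have('renamed to')
```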
index.d.ts | // Type definitions for vanilla-tilt 1.4
// Project: https://github.com/micku7zu/vanilla-tilt.js
// Definitions by: Livio Brunner <https://github.com/BrunnerLivio>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/**
* A smooth 3D tilt javascript library forked from Tilt.js (jQuery version).
*/
export namespace VanillaTilt {
/**
* Options which configures the tilting
*/
interface TiltOptions {
/**
* Reverse the tilt direction
*/
reverse?: boolean;
/**
* Max tilt rotation (degrees)
*/
max?: number;
/**
* Transform perspective, the lower the more extreme the tilt gets.
*/
perspective?: number;
/**
* 2 = 200%, 1.5 = 150%, etc..
*/
scale?: number;
/**
* Speed of the enter/exit transition
*/
speed?: number;
/**
* Set a transition on enter/exit.
*/
transition?: boolean;
/**
* What axis should be disabled. Can be X or Y.
*/
axis?: null | "x" | "y";
/**
* If the tilt effect has to be reset on exit.
*/
reset?: boolean;
/**
* Easing on enter/exit.
*/
easing?: string;
/**
* if it should have a "glare" effect
*/
glare?: boolean;
/**
* the maximum "glare" opacity
*/
"max-glare"?: number;
/**
* false = VanillaTilt creates the glare elements for you, otherwise
* you need to add .js-tilt-glare>.js-tilt-glare-inner by yourself
*/
"glare-prerender"?: boolean;
}
interface TiltValues {
/**
* The current tilt on the X axis
*/
tiltX: number;
/**
* The current tilt on the Y axis
*/
tiltY: number;
/**
* The current percentage on the X axis
*/
percentageX: number;
/**
* The current percentage on the Y axis
*/
percentageY: number;
}
interface HTMLVanillaTiltElement extends HTMLElement {
vanillaTilt: VanillaTilt;
}
}
/**
* A smooth 3D tilt javascript library forked from Tilt.js (jQuery version).
*/
export class | {
/**
* Creates a new instance of a VanillaTilt element.
* @param element The element, which should be a VanillaTilt element
* @param settings Settings which configures the element
*/
constructor(element: HTMLElement, settings?: VanillaTilt.TiltOptions);
/**
* Initializes one or multiple elements
* @param elements The element, which should tilt
* @param settings Settings, which configures the elements
*/
static init(elements: HTMLElement | HTMLElement[], settings?: VanillaTilt.TiltOptions): void;
/**
* Resets the styling
*/
reset(): void;
/**
* Get values of instance
*/
getValues(): VanillaTilt.TiltValues;
/**
* Destroys the instance and removes the listeners.
*/
destroy(): void;
/**
* Start listening to events
*/
addEventListeners(): void;
/**
* Stop listening to events
*/
removeEventListener(): void;
}
| VanillaTilt | identifier_name |
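A usage sketch against these typings; the element selector and option values are illustrative:

```ts
import { VanillaTilt } from "vanilla-tilt";

const card = document.querySelector<HTMLElement>(".tilt-card");
if (card) {
    // init() wires up listeners and stores the instance on the element.
    VanillaTilt.init(card, {
        max: 15,           // max tilt rotation (degrees)
        speed: 400,        // enter/exit transition speed
        glare: true,
        "max-glare": 0.4,
    });

    const tilted = card as VanillaTilt.HTMLVanillaTiltElement;
    console.log(tilted.vanillaTilt.getValues()); // current tilt/percentages
    tilted.vanillaTilt.destroy();                // tear down listeners
}
```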
index.d.ts | // Type definitions for vanilla-tilt 1.4
// Project: https://github.com/micku7zu/vanilla-tilt.js
// Definitions by: Livio Brunner <https://github.com/BrunnerLivio>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/**
* A smooth 3D tilt javascript library forked from Tilt.js (jQuery version).
*/
export namespace VanillaTilt {
/**
* Options which configures the tilting
*/
interface TiltOptions {
/**
* Reverse the tilt direction
*/
reverse?: boolean;
/**
* Max tilt rotation (degrees)
*/
max?: number;
/**
* Transform perspective, the lower the more extreme the tilt gets.
*/
perspective?: number;
/**
* 2 = 200%, 1.5 = 150%, etc..
*/
scale?: number;
/**
* Speed of the enter/exit transition
*/
speed?: number;
/**
* Set a transition on enter/exit.
*/
transition?: boolean;
/**
* What axis should be disabled. Can be X or Y.
*/
axis?: null | "x" | "y";
/**
* If the tilt effect has to be reset on exit.
*/
reset?: boolean;
/**
* Easing on enter/exit.
*/
easing?: string;
/**
* if it should have a "glare" effect
*/
glare?: boolean;
/**
* the maximum "glare" opacity
*/
"max-glare"?: number;
/**
* false = VanillaTilt creates the glare elements for you, otherwise
* you need to add .js-tilt-glare>.js-tilt-glare-inner by yourself
*/
"glare-prerender"?: boolean;
}
interface TiltValues {
/**
* The current tilt on the X axis
*/
tiltX: number;
/**
* The current tilt on the Y axis
*/
tiltY: number;
/**
* The current percentage on the X axis
*/
percentageX: number;
/**
* The current percentage on the Y axis
*/
percentageY: number;
}
interface HTMLVanillaTiltElement extends HTMLElement {
vanillaTilt: VanillaTilt;
}
}
/**
* A smooth 3D tilt javascript library forked from Tilt.js (jQuery version).
*/
export class VanillaTilt {
/**
* Creates a new instance of a VanillaTilt element.
* @param element The element, which should be a VanillaTilt element
* @param settings Settings which configures the element
*/
constructor(element: HTMLElement, settings?: VanillaTilt.TiltOptions);
/**
* Initializes one or multiple elements
* @param elements The element, which should tilt
* @param settings Settings, which configures the elements
*/
static init(elements: HTMLElement | HTMLElement[], settings?: VanillaTilt.TiltOptions): void;
/**
* Resets the styling
*/
reset(): void;
/**
* Get values of instance
*/
getValues(): VanillaTilt.TiltValues;
/**
* Destroys the instance and removes the listeners.
*/
destroy(): void;
/**
* Start listening to events
*/
addEventListeners(): void;
/** | * Stop listening to events
*/
removeEventListener(): void;
} | random_line_split |
|
backtop.js | (function($, f) {
// If there's no jQuery, Unslider can't work, so kill the operation.
if(!$) return f;
var Unslider = function() {
// Set up our elements
this.el = f;
this.items = f;
// Dimensions
this.sizes = [];
this.max = [0,0];
// Current index
this.current = 0;
// Start/stop timer
this.interval = f;
// Set some options
this.opts = {
speed: 500,
delay: 3000, // f for no autoplay
complete: f, // when a slide's finished
keys: !f, // keyboard shortcuts - disable if it breaks things
dots: f, // display ●●○○○ pagination
fluid: f // is it a percentage width?
};
// Create a deep clone for methods where context changes
var _ = this;
this.init = function(el, opts) {
this.el = el;
this.ul = el.children('ul');
this.max = [el.outerWidth(), el.outerHeight()];
this.items = this.ul.children('li').each(this.calculate);
// Check whether we're passing any options in to Unslider
this.opts = $.extend(this.opts, opts);
// Set up the Unslider
this.setup();
return this;
};
// Get the width for an element
// Pass a jQuery element as the context with .call(), and the index as a parameter: Unslider.calculate.call($('li:first'), 0)
this.calculate = function(index) {
var me = $(this),
width = me.outerWidth(), height = me.outerHeight();
// Add it to the sizes list
_.sizes[index] = [width, height];
// Set the max values
if(width > _.max[0]) _.max[0] = width;
if(height > _.max[1]) _.max[1] = height;
};
// Work out what methods need calling
this.setup = function() {
// Set the main element
this.el.css({
overflow: 'hidden',
width: _.max[0],
height: this.items.first().outerHeight()
});
// Set the relative widths
this.ul.css({width: (this.items.length * 100) + '%', position: 'relative'});
this.items.css('width', (100 / this.items.length) + '%');
if(this.opts.delay !== f) {
this.start();
this.el.hover(this.stop, this.start);
}
// Custom keyboard support
this.opts.keys && $(document).keydown(this.keys);
// Dot pagination
this.opts.dots && this.dots();
// Little patch for fluid-width sliders. Screw those guys.
if(this.opts.fluid) {
var resize = function() {
_.el.css('width', Math.min(Math.round((_.el.outerWidth() / _.el.parent().outerWidth()) * 100), 100) + '%');
};
resize();
$(window).resize(resize);
}
if(this.opts.arrows) {
this.el.parent().append('<p class="arrows"><span class="prev">←</span><span class="next">→</span></p>')
.find('.arrows span').click(function() {
$.isFunction(_[this.className]) && _[this.className]();
});
};
// Swipe support
if($.event.swipe) {
this.el.on('swipeleft', _.prev).on('swiperight', _.next);
}
};
// Move Unslider to a slide index
this.move = function(index, cb) {
// If it's out of bounds, go to the first slide
if(!this.items.eq(index).length) index = 0;
if(index < 0) index = (this.items.length - 1);
var target = this.items.eq(index);
var obj = {height: target.outerHeight()};
var speed = cb ? 5 : this.opts.speed;
if(!this.ul.is(':animated')) {
// Handle those pesky dots
_.el.find('.dot:eq(' + index + ')').addClass('active').siblings().removeClass('active');
this.el.animate(obj, speed) && this.ul.animate($.extend({left: '-' + index + '00%'}, obj), speed, function(data) {
_.current = index;
$.isFunction(_.opts.complete) && !cb && _.opts.complete(_.el);
});
}
};
// Autoplay functionality
this.start = function() {
_.interval = setInterval(function() {
_.move(_.current + 1);
}, _.opts.delay);
};
// Stop autoplay
this.stop = function() {
_.interval = clearInterval(_.interval);
return _;
};
// Keypresses
this.keys = function(e) {
var key = e.which;
var map = {
// Prev/next
37: _.prev,
39: _.next,
// Esc
27: _.stop
};
if($.isFunction(map[key])) {
map[key]();
}
};
// Arrow navigation
this.next = function() { return _.stop().move(_.current + 1) };
this.prev = function() { return _.stop().move(_.current - 1) };
this.dots = function() {
// Create the HTML
var html = '<ol class="dots">';
$.each(this.items, function(index) { html += '<li class="dot' + (index < 1 ? ' active' : '') + '">' + (index + 1) + '</li>'; });
html += '</ol>';
// Add it to the Unslider
this.el.addClass('has-dots').append(html).find('.dot').click(function() {
_.move($(this).index());
});
};
};
// Create a jQuery plugin
$.fn.unslider = function(o) {
var len = this.length;
// Enable multiple-slider support
return this.each(function(index) {
// Cache a copy of $(this), so it
var me = $(this);
var instance = (new Unslider).init(me, o);
// Invoke an Unslider instance
me.data('unslider' + (len > 1 ? '-' + (index + 1) : ''), instance);
});
};
})(window.jQuery, false);
jQuery(document).ready(function($){
// browser window scroll (in pixels) after which the "back to top" link is shown
var offset = 300,
//browser window scroll (in pixels) after which the "back to top" link opacity is reduced
offset_opacity = 1200,
//duration of the top scrolling animation (in ms)
scroll_top_duration = 700,
//grab the "back to top" link
$back_to_top = $('.back-top');
//hide or show the "back to top" link
$(window).scroll(function(){
( $(this).scrollTop() > offset ) ? $back_to_top.addClass('cd-is-visible') : $back_to_top.removeClass('cd-is-visible cd-fade-out');
if( $(this).scrollTop() > offset_opacity ) {
$back_to_top.addClass('cd-fade-out');
}
});
//smooth scroll to top
$back_to_top.on('click', function(event){
event.preventDefault();
$('body,html').animate({
scrollTop: 0 , | }, scroll_top_duration
);
});
// pin the footer to the bottom
var winheight = $( window ).height();
var bodyheight = $("body").height();
if (winheight >= bodyheight){
$(".outer-footer").css("position" , "fixed");
$(".outer-footer").css("bottom" , "0");
}
});
/**
* Unslider by @idiot
*/ | random_line_split |
|
backtop.js | (function($, f) {
// If there's no jQuery, Unslider can't work, so kill the operation.
if(!$) return f;
var Unslider = function() {
// Set up our elements
this.el = f;
this.items = f;
// Dimensions
this.sizes = [];
this.max = [0,0];
// Current index
this.current = 0;
// Start/stop timer
this.interval = f;
// Set some options
this.opts = {
speed: 500,
delay: 3000, // f for no autoplay
complete: f, // when a slide's finished
keys: !f, // keyboard shortcuts - disable if it breaks things
dots: f, // display ●●○○○ pagination
fluid: f // is it a percentage width?
};
// Create a deep clone for methods where context changes
var _ = this;
this.init = function(el, opts) {
this.el = el;
this.ul = el.children('ul');
this.max = [el.outerWidth(), el.outerHeight()];
this.items = this.ul.children('li').each(this.calculate);
// Check whether we're passing any options in to Unslider
this.opts = $.extend(this.opts, opts);
// Set up the Unslider
this.setup();
return this;
};
// Get the width for an element
// Pass a jQuery element as the context with .call(), and the index as a parameter: Unslider.calculate.call($('li:first'), 0)
this.calculate = function(index) {
var me = $(this),
width = me.outerWidth(), height = me.outerHeight();
// Add it to the sizes list
_.sizes[index] = [width, height];
// Set the max values
if(width > _.max[0]) _.max[0] = width;
if(height > _.max[1]) _.max[1] = height;
};
// Work out what methods need calling
this.setup = function() {
// Set the main element
this.el.css({
overflow: 'hidden',
width: _.max[0],
height: this.items.first().outerHeight()
});
// Set the relative widths
this.ul.css({width: (this.items.length * 100) + '%', position: 'relative'});
this.items.css('width', (100 / this.items.length) + '%');
if(this.opts.delay !== f) {
this.start();
this.el.hover(this.stop, this.start);
}
// Custom keyboard support
this.opts.keys && $(document).keydown(this.keys);
// Dot pagination
this.opts.dots && this.dots();
// Little patch for fluid-width sliders. Screw those guys.
if(this.opts.fluid) {
var resize = function() {
_.el.css('width', Math.min(Math.round((_.el.outerWidth() / _.el.parent().outerWidth()) * 100), 100) + '%');
};
resize();
$(window).resize(resize);
}
if(this.opts.arrows) {
this.el.parent().append('<p class="arrows"><span class="prev">←</span><span class="next">→</span></p>')
.find('.arrows span').click(function() {
$.isFunction(_[this.className]) && _[this.className]();
});
};
// Swipe support
if($.event.swipe) {
this.el.on('swipeleft', _.prev).on('swiperight', _.next);
}
};
// Move Unslider to a slide index
this.move = function(index, cb) {
// If it's out of bounds, go to the first slide
if(!this.items.eq(index).length) index = 0;
if(index < 0) index = (this.items.length - 1);
var target = this.items.eq(index);
var obj = {height: target.outerHeight()};
var speed = cb ? 5 : this.opts.speed;
if(!this.ul.is(':animated')) {
// Handle those pe | tionality
this.start = function() {
_.interval = setInterval(function() {
_.move(_.current + 1);
}, _.opts.delay);
};
// Stop autoplay
this.stop = function() {
_.interval = clearInterval(_.interval);
return _;
};
// Keypresses
this.keys = function(e) {
var key = e.which;
var map = {
// Prev/next
37: _.prev,
39: _.next,
// Esc
27: _.stop
};
if($.isFunction(map[key])) {
map[key]();
}
};
// Arrow navigation
this.next = function() { return _.stop().move(_.current + 1) };
this.prev = function() { return _.stop().move(_.current - 1) };
this.dots = function() {
// Create the HTML
var html = '<ol class="dots">';
$.each(this.items, function(index) { html += '<li class="dot' + (index < 1 ? ' active' : '') + '">' + (index + 1) + '</li>'; });
html += '</ol>';
// Add it to the Unslider
this.el.addClass('has-dots').append(html).find('.dot').click(function() {
_.move($(this).index());
});
};
};
// Create a jQuery plugin
$.fn.unslider = function(o) {
var len = this.length;
// Enable multiple-slider support
return this.each(function(index) {
		// Cache a copy of $(this), so it doesn't have to be re-queried
var me = $(this);
var instance = (new Unslider).init(me, o);
// Invoke an Unslider instance
me.data('unslider' + (len > 1 ? '-' + (index + 1) : ''), instance);
});
};
})(window.jQuery, false);
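// Usage sketch (illustrative, not part of the original source) — assumes
// markup like <div class="banner"><ul><li>…</li></ul></div> exists on the
// page; the option names below are the plugin defaults defined above:
//   $('.banner').unslider({ delay: 4000, dots: true, fluid: true });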
jQuery(document).ready(function($){
// browser window scroll (in pixels) after which the "back to top" link is shown
var offset = 300,
//browser window scroll (in pixels) after which the "back to top" link opacity is reduced
offset_opacity = 1200,
//duration of the top scrolling animation (in ms)
scroll_top_duration = 700,
//grab the "back to top" link
$back_to_top = $('.back-top');
//hide or show the "back to top" link
$(window).scroll(function(){
( $(this).scrollTop() > offset ) ? $back_to_top.addClass('cd-is-visible') : $back_to_top.removeClass('cd-is-visible cd-fade-out');
if( $(this).scrollTop() > offset_opacity ) {
$back_to_top.addClass('cd-fade-out');
}
});
//smooth scroll to top
$back_to_top.on('click', function(event){
event.preventDefault();
$('body,html').animate({
scrollTop: 0 ,
}, scroll_top_duration
);
});
	// Pin the footer to the bottom of the window
var winheight = $( window ).height();
var bodyheight = $("body").height();
if (winheight >= bodyheight){
$(".outer-footer").css("position" , "fixed");
$(".outer-footer").css("bottom" , "0");
}
});
/**
* Unslider by @idiot
*/
| sky dots
_.el.find('.dot:eq(' + index + ')').addClass('active').siblings().removeClass('active');
this.el.animate(obj, speed) && this.ul.animate($.extend({left: '-' + index + '00%'}, obj), speed, function(data) {
_.current = index;
$.isFunction(_.opts.complete) && !cb && _.opts.complete(_.el);
});
}
};
// Autoplay func | conditional_block |
virtual_machine_agent_instance_view.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VirtualMachineAgentInstanceView(Model):
"""The instance view of the VM Agent running on the virtual machine.
:param vm_agent_version: The VM Agent full version.
:type vm_agent_version: str
:param extension_handlers: The virtual machine extension handler instance
view.
:type extension_handlers: list of
:class:`VirtualMachineExtensionHandlerInstanceView
<azure.mgmt.compute.compute.v2017_03_30.models.VirtualMachineExtensionHandlerInstanceView>`
:param statuses: The resource status information.
:type statuses: list of :class:`InstanceViewStatus
<azure.mgmt.compute.compute.v2017_03_30.models.InstanceViewStatus>`
"""
_attribute_map = {
'vm_agent_version': {'key': 'vmAgentVersion', 'type': 'str'},
'extension_handlers': {'key': 'extensionHandlers', 'type': '[VirtualMachineExtensionHandlerInstanceView]'},
'statuses': {'key': 'statuses', 'type': '[InstanceViewStatus]'},
} | self.vm_agent_version = vm_agent_version
self.extension_handlers = extension_handlers
self.statuses = statuses |
def __init__(self, vm_agent_version=None, extension_handlers=None, statuses=None): | random_line_split |
virtual_machine_agent_instance_view.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class | (Model):
"""The instance view of the VM Agent running on the virtual machine.
:param vm_agent_version: The VM Agent full version.
:type vm_agent_version: str
:param extension_handlers: The virtual machine extension handler instance
view.
:type extension_handlers: list of
:class:`VirtualMachineExtensionHandlerInstanceView
<azure.mgmt.compute.compute.v2017_03_30.models.VirtualMachineExtensionHandlerInstanceView>`
:param statuses: The resource status information.
:type statuses: list of :class:`InstanceViewStatus
<azure.mgmt.compute.compute.v2017_03_30.models.InstanceViewStatus>`
"""
_attribute_map = {
'vm_agent_version': {'key': 'vmAgentVersion', 'type': 'str'},
'extension_handlers': {'key': 'extensionHandlers', 'type': '[VirtualMachineExtensionHandlerInstanceView]'},
'statuses': {'key': 'statuses', 'type': '[InstanceViewStatus]'},
}
def __init__(self, vm_agent_version=None, extension_handlers=None, statuses=None):
self.vm_agent_version = vm_agent_version
self.extension_handlers = extension_handlers
self.statuses = statuses
| VirtualMachineAgentInstanceView | identifier_name |
virtual_machine_agent_instance_view.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VirtualMachineAgentInstanceView(Model):
"""The instance view of the VM Agent running on the virtual machine.
:param vm_agent_version: The VM Agent full version.
:type vm_agent_version: str
:param extension_handlers: The virtual machine extension handler instance
view.
:type extension_handlers: list of
:class:`VirtualMachineExtensionHandlerInstanceView
<azure.mgmt.compute.compute.v2017_03_30.models.VirtualMachineExtensionHandlerInstanceView>`
:param statuses: The resource status information.
:type statuses: list of :class:`InstanceViewStatus
<azure.mgmt.compute.compute.v2017_03_30.models.InstanceViewStatus>`
"""
_attribute_map = {
'vm_agent_version': {'key': 'vmAgentVersion', 'type': 'str'},
'extension_handlers': {'key': 'extensionHandlers', 'type': '[VirtualMachineExtensionHandlerInstanceView]'},
'statuses': {'key': 'statuses', 'type': '[InstanceViewStatus]'},
}
def __init__(self, vm_agent_version=None, extension_handlers=None, statuses=None):
| self.vm_agent_version = vm_agent_version
self.extension_handlers = extension_handlers
self.statuses = statuses | identifier_body |
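# Illustrative construction (all values are made up; InstanceViewStatus is
# referenced in the docstring above and assumed importable from the same
# models package):
#     view = VirtualMachineAgentInstanceView(
#         vm_agent_version='2.7.41491.901',
#         statuses=[InstanceViewStatus(code='ProvisioningState/succeeded')])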
|
imdb_rnn.py | # Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Train a SimpleRNN on the IMDB sentiment classification task.
The dataset is actually too small for LSTM to be of any advantage
compared to simpler, much faster methods such as TF-IDF+LogReg.
"""
from __future__ import print_function
import tensorflow.keras as keras
import tensorflow.keras.preprocessing.sequence as sequence
from tensorflow_model_optimization.python.core.clustering.keras import cluster
from tensorflow_model_optimization.python.core.clustering.keras import cluster_config | max_features = 20000
maxlen = 100 # cut texts after this number of words
batch_size = 32
print("Loading data...")
(x_train,
y_train), (x_test,
y_test) = keras.datasets.imdb.load_data(num_words=max_features)
print(len(x_train), "train sequences")
print(len(x_test), "test sequences")
print("Pad sequences (samples x time)")
x_train = sequence.pad_sequences(x_train, maxlen=maxlen)
x_test = sequence.pad_sequences(x_test, maxlen=maxlen)
print("x_train shape:", x_train.shape)
print("x_test shape:", x_test.shape)
print("Build model...")
model = keras.models.Sequential()
model.add(keras.layers.Embedding(max_features, 128, input_length=maxlen))
model.add(keras.layers.SimpleRNN(128))
model.add(keras.layers.Dropout(0.5))
model.add(keras.layers.Dense(1))
model.add(keras.layers.Activation("sigmoid"))
model = cluster.cluster_weights(
model,
number_of_clusters=16,
cluster_centroids_init=cluster_config.CentroidInitialization
.KMEANS_PLUS_PLUS,
)
model.compile(loss="binary_crossentropy",
optimizer="adam",
metrics=["accuracy"])
print("Train...")
model.fit(x_train, y_train, batch_size=batch_size, epochs=3,
validation_data=(x_test, y_test))
score, acc = model.evaluate(x_test, y_test,
batch_size=batch_size)
print("Test score:", score)
print("Test accuracy:", acc) | random_line_split |
|
Districts.js | define([
'dojo/_base/declare',
'dojo/_base/lang',
'dojo/topic',
'dojo/on',
'dojo/_base/array',
'dijit/_WidgetBase',
'dijit/_TemplatedMixin',
'ngw-compulink-libs/dgrid-0.4.0/dstore/Rest',
'ngw-compulink-libs/dgrid-0.4.0/dgrid/OnDemandGrid',
'ngw-compulink-libs/dgrid-0.4.0/dgrid/Keyboard',
'ngw-compulink-libs/dgrid-0.4.0/dgrid/Selection',
'ngw-compulink-admin/reference_books/EditorRelation'
], function (declare, lang, topic, on, array, _WidgetBase, _TemplatedMixin,
Rest, OnDemandGrid, Keyboard, Selection, EditorRelation) {
return declare([_WidgetBase, _TemplatedMixin], {
templateString: '<div></div>',
postCreate: function () {
var districtsStore = new Rest({
target: ngwConfig.applicationUrl + '/compulink/services/reference_books/district/',
useRangeHeaders: true
}); | }, this.domNode);
}
});
}); |
new (declare([OnDemandGrid, Keyboard, Selection, EditorRelation]))({
collection: districtsStore,
columns: this.config | random_line_split |
slogging.py | from ethereum.utils import bcolors, is_numeric
DEFAULT_LOGLEVEL = 'INFO'
JSON_FORMAT = '%(message)s'
PRINT_FORMAT = '%(levelname)s:%(name)s\t%(message)s'
FILE_PREFIX = '%(asctime)s'
TRACE = 5
known_loggers = set()
log_listeners = []
def _inject_into_logger(name, code, namespace=None):
# This is a hack to fool the logging module into reporting correct source files.
# It determines the actual source of a logging call by inspecting the stack frame's
# source file. So we use this `eval(compile())` construct to "inject" our additional
# methods into the logging module.
if namespace is None:
namespace = {}
eval(
compile(
code,
logging._srcfile,
'exec'
),
namespace
)
setattr(logging.Logger, name, namespace[name])
# Add `trace()` level to Logger
_inject_into_logger(
'trace',
textwrap.dedent(
"""\
def trace(self, msg, *args, **kwargs):
if self.isEnabledFor(TRACE):
self._log(TRACE, msg, args, **kwargs)
"""
),
{'TRACE': TRACE}
)
logging.TRACE = TRACE
logging.addLevelName(TRACE, "TRACE")
# Add `DEV()` shortcut to loggers
_inject_into_logger(
'DEV',
textwrap.dedent(
"""\
def DEV(self, msg, *args, **kwargs):
'''Shortcut to output highlighted log text'''
kwargs['highlight'] = True
self.critical(msg, *args, **kwargs)
"""
)
)
class LogRecorder(object):
"""
temporarily records all logs, w/o level filtering
use only once!
"""
max_capacity = 1000 * 1000 # check we are not forgotten or abused
def __init__(self, disable_other_handlers=False, log_config=None):
self._records = []
log_listeners.append(self._add_log_record)
self._saved_config = None
if log_config:
self._saved_config = get_configuration()
configure(log_config)
self._saved_handlers = []
if disable_other_handlers:
self._saved_handlers = rootLogger.handlers[:]
rootLogger.handlers = []
def pop_records(self):
# only returns records on the first call
r = self._records[:]
self._records = []
try:
log_listeners.remove(self._add_log_record)
except ValueError:
pass
if self._saved_config:
configure(**self._saved_config)
self._saved_config = None
if self._saved_handlers:
rootLogger.handlers = self._saved_handlers[:]
self._saved_handlers = []
return r
def _add_log_record(self, msg):
self._records.append(msg)
assert len(self._records) < self.max_capacity
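# Minimal usage sketch (logger name and event are made up); records are the
# dicts built in RootLogger.handle below:
#     rec = LogRecorder()
#     get_logger('eth.vm').info('op executed', op='ADD')
#     records = rec.pop_records()  # -> [{'event': 'op executed', 'op': 'ADD'}]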
def get_configuration():
"""
get a configuration (snapshot) that can be used to call configure
snapshot = get_configuration()
configure(**snapshot)
"""
root = getLogger()
name_levels = [('', logging.getLevelName(root.level))]
name_levels.extend(
(name, logging.getLevelName(logger.level))
for name, logger
in root.manager.loggerDict.items()
if hasattr(logger, 'level')
)
config_string = ','.join('%s:%s' % x for x in name_levels)
return dict(config_string=config_string, log_json=SLogger.manager.log_json)
def get_logger_names():
return sorted(known_loggers, key=lambda x: '' if not x else x)
class BoundLogger(object):
def __init__(self, logger, context):
self.logger = logger
self.context = context
def bind(self, **kwargs):
return BoundLogger(self, kwargs)
def _proxy(self, method_name, *args, **kwargs):
context = self.context.copy()
context.update(kwargs)
return getattr(self.logger, method_name)(*args, **context)
trace = lambda self, *args, **kwargs: self._proxy('trace', *args, **kwargs)
debug = lambda self, *args, **kwargs: self._proxy('debug', *args, **kwargs)
info = lambda self, *args, **kwargs: self._proxy('info', *args, **kwargs)
warn = warning = lambda self, *args, **kwargs: self._proxy('warning', *args, **kwargs)
error = lambda self, *args, **kwargs: self._proxy('error', *args, **kwargs)
exception = lambda self, *args, **kwargs: self._proxy('exception', *args, **kwargs)
fatal = critical = lambda self, *args, **kwargs: self._proxy('critical', *args, **kwargs)
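# Context-binding sketch (logger name and peer address are illustrative):
#     log = get_logger('p2p').bind(peer='1.2.3.4')
#     log.info('connected')  # emits peer=1.2.3.4 alongside every bound call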
class _LogJSONEncoder(JSONEncoder):
def default(self, o):
return repr(o)
class SLogger(logging.Logger):
def __init__(self, name, level=DEFAULT_LOGLEVEL):
self.warn = self.warning
super(SLogger, self).__init__(name, level=level)
@property
def log_json(self):
return SLogger.manager.log_json
def is_active(self, level_name='trace'):
return self.isEnabledFor(logging._checkLevel(level_name.upper()))
def format_message(self, msg, kwargs, highlight, level):
if getattr(self, 'log_json', False):
message = dict()
message['event'] = '{}.{}'.format(self.name, msg.lower().replace(' ', '_'))
message['level'] = logging.getLevelName(level)
try:
message.update(kwargs)
try:
msg = json.dumps(message, cls=_LogJSONEncoder)
except TypeError:
# Invalid value. With our custom encoder this can only happen with non-string
# dict keys (see: https://bugs.python.org/issue18820).
message = _stringify_dict_keys(message)
msg = json.dumps(message, cls=_LogJSONEncoder)
except UnicodeDecodeError:
message.update({
k: v if is_numeric(v) or isinstance(v, (float, complex)) else repr(v)
for k, v in kwargs.items()
})
msg = json.dumps(message, cls=_LogJSONEncoder)
else:
msg = "{}{} {}{}".format(
bcolors.WARNING if highlight else "",
msg,
" ".join("{}={!s}".format(k, v) for k, v in kwargs.items()),
bcolors.ENDC if highlight else ""
)
return msg
def bind(self, **kwargs):
return BoundLogger(self, kwargs)
def _log(self, level, msg, args, **kwargs):
exc_info = kwargs.pop('exc_info', None)
extra = kwargs.pop('extra', {})
highlight = kwargs.pop('highlight', False)
extra['kwargs'] = kwargs
extra['original_msg'] = msg
msg = self.format_message(msg, kwargs, highlight, level)
super(SLogger, self)._log(level, msg, args, exc_info, extra)
class RootLogger(SLogger):
"""
A root logger is not that different to any other logger, except that
it must have a logging level and there is only one instance of it in
the hierarchy.
"""
def __init__(self, level):
"""
Initialize the logger with the name "root".
"""
super(RootLogger, self).__init__("root", level)
def handle(self, record):
if log_listeners:
rec_dict = getattr(record, 'kwargs', {}).copy()
rec_dict['event'] = getattr(record, 'original_msg', "")
for listener in log_listeners:
listener(rec_dict)
super(RootLogger, self).handle(record)
class SManager(logging.Manager):
def __init__(self, rootnode):
self.loggerClass = SLogger
self.log_json = False
super(SManager, self).__init__(rootnode)
def getLogger(self, name):
logging.setLoggerClass(SLogger)
return super(SManager, self).getLogger(name)
rootLogger = RootLogger(DEFAULT_LOGLEVEL)
SLogger.root = rootLogger
SLogger.manager = SManager(SLogger.root)
def _stringify_dict_keys(input_):
if isinstance(input_, dict):
res = {}
for k, v in input_.items():
v = _stringify_dict_keys(v)
if not isinstance(k, (int, long, bool, None.__class__)):
k = str(k)
res[k] = v
elif isinstance(input_, (list, tuple)):
res = input_.__class__([_stringify_dict_keys(i) for i in input_])
else:
res = input_
return res
def getLogger(name=None):
"""
Return a logger with the specified name, creating it if necessary.
If no name is specified, return the root logger.
"""
if name:
logger = SLogger.manager.getLogger(name)
return logger
else:
return rootLogger
def configure(config_string=None, log_json=False, log_file=None):
if not config_string:
config_string = ":{}".format(DEFAULT_LOGLEVEL)
if log_json:
SLogger.manager.log_json = True
log_format = JSON_FORMAT
else:
SLogger.manager.log_json = False
log_format = PRINT_FORMAT
if len(rootLogger.handlers) == 0:
handler = StreamHandler()
formatter = Formatter(log_format)
handler.setFormatter(formatter)
rootLogger.addHandler(handler)
if log_file:
if not any(isinstance(hndlr, FileHandler) for hndlr in rootLogger.handlers):
handler = FileHandler(log_file)
formatter = Formatter("{} {}".format(FILE_PREFIX, log_format))
handler.setFormatter(formatter)
rootLogger.addHandler(handler)
# Reset logging levels before applying new config below
for name, logger in SLogger.manager.loggerDict.items():
if hasattr(logger, 'setLevel'):
# Guard against `logging.PlaceHolder` instances
logger.setLevel(logging.NOTSET)
logger.propagate = True
for name_levels in config_string.split(','):
name, _, level = name_levels.partition(':')
logger = getLogger(name)
logger.setLevel(level.upper())
configure_logging = configure
def set_level(name, level):
assert not isinstance(level, int)
logger = getLogger(name)
logger.setLevel(getattr(logging, level.upper()))
def get_logger(name=None):
known_loggers.add(name)
return getLogger(name)
def DEBUG(msg, *args, **kwargs):
"""temporary logger during development that is always on"""
logger = getLogger("DEBUG")
if len(logger.handlers) == 0:
logger.addHandler(StreamHandler())
logger.propagate = False
logger.setLevel(logging.DEBUG)
logger.DEV(msg, *args, **kwargs) | import logging
import json
import textwrap
from json.encoder import JSONEncoder
from logging import StreamHandler, Formatter, FileHandler | random_line_split |
|
slogging.py | import logging
import json
import textwrap
from json.encoder import JSONEncoder
from logging import StreamHandler, Formatter, FileHandler
from ethereum.utils import bcolors, is_numeric
DEFAULT_LOGLEVEL = 'INFO'
JSON_FORMAT = '%(message)s'
PRINT_FORMAT = '%(levelname)s:%(name)s\t%(message)s'
FILE_PREFIX = '%(asctime)s'
TRACE = 5
known_loggers = set()
log_listeners = []
def _inject_into_logger(name, code, namespace=None):
# This is a hack to fool the logging module into reporting correct source files.
# It determines the actual source of a logging call by inspecting the stack frame's
# source file. So we use this `eval(compile())` construct to "inject" our additional
# methods into the logging module.
if namespace is None:
|
eval(
compile(
code,
logging._srcfile,
'exec'
),
namespace
)
setattr(logging.Logger, name, namespace[name])
# Add `trace()` level to Logger
_inject_into_logger(
'trace',
textwrap.dedent(
"""\
def trace(self, msg, *args, **kwargs):
if self.isEnabledFor(TRACE):
self._log(TRACE, msg, args, **kwargs)
"""
),
{'TRACE': TRACE}
)
logging.TRACE = TRACE
logging.addLevelName(TRACE, "TRACE")
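# With the injection above every logger gains a .trace() method — e.g.
# (logger name and payload are illustrative):
#     get_logger('db').trace('cache probe', key='0xabc')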
# Add `DEV()` shortcut to loggers
_inject_into_logger(
'DEV',
textwrap.dedent(
"""\
def DEV(self, msg, *args, **kwargs):
'''Shortcut to output highlighted log text'''
kwargs['highlight'] = True
self.critical(msg, *args, **kwargs)
"""
)
)
class LogRecorder(object):
"""
temporarily records all logs, w/o level filtering
use only once!
"""
max_capacity = 1000 * 1000 # check we are not forgotten or abused
def __init__(self, disable_other_handlers=False, log_config=None):
self._records = []
log_listeners.append(self._add_log_record)
self._saved_config = None
if log_config:
self._saved_config = get_configuration()
configure(log_config)
self._saved_handlers = []
if disable_other_handlers:
self._saved_handlers = rootLogger.handlers[:]
rootLogger.handlers = []
def pop_records(self):
# only returns records on the first call
r = self._records[:]
self._records = []
try:
log_listeners.remove(self._add_log_record)
except ValueError:
pass
if self._saved_config:
configure(**self._saved_config)
self._saved_config = None
if self._saved_handlers:
rootLogger.handlers = self._saved_handlers[:]
self._saved_handlers = []
return r
def _add_log_record(self, msg):
self._records.append(msg)
assert len(self._records) < self.max_capacity
def get_configuration():
"""
get a configuration (snapshot) that can be used to call configure
snapshot = get_configuration()
configure(**snapshot)
"""
root = getLogger()
name_levels = [('', logging.getLevelName(root.level))]
name_levels.extend(
(name, logging.getLevelName(logger.level))
for name, logger
in root.manager.loggerDict.items()
if hasattr(logger, 'level')
)
config_string = ','.join('%s:%s' % x for x in name_levels)
return dict(config_string=config_string, log_json=SLogger.manager.log_json)
def get_logger_names():
return sorted(known_loggers, key=lambda x: '' if not x else x)
class BoundLogger(object):
def __init__(self, logger, context):
self.logger = logger
self.context = context
def bind(self, **kwargs):
return BoundLogger(self, kwargs)
def _proxy(self, method_name, *args, **kwargs):
context = self.context.copy()
context.update(kwargs)
return getattr(self.logger, method_name)(*args, **context)
trace = lambda self, *args, **kwargs: self._proxy('trace', *args, **kwargs)
debug = lambda self, *args, **kwargs: self._proxy('debug', *args, **kwargs)
info = lambda self, *args, **kwargs: self._proxy('info', *args, **kwargs)
warn = warning = lambda self, *args, **kwargs: self._proxy('warning', *args, **kwargs)
error = lambda self, *args, **kwargs: self._proxy('error', *args, **kwargs)
exception = lambda self, *args, **kwargs: self._proxy('exception', *args, **kwargs)
fatal = critical = lambda self, *args, **kwargs: self._proxy('critical', *args, **kwargs)
class _LogJSONEncoder(JSONEncoder):
def default(self, o):
return repr(o)
class SLogger(logging.Logger):
def __init__(self, name, level=DEFAULT_LOGLEVEL):
self.warn = self.warning
super(SLogger, self).__init__(name, level=level)
@property
def log_json(self):
return SLogger.manager.log_json
def is_active(self, level_name='trace'):
return self.isEnabledFor(logging._checkLevel(level_name.upper()))
def format_message(self, msg, kwargs, highlight, level):
if getattr(self, 'log_json', False):
message = dict()
message['event'] = '{}.{}'.format(self.name, msg.lower().replace(' ', '_'))
message['level'] = logging.getLevelName(level)
try:
message.update(kwargs)
try:
msg = json.dumps(message, cls=_LogJSONEncoder)
except TypeError:
# Invalid value. With our custom encoder this can only happen with non-string
# dict keys (see: https://bugs.python.org/issue18820).
message = _stringify_dict_keys(message)
msg = json.dumps(message, cls=_LogJSONEncoder)
except UnicodeDecodeError:
message.update({
k: v if is_numeric(v) or isinstance(v, (float, complex)) else repr(v)
for k, v in kwargs.items()
})
msg = json.dumps(message, cls=_LogJSONEncoder)
else:
msg = "{}{} {}{}".format(
bcolors.WARNING if highlight else "",
msg,
" ".join("{}={!s}".format(k, v) for k, v in kwargs.items()),
bcolors.ENDC if highlight else ""
)
return msg
def bind(self, **kwargs):
return BoundLogger(self, kwargs)
def _log(self, level, msg, args, **kwargs):
exc_info = kwargs.pop('exc_info', None)
extra = kwargs.pop('extra', {})
highlight = kwargs.pop('highlight', False)
extra['kwargs'] = kwargs
extra['original_msg'] = msg
msg = self.format_message(msg, kwargs, highlight, level)
super(SLogger, self)._log(level, msg, args, exc_info, extra)
class RootLogger(SLogger):
"""
A root logger is not that different to any other logger, except that
it must have a logging level and there is only one instance of it in
the hierarchy.
"""
def __init__(self, level):
"""
Initialize the logger with the name "root".
"""
super(RootLogger, self).__init__("root", level)
def handle(self, record):
if log_listeners:
rec_dict = getattr(record, 'kwargs', {}).copy()
rec_dict['event'] = getattr(record, 'original_msg', "")
for listener in log_listeners:
listener(rec_dict)
super(RootLogger, self).handle(record)
class SManager(logging.Manager):
def __init__(self, rootnode):
self.loggerClass = SLogger
self.log_json = False
super(SManager, self).__init__(rootnode)
def getLogger(self, name):
logging.setLoggerClass(SLogger)
return super(SManager, self).getLogger(name)
rootLogger = RootLogger(DEFAULT_LOGLEVEL)
SLogger.root = rootLogger
SLogger.manager = SManager(SLogger.root)
def _stringify_dict_keys(input_):
if isinstance(input_, dict):
res = {}
for k, v in input_.items():
v = _stringify_dict_keys(v)
if not isinstance(k, (int, long, bool, None.__class__)):
k = str(k)
res[k] = v
elif isinstance(input_, (list, tuple)):
res = input_.__class__([_stringify_dict_keys(i) for i in input_])
else:
res = input_
return res
def getLogger(name=None):
"""
Return a logger with the specified name, creating it if necessary.
If no name is specified, return the root logger.
"""
if name:
logger = SLogger.manager.getLogger(name)
return logger
else:
return rootLogger
def configure(config_string=None, log_json=False, log_file=None):
if not config_string:
config_string = ":{}".format(DEFAULT_LOGLEVEL)
if log_json:
SLogger.manager.log_json = True
log_format = JSON_FORMAT
else:
SLogger.manager.log_json = False
log_format = PRINT_FORMAT
if len(rootLogger.handlers) == 0:
handler = StreamHandler()
formatter = Formatter(log_format)
handler.setFormatter(formatter)
rootLogger.addHandler(handler)
if log_file:
if not any(isinstance(hndlr, FileHandler) for hndlr in rootLogger.handlers):
handler = FileHandler(log_file)
formatter = Formatter("{} {}".format(FILE_PREFIX, log_format))
handler.setFormatter(formatter)
rootLogger.addHandler(handler)
# Reset logging levels before applying new config below
for name, logger in SLogger.manager.loggerDict.items():
if hasattr(logger, 'setLevel'):
# Guard against `logging.PlaceHolder` instances
logger.setLevel(logging.NOTSET)
logger.propagate = True
for name_levels in config_string.split(','):
name, _, level = name_levels.partition(':')
logger = getLogger(name)
logger.setLevel(level.upper())
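# A typical call, combining per-logger levels with a log file (logger names
# and path are illustrative):
#     configure(config_string='eth.vm:DEBUG,p2p:INFO', log_file='/tmp/app.log')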
configure_logging = configure
def set_level(name, level):
assert not isinstance(level, int)
logger = getLogger(name)
logger.setLevel(getattr(logging, level.upper()))
def get_logger(name=None):
known_loggers.add(name)
return getLogger(name)
def DEBUG(msg, *args, **kwargs):
"""temporary logger during development that is always on"""
logger = getLogger("DEBUG")
if len(logger.handlers) == 0:
logger.addHandler(StreamHandler())
logger.propagate = False
logger.setLevel(logging.DEBUG)
logger.DEV(msg, *args, **kwargs)
| namespace = {} | conditional_block |
slogging.py | import logging
import json
import textwrap
from json.encoder import JSONEncoder
from logging import StreamHandler, Formatter, FileHandler
from ethereum.utils import bcolors, is_numeric
DEFAULT_LOGLEVEL = 'INFO'
JSON_FORMAT = '%(message)s'
PRINT_FORMAT = '%(levelname)s:%(name)s\t%(message)s'
FILE_PREFIX = '%(asctime)s'
TRACE = 5
known_loggers = set()
log_listeners = []
def _inject_into_logger(name, code, namespace=None):
# This is a hack to fool the logging module into reporting correct source files.
# It determines the actual source of a logging call by inspecting the stack frame's
# source file. So we use this `eval(compile())` construct to "inject" our additional
# methods into the logging module.
if namespace is None:
namespace = {}
eval(
compile(
code,
logging._srcfile,
'exec'
),
namespace
)
setattr(logging.Logger, name, namespace[name])
# Add `trace()` level to Logger
_inject_into_logger(
'trace',
textwrap.dedent(
"""\
def trace(self, msg, *args, **kwargs):
if self.isEnabledFor(TRACE):
self._log(TRACE, msg, args, **kwargs)
"""
),
{'TRACE': TRACE}
)
logging.TRACE = TRACE
logging.addLevelName(TRACE, "TRACE")
# Add `DEV()` shortcut to loggers
_inject_into_logger(
'DEV',
textwrap.dedent(
"""\
def DEV(self, msg, *args, **kwargs):
'''Shortcut to output highlighted log text'''
kwargs['highlight'] = True
self.critical(msg, *args, **kwargs)
"""
)
)
class LogRecorder(object):
"""
temporarily records all logs, w/o level filtering
use only once!
"""
max_capacity = 1000 * 1000 # check we are not forgotten or abused
def __init__(self, disable_other_handlers=False, log_config=None):
self._records = []
log_listeners.append(self._add_log_record)
self._saved_config = None
if log_config:
self._saved_config = get_configuration()
configure(log_config)
self._saved_handlers = []
if disable_other_handlers:
self._saved_handlers = rootLogger.handlers[:]
rootLogger.handlers = []
def pop_records(self):
# only returns records on the first call
r = self._records[:]
self._records = []
try:
log_listeners.remove(self._add_log_record)
except ValueError:
pass
if self._saved_config:
configure(**self._saved_config)
self._saved_config = None
if self._saved_handlers:
rootLogger.handlers = self._saved_handlers[:]
self._saved_handlers = []
return r
def _add_log_record(self, msg):
self._records.append(msg)
assert len(self._records) < self.max_capacity
def get_configuration():
"""
get a configuration (snapshot) that can be used to call configure
snapshot = get_configuration()
configure(**snapshot)
"""
root = getLogger()
name_levels = [('', logging.getLevelName(root.level))]
name_levels.extend(
(name, logging.getLevelName(logger.level))
for name, logger
in root.manager.loggerDict.items()
if hasattr(logger, 'level')
)
config_string = ','.join('%s:%s' % x for x in name_levels)
return dict(config_string=config_string, log_json=SLogger.manager.log_json)
def get_logger_names():
return sorted(known_loggers, key=lambda x: '' if not x else x)
class BoundLogger(object):
def __init__(self, logger, context):
self.logger = logger
self.context = context
def bind(self, **kwargs):
return BoundLogger(self, kwargs)
def _proxy(self, method_name, *args, **kwargs):
|
trace = lambda self, *args, **kwargs: self._proxy('trace', *args, **kwargs)
debug = lambda self, *args, **kwargs: self._proxy('debug', *args, **kwargs)
info = lambda self, *args, **kwargs: self._proxy('info', *args, **kwargs)
warn = warning = lambda self, *args, **kwargs: self._proxy('warning', *args, **kwargs)
error = lambda self, *args, **kwargs: self._proxy('error', *args, **kwargs)
exception = lambda self, *args, **kwargs: self._proxy('exception', *args, **kwargs)
fatal = critical = lambda self, *args, **kwargs: self._proxy('critical', *args, **kwargs)
class _LogJSONEncoder(JSONEncoder):
def default(self, o):
return repr(o)
class SLogger(logging.Logger):
def __init__(self, name, level=DEFAULT_LOGLEVEL):
self.warn = self.warning
super(SLogger, self).__init__(name, level=level)
@property
def log_json(self):
return SLogger.manager.log_json
def is_active(self, level_name='trace'):
return self.isEnabledFor(logging._checkLevel(level_name.upper()))
def format_message(self, msg, kwargs, highlight, level):
if getattr(self, 'log_json', False):
message = dict()
message['event'] = '{}.{}'.format(self.name, msg.lower().replace(' ', '_'))
message['level'] = logging.getLevelName(level)
try:
message.update(kwargs)
try:
msg = json.dumps(message, cls=_LogJSONEncoder)
except TypeError:
# Invalid value. With our custom encoder this can only happen with non-string
# dict keys (see: https://bugs.python.org/issue18820).
message = _stringify_dict_keys(message)
msg = json.dumps(message, cls=_LogJSONEncoder)
except UnicodeDecodeError:
message.update({
k: v if is_numeric(v) or isinstance(v, (float, complex)) else repr(v)
for k, v in kwargs.items()
})
msg = json.dumps(message, cls=_LogJSONEncoder)
else:
msg = "{}{} {}{}".format(
bcolors.WARNING if highlight else "",
msg,
" ".join("{}={!s}".format(k, v) for k, v in kwargs.items()),
bcolors.ENDC if highlight else ""
)
return msg
def bind(self, **kwargs):
return BoundLogger(self, kwargs)
def _log(self, level, msg, args, **kwargs):
exc_info = kwargs.pop('exc_info', None)
extra = kwargs.pop('extra', {})
highlight = kwargs.pop('highlight', False)
extra['kwargs'] = kwargs
extra['original_msg'] = msg
msg = self.format_message(msg, kwargs, highlight, level)
super(SLogger, self)._log(level, msg, args, exc_info, extra)
class RootLogger(SLogger):
"""
A root logger is not that different to any other logger, except that
it must have a logging level and there is only one instance of it in
the hierarchy.
"""
def __init__(self, level):
"""
Initialize the logger with the name "root".
"""
super(RootLogger, self).__init__("root", level)
def handle(self, record):
if log_listeners:
rec_dict = getattr(record, 'kwargs', {}).copy()
rec_dict['event'] = getattr(record, 'original_msg', "")
for listener in log_listeners:
listener(rec_dict)
super(RootLogger, self).handle(record)
class SManager(logging.Manager):
def __init__(self, rootnode):
self.loggerClass = SLogger
self.log_json = False
super(SManager, self).__init__(rootnode)
def getLogger(self, name):
logging.setLoggerClass(SLogger)
return super(SManager, self).getLogger(name)
rootLogger = RootLogger(DEFAULT_LOGLEVEL)
SLogger.root = rootLogger
SLogger.manager = SManager(SLogger.root)
def _stringify_dict_keys(input_):
if isinstance(input_, dict):
res = {}
for k, v in input_.items():
v = _stringify_dict_keys(v)
if not isinstance(k, (int, long, bool, None.__class__)):
k = str(k)
res[k] = v
elif isinstance(input_, (list, tuple)):
res = input_.__class__([_stringify_dict_keys(i) for i in input_])
else:
res = input_
return res
def getLogger(name=None):
"""
Return a logger with the specified name, creating it if necessary.
If no name is specified, return the root logger.
"""
if name:
logger = SLogger.manager.getLogger(name)
return logger
else:
return rootLogger
def configure(config_string=None, log_json=False, log_file=None):
if not config_string:
config_string = ":{}".format(DEFAULT_LOGLEVEL)
if log_json:
SLogger.manager.log_json = True
log_format = JSON_FORMAT
else:
SLogger.manager.log_json = False
log_format = PRINT_FORMAT
if len(rootLogger.handlers) == 0:
handler = StreamHandler()
formatter = Formatter(log_format)
handler.setFormatter(formatter)
rootLogger.addHandler(handler)
if log_file:
if not any(isinstance(hndlr, FileHandler) for hndlr in rootLogger.handlers):
handler = FileHandler(log_file)
formatter = Formatter("{} {}".format(FILE_PREFIX, log_format))
handler.setFormatter(formatter)
rootLogger.addHandler(handler)
# Reset logging levels before applying new config below
for name, logger in SLogger.manager.loggerDict.items():
if hasattr(logger, 'setLevel'):
# Guard against `logging.PlaceHolder` instances
logger.setLevel(logging.NOTSET)
logger.propagate = True
for name_levels in config_string.split(','):
name, _, level = name_levels.partition(':')
logger = getLogger(name)
logger.setLevel(level.upper())
configure_logging = configure
def set_level(name, level):
assert not isinstance(level, int)
logger = getLogger(name)
logger.setLevel(getattr(logging, level.upper()))
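# Illustrative call (logger name is made up); the level string is upper-cased
# before lookup, so lowercase names work:
#     set_level('eth.chain', 'warn')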
def get_logger(name=None):
known_loggers.add(name)
return getLogger(name)
def DEBUG(msg, *args, **kwargs):
"""temporary logger during development that is always on"""
logger = getLogger("DEBUG")
if len(logger.handlers) == 0:
logger.addHandler(StreamHandler())
logger.propagate = False
logger.setLevel(logging.DEBUG)
logger.DEV(msg, *args, **kwargs)
| context = self.context.copy()
context.update(kwargs)
return getattr(self.logger, method_name)(*args, **context) | identifier_body |
slogging.py | import logging
import json
import textwrap
from json.encoder import JSONEncoder
from logging import StreamHandler, Formatter, FileHandler
from ethereum.utils import bcolors, is_numeric
DEFAULT_LOGLEVEL = 'INFO'
JSON_FORMAT = '%(message)s'
PRINT_FORMAT = '%(levelname)s:%(name)s\t%(message)s'
FILE_PREFIX = '%(asctime)s'
TRACE = 5
known_loggers = set()
log_listeners = []
def _inject_into_logger(name, code, namespace=None):
# This is a hack to fool the logging module into reporting correct source files.
# It determines the actual source of a logging call by inspecting the stack frame's
# source file. So we use this `eval(compile())` construct to "inject" our additional
# methods into the logging module.
if namespace is None:
namespace = {}
eval(
compile(
code,
logging._srcfile,
'exec'
),
namespace
)
setattr(logging.Logger, name, namespace[name])
# Add `trace()` level to Logger
_inject_into_logger(
'trace',
textwrap.dedent(
"""\
def trace(self, msg, *args, **kwargs):
if self.isEnabledFor(TRACE):
self._log(TRACE, msg, args, **kwargs)
"""
),
{'TRACE': TRACE}
)
logging.TRACE = TRACE
logging.addLevelName(TRACE, "TRACE")
# Add `DEV()` shortcut to loggers
_inject_into_logger(
'DEV',
textwrap.dedent(
"""\
def DEV(self, msg, *args, **kwargs):
'''Shortcut to output highlighted log text'''
kwargs['highlight'] = True
self.critical(msg, *args, **kwargs)
"""
)
)
class LogRecorder(object):
"""
temporarily records all logs, w/o level filtering
use only once!
"""
max_capacity = 1000 * 1000 # check we are not forgotten or abused
def __init__(self, disable_other_handlers=False, log_config=None):
self._records = []
log_listeners.append(self._add_log_record)
self._saved_config = None
if log_config:
self._saved_config = get_configuration()
configure(log_config)
self._saved_handlers = []
if disable_other_handlers:
self._saved_handlers = rootLogger.handlers[:]
rootLogger.handlers = []
def pop_records(self):
# only returns records on the first call
r = self._records[:]
self._records = []
try:
log_listeners.remove(self._add_log_record)
except ValueError:
pass
if self._saved_config:
configure(**self._saved_config)
self._saved_config = None
if self._saved_handlers:
rootLogger.handlers = self._saved_handlers[:]
self._saved_handlers = []
return r
def _add_log_record(self, msg):
self._records.append(msg)
assert len(self._records) < self.max_capacity
def get_configuration():
"""
get a configuration (snapshot) that can be used to call configure
snapshot = get_configuration()
configure(**snapshot)
"""
root = getLogger()
name_levels = [('', logging.getLevelName(root.level))]
name_levels.extend(
(name, logging.getLevelName(logger.level))
for name, logger
in root.manager.loggerDict.items()
if hasattr(logger, 'level')
)
config_string = ','.join('%s:%s' % x for x in name_levels)
return dict(config_string=config_string, log_json=SLogger.manager.log_json)
def get_logger_names():
return sorted(known_loggers, key=lambda x: '' if not x else x)
class BoundLogger(object):
def __init__(self, logger, context):
self.logger = logger
self.context = context
def bind(self, **kwargs):
return BoundLogger(self, kwargs)
def _proxy(self, method_name, *args, **kwargs):
context = self.context.copy()
context.update(kwargs)
return getattr(self.logger, method_name)(*args, **context)
trace = lambda self, *args, **kwargs: self._proxy('trace', *args, **kwargs)
debug = lambda self, *args, **kwargs: self._proxy('debug', *args, **kwargs)
info = lambda self, *args, **kwargs: self._proxy('info', *args, **kwargs)
warn = warning = lambda self, *args, **kwargs: self._proxy('warning', *args, **kwargs)
error = lambda self, *args, **kwargs: self._proxy('error', *args, **kwargs)
exception = lambda self, *args, **kwargs: self._proxy('exception', *args, **kwargs)
fatal = critical = lambda self, *args, **kwargs: self._proxy('critical', *args, **kwargs)
class _LogJSONEncoder(JSONEncoder):
def default(self, o):
return repr(o)
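# The encoder falls back to repr() for anything json cannot serialize —
# a sketch (the exact output depends on the object's repr):
#     json.dumps({'obj': object()}, cls=_LogJSONEncoder)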
class | (logging.Logger):
def __init__(self, name, level=DEFAULT_LOGLEVEL):
self.warn = self.warning
super(SLogger, self).__init__(name, level=level)
@property
def log_json(self):
return SLogger.manager.log_json
def is_active(self, level_name='trace'):
return self.isEnabledFor(logging._checkLevel(level_name.upper()))
def format_message(self, msg, kwargs, highlight, level):
if getattr(self, 'log_json', False):
message = dict()
message['event'] = '{}.{}'.format(self.name, msg.lower().replace(' ', '_'))
message['level'] = logging.getLevelName(level)
try:
message.update(kwargs)
try:
msg = json.dumps(message, cls=_LogJSONEncoder)
except TypeError:
# Invalid value. With our custom encoder this can only happen with non-string
# dict keys (see: https://bugs.python.org/issue18820).
message = _stringify_dict_keys(message)
msg = json.dumps(message, cls=_LogJSONEncoder)
except UnicodeDecodeError:
message.update({
k: v if is_numeric(v) or isinstance(v, (float, complex)) else repr(v)
for k, v in kwargs.items()
})
msg = json.dumps(message, cls=_LogJSONEncoder)
else:
msg = "{}{} {}{}".format(
bcolors.WARNING if highlight else "",
msg,
" ".join("{}={!s}".format(k, v) for k, v in kwargs.items()),
bcolors.ENDC if highlight else ""
)
return msg
def bind(self, **kwargs):
return BoundLogger(self, kwargs)
def _log(self, level, msg, args, **kwargs):
exc_info = kwargs.pop('exc_info', None)
extra = kwargs.pop('extra', {})
highlight = kwargs.pop('highlight', False)
extra['kwargs'] = kwargs
extra['original_msg'] = msg
msg = self.format_message(msg, kwargs, highlight, level)
super(SLogger, self)._log(level, msg, args, exc_info, extra)
class RootLogger(SLogger):
"""
A root logger is not that different to any other logger, except that
it must have a logging level and there is only one instance of it in
the hierarchy.
"""
def __init__(self, level):
"""
Initialize the logger with the name "root".
"""
super(RootLogger, self).__init__("root", level)
def handle(self, record):
if log_listeners:
rec_dict = getattr(record, 'kwargs', {}).copy()
rec_dict['event'] = getattr(record, 'original_msg', "")
for listener in log_listeners:
listener(rec_dict)
super(RootLogger, self).handle(record)
class SManager(logging.Manager):
def __init__(self, rootnode):
self.loggerClass = SLogger
self.log_json = False
super(SManager, self).__init__(rootnode)
def getLogger(self, name):
logging.setLoggerClass(SLogger)
return super(SManager, self).getLogger(name)
rootLogger = RootLogger(DEFAULT_LOGLEVEL)
SLogger.root = rootLogger
SLogger.manager = SManager(SLogger.root)
def _stringify_dict_keys(input_):
if isinstance(input_, dict):
res = {}
for k, v in input_.items():
v = _stringify_dict_keys(v)
if not isinstance(k, (int, long, bool, None.__class__)):
k = str(k)
res[k] = v
elif isinstance(input_, (list, tuple)):
res = input_.__class__([_stringify_dict_keys(i) for i in input_])
else:
res = input_
return res
def getLogger(name=None):
"""
Return a logger with the specified name, creating it if necessary.
If no name is specified, return the root logger.
"""
if name:
logger = SLogger.manager.getLogger(name)
return logger
else:
return rootLogger
def configure(config_string=None, log_json=False, log_file=None):
if not config_string:
config_string = ":{}".format(DEFAULT_LOGLEVEL)
if log_json:
SLogger.manager.log_json = True
log_format = JSON_FORMAT
else:
SLogger.manager.log_json = False
log_format = PRINT_FORMAT
if len(rootLogger.handlers) == 0:
handler = StreamHandler()
formatter = Formatter(log_format)
handler.setFormatter(formatter)
rootLogger.addHandler(handler)
if log_file:
if not any(isinstance(hndlr, FileHandler) for hndlr in rootLogger.handlers):
handler = FileHandler(log_file)
formatter = Formatter("{} {}".format(FILE_PREFIX, log_format))
handler.setFormatter(formatter)
rootLogger.addHandler(handler)
# Reset logging levels before applying new config below
for name, logger in SLogger.manager.loggerDict.items():
if hasattr(logger, 'setLevel'):
# Guard against `logging.PlaceHolder` instances
logger.setLevel(logging.NOTSET)
logger.propagate = True
for name_levels in config_string.split(','):
name, _, level = name_levels.partition(':')
logger = getLogger(name)
logger.setLevel(level.upper())
configure_logging = configure
def set_level(name, level):
assert not isinstance(level, int)
logger = getLogger(name)
logger.setLevel(getattr(logging, level.upper()))
def get_logger(name=None):
known_loggers.add(name)
return getLogger(name)
def DEBUG(msg, *args, **kwargs):
"""temporary logger during development that is always on"""
logger = getLogger("DEBUG")
if len(logger.handlers) == 0:
logger.addHandler(StreamHandler())
logger.propagate = False
logger.setLevel(logging.DEBUG)
logger.DEV(msg, *args, **kwargs)
| SLogger | identifier_name |
errors.rs | use attaca::marshal::ObjectHash;
| types { Error, ErrorKind, ResultExt, Result; }
links {
Attaca(::attaca::Error, ::attaca::ErrorKind);
}
foreign_links {
Clap(::clap::Error);
Fmt(::std::fmt::Error);
GlobSet(::globset::Error);
Nul(::std::ffi::NulError);
Io(::std::io::Error);
}
errors {
FsckFailure(expected: ObjectHash, actual: ObjectHash) {
description("an object did not hash to the expected value"),
display("an object {} did not hash to the expected value {}", actual, expected)
}
InvalidUsage {
description("invalid usage"),
display("invalid usage"),
}
NotACommit(hash: ObjectHash) {
description("not a commit hash"),
display("{} is not a commit hash", hash),
}
}
} | error_chain! { | random_line_split |
utils.py | # Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import os
from perfkitbenchmarker import flags
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.vm_util import POLL_INTERVAL
FLAGS = flags.FLAGS
flags.DEFINE_string('openstack_auth_url',
os.environ.get('OS_AUTH_URL', 'http://localhost:5000'),
('Url for Keystone authentication service, defaults to '
'$OS_AUTH_URL. Required for discovery of other OpenStack '
'service URLs.'))
flags.DEFINE_string('openstack_username',
os.getenv('OS_USERNAME', 'admin'),
'OpenStack login username, defaults to $OS_USERNAME.')
flags.DEFINE_string('openstack_tenant',
os.getenv('OS_TENANT_NAME', 'admin'),
'OpenStack tenant name, defaults to $OS_TENANT_NAME.')
flags.DEFINE_string('openstack_password_file',
os.getenv('OPENSTACK_PASSWORD_FILE',
'~/.config/openstack-password.txt'),
'Path to file containing the openstack password, '
'defaults to $OPENSTACK_PASSWORD_FILE. Alternatively, '
'setting the password itself in $OS_PASSWORD is also '
'supported.')
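# Typical environment for these flags (values are illustrative):
#     export OS_AUTH_URL=http://keystone.example:5000
#     export OS_USERNAME=admin OS_TENANT_NAME=demo
#     export OPENSTACK_PASSWORD_FILE=~/.config/openstack-password.txt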
class KeystoneAuth(object):
"""
Usage example:
auth = KeystoneAuth(auth_url, auth_tenant, auth_user, auth_password)
token = auth.get_token()
tenant_id = auth.get_tenant_id()
token and tenant_id are required to use all OpenStack python clients
"""
def __init__(self, url, tenant, user, password):
self.__url = url
self.__tenant = tenant
self.__user = user
self.__password = password
self.__connection = None
self.__session = None
def GetConnection(self):
if self.__connection is None:
self.__authenticate()
return self.__connection
def __authenticate(self):
import keystoneclient.v2_0.client as ksclient
self.__connection = ksclient.Client(
auth_url=self.__url,
username=self.__user,
password=self.__password,
tenant=self.__tenant)
self.__connection.authenticate()
def get_token(self):
return self.GetConnection().get_token(self.__session)
def get_tenant_id(self):
raw_token = self.GetConnection().get_raw_token_from_identity_service(
auth_url=self.__url,
username=self.__user,
password=self.__password,
tenant_name=self.__tenant
)
return raw_token['token']['tenant']['id']
class NovaClient(object):
def __getattribute__(self, item):
try:
return super(NovaClient, self).__getattribute__(item)
except AttributeError:
return self.__client.__getattribute__(item)
def GetPassword(self):
# For compatibility with Nova CLI, use 'OS'-prefixed environment value
# if present. Also support reading the password from a file.
error_msg = ('No OpenStack password specified. '
'Either set the environment variable OS_PASSWORD to the '
'admin password, or provide the name of a file '
'containing the password using the OPENSTACK_PASSWORD_FILE '
'environment variable or --openstack_password_file flag.')
password = os.getenv('OS_PASSWORD')
if password is not None:
|
try:
with open(os.path.expanduser(FLAGS.openstack_password_file)) as pwfile:
password = pwfile.readline().rstrip()
return password
except IOError as e:
raise Exception(error_msg + ' ' + str(e))
raise Exception(error_msg)
def __init__(self):
from novaclient import client as noclient
self.url = FLAGS.openstack_auth_url
self.user = FLAGS.openstack_username
self.tenant = FLAGS.openstack_tenant
self.password = self.GetPassword()
self.__auth = KeystoneAuth(self.url, self.tenant,
self.user, self.password)
self.__client = noclient.Client('1.1',
auth_url=self.url,
username=self.user,
auth_token=self.__auth.get_token(),
tenant_id=self.__auth.get_tenant_id(),
)
def reconnect(self):
from novaclient import client as noclient
self.__auth = KeystoneAuth(self.url, self.tenant, self.user,
self.password)
self.__client = noclient.Client('1.1',
auth_url=self.url,
username=self.user,
auth_token=self.__auth.get_token(),
tenant_id=self.__auth.get_tenant_id(),
)
class AuthException(Exception):
"""Wrapper for NovaClient auth exceptions."""
pass
def retry_authorization(max_retries=1, poll_interval=POLL_INTERVAL):
def decored(function):
@vm_util.Retry(max_retries=max_retries,
poll_interval=poll_interval,
retryable_exceptions=AuthException,
log_errors=False)
@functools.wraps(function)
def decor(*args, **kwargs):
from novaclient.exceptions import Unauthorized
try:
return function(*args, **kwargs)
except Unauthorized as e:
NovaClient.instance.reconnect()
raise AuthException(str(e))
return decor
return decored
| return password | conditional_block |
utils.py | # Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import os
from perfkitbenchmarker import flags
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.vm_util import POLL_INTERVAL
FLAGS = flags.FLAGS
flags.DEFINE_string('openstack_auth_url',
os.environ.get('OS_AUTH_URL', 'http://localhost:5000'),
('Url for Keystone authentication service, defaults to '
'$OS_AUTH_URL. Required for discovery of other OpenStack '
'service URLs.'))
flags.DEFINE_string('openstack_username',
os.getenv('OS_USERNAME', 'admin'),
'OpenStack login username, defaults to $OS_USERNAME.')
flags.DEFINE_string('openstack_tenant',
os.getenv('OS_TENANT_NAME', 'admin'),
'OpenStack tenant name, defaults to $OS_TENANT_NAME.')
flags.DEFINE_string('openstack_password_file',
os.getenv('OPENSTACK_PASSWORD_FILE',
'~/.config/openstack-password.txt'),
'Path to file containing the openstack password, '
'defaults to $OPENSTACK_PASSWORD_FILE. Alternatively, '
'setting the password itself in $OS_PASSWORD is also '
'supported.')
| """
Usage example:
auth = KeystoneAuth(auth_url, auth_tenant, auth_user, auth_password)
token = auth.get_token()
tenant_id = auth.get_tenant_id()
token and tenant_id are required to use all OpenStack python clients
"""
def __init__(self, url, tenant, user, password):
self.__url = url
self.__tenant = tenant
self.__user = user
self.__password = password
self.__connection = None
self.__session = None
def GetConnection(self):
if self.__connection is None:
self.__authenticate()
return self.__connection
def __authenticate(self):
import keystoneclient.v2_0.client as ksclient
self.__connection = ksclient.Client(
auth_url=self.__url,
username=self.__user,
password=self.__password,
tenant=self.__tenant)
self.__connection.authenticate()
def get_token(self):
return self.GetConnection().get_token(self.__session)
def get_tenant_id(self):
raw_token = self.GetConnection().get_raw_token_from_identity_service(
auth_url=self.__url,
username=self.__user,
password=self.__password,
tenant_name=self.__tenant
)
return raw_token['token']['tenant']['id']
class NovaClient(object):
def __getattribute__(self, item):
try:
return super(NovaClient, self).__getattribute__(item)
except AttributeError:
return self.__client.__getattribute__(item)
def GetPassword(self):
# For compatibility with Nova CLI, use 'OS'-prefixed environment value
# if present. Also support reading the password from a file.
error_msg = ('No OpenStack password specified. '
'Either set the environment variable OS_PASSWORD to the '
'admin password, or provide the name of a file '
'containing the password using the OPENSTACK_PASSWORD_FILE '
'environment variable or --openstack_password_file flag.')
password = os.getenv('OS_PASSWORD')
if password is not None:
return password
try:
with open(os.path.expanduser(FLAGS.openstack_password_file)) as pwfile:
password = pwfile.readline().rstrip()
return password
except IOError as e:
raise Exception(error_msg + ' ' + str(e))
raise Exception(error_msg)
def __init__(self):
from novaclient import client as noclient
self.url = FLAGS.openstack_auth_url
self.user = FLAGS.openstack_username
self.tenant = FLAGS.openstack_tenant
self.password = self.GetPassword()
self.__auth = KeystoneAuth(self.url, self.tenant,
self.user, self.password)
self.__client = noclient.Client('1.1',
auth_url=self.url,
username=self.user,
auth_token=self.__auth.get_token(),
tenant_id=self.__auth.get_tenant_id(),
)
def reconnect(self):
from novaclient import client as noclient
self.__auth = KeystoneAuth(self.url, self.tenant, self.user,
self.password)
self.__client = noclient.Client('1.1',
auth_url=self.url,
username=self.user,
auth_token=self.__auth.get_token(),
tenant_id=self.__auth.get_tenant_id(),
)
class AuthException(Exception):
"""Wrapper for NovaClient auth exceptions."""
pass
def retry_authorization(max_retries=1, poll_interval=POLL_INTERVAL):
def decored(function):
@vm_util.Retry(max_retries=max_retries,
poll_interval=poll_interval,
retryable_exceptions=AuthException,
log_errors=False)
@functools.wraps(function)
def decor(*args, **kwargs):
from novaclient.exceptions import Unauthorized
try:
return function(*args, **kwargs)
except Unauthorized as e:
NovaClient.instance.reconnect()
raise AuthException(str(e))
return decor
return decored | class KeystoneAuth(object): | random_line_split |
rprocess.py
"""Definitions for the `RProcess` class."""
from math import isnan
import numpy as np
from astrocats.catalog.source import SOURCE
from mosfit.constants import C_CGS, DAY_CGS, IPI, KM_CGS, M_SUN_CGS
from mosfit.modules.engines.engine import Engine
from scipy.interpolate import RegularGridInterpolator
# Important: Only define one ``Module`` class per file.
class RProcess(Engine):
"""r-process decay engine.
input luminosity adapted from Metzger 2016: 2017LRR....20....3M
"""
_REFERENCES = [
{SOURCE.BIBCODE: '2013ApJ...775...18B'},
{SOURCE.BIBCODE: '2017LRR....20....3M'},
{SOURCE.BIBCODE: '2017arXiv170708132V'}
]
ckm = C_CGS / KM_CGS
def __init__(self, **kwargs):
"""Initialize module."""
super(RProcess, self).__init__(**kwargs)
self._wants_dense = True
barnes_v = np.asarray([0.1, 0.2, 0.3])
barnes_M = np.asarray([1.e-3, 5.e-3, 1.e-2, 5.e-2])
barnes_a = np.asarray([[2.01, 4.52, 8.16], [0.81, 1.9, 3.2], [
0.56, 1.31, 2.19], [.27, .55, .95]])
barnes_b = np.asarray([[0.28, 0.62, 1.19], [0.19, 0.28, 0.45], [
0.17, 0.21, 0.31], [0.10, 0.13, 0.15]])
barnes_d = np.asarray([[1.12, 1.39, 1.52], [0.86, 1.21, 1.39], [
0.74, 1.13, 1.32], [0.6, 0.9, 1.13]])
self.therm_func_a = RegularGridInterpolator(
(barnes_M, barnes_v), barnes_a, bounds_error=False, fill_value=None)
self.therm_func_b = RegularGridInterpolator(
(barnes_M, barnes_v), barnes_b, bounds_error=False, fill_value=None)
self.therm_func_d = RegularGridInterpolator(
(barnes_M, barnes_v), barnes_d, bounds_error=False, fill_value=None)
def process(self, **kwargs):
"""Process module."""
self._times = kwargs[self.key('dense_times')]
self._mass = kwargs[self.key('mejecta')] * M_SUN_CGS
self._rest_texplosion = kwargs[self.key('resttexplosion')]
self._vejecta = kwargs[self.key('vejecta')]
self._a = self.therm_func_a(
[self._mass / M_SUN_CGS, self._vejecta / self.ckm])[0]
self._bx2 = 2.0 * self.therm_func_b(
[self._mass / M_SUN_CGS, self._vejecta / self.ckm])[0]
self._d = self.therm_func_d(
[self._mass / M_SUN_CGS, self._vejecta / self.ckm])[0]
ts = [
np.inf
if self._rest_texplosion > x else (x - self._rest_texplosion)
for x in self._times
]
self._lscale = self._mass * 4.0e18 * 0.36
luminosities = [
self._lscale * (0.5 - IPI * np.arctan(
(t * DAY_CGS - 1.3) / 0.11)) ** 1.3 *
(np.exp(-self._a * t) + np.log1p(
self._bx2 * t ** self._d) / (self._bx2 * t ** self._d))
for t in ts
        ]
        luminosities = [0.0 if isnan(x) else x for x in luminosities]
        return {self.dense_key('luminosities'): luminosities}
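# --- Standalone numeric sketch (not part of rprocess.py) --------------------
# Evaluates the same input-luminosity curve that RProcess.process() builds
# above: the Metzger (2017LRR....20....3M) r-process heating rate times the
# Barnes et al. thermalization fit. The ejecta mass and the (a, b, d)
# coefficients below are illustrative values, not module defaults.
import numpy as np

M_SUN_CGS = 1.989e33   # g
DAY_CGS = 86400.0      # s
IPI = 1.0 / np.pi

mej = 1.0e-2 * M_SUN_CGS                  # assumed ejecta mass
a, b, d = 0.56, 0.17, 0.74                # grid point: M = 1e-2 Msun, v = 0.1c
bx2 = 2.0 * b

t = np.array([0.5, 1.0, 3.0, 10.0])       # days since explosion
lscale = mej * 4.0e18 * 0.36
therm = np.exp(-a * t) + np.log1p(bx2 * t ** d) / (bx2 * t ** d)
lum = lscale * (0.5 - IPI * np.arctan((t * DAY_CGS - 1.3) / 0.11)) ** 1.3 * therm
print(lum)  # erg / s at each epoch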
base.py
__author__ = 'Robbert Harms'
__date__ = "2015-04-23"
__maintainer__ = "Robbert Harms"
__email__ = "[email protected]"
class DVS(object):
def __init__(self, comments, dvs_tables):
"""Create a new DVS object
Args:
comments (str): The list with comments on top of the file
dvs_tables (list of DVSDirectionTable): The list with the direction tables
Attributes:
comments (str): The list with comments on top of the file
dvs_tables (list of DVSDirectionTable): The list with the direction tables
"""
self.comments = comments
self.dvs_tables = dvs_tables
def get_file_string(self, windows_line_endings=True):
"""Get a complete string representation of the DVS.
Args:
windows_line_endings (boolean): If we want to include an \r before every \n
"""
s = self.comments + "\n"
s += "\n".join([table.get_file_string(windows_line_endings=False) for table in self.dvs_tables])
if windows_line_endings:
s = s.replace("\n", "\r\n")
return s
def get_overview_representation(self):
"""Get a small overview of the contained contents."""
s = 'Nmr tables: {}'.format(len(self.dvs_tables)) + "\n"
for i, table in enumerate(self.dvs_tables):
s += 'Table {}: {} directions'.format(i, table.table.shape[0]) + "\n"
return s
class DVSDirectionTable(object):
def __init__(self, table, comments='', coordinate_system='xyz', normalisation='none'):
"""A representation of a direction table.
Args:
table (ndarray): The actual table
comments (str): The list with comments above this table
coordinate_system (str): The coordinate system (for example 'xyz')
normalisation (str): The normalisation definition (normally 'none')
Attributes:
table (ndarray): The actual table
comments (str): The list with comments above this table
coordinate_system (str): The coordinate system (for example 'xyz')
normalisation (str): The normalisation definition (normally 'none')
"""
self.table = table
self.comments = comments
self.coordinate_system = coordinate_system
self.normalisation = normalisation
def get_file_string(self, windows_line_endings=True):
"""Get a complete string representation of this direction table.
Args:
windows_line_endings (boolean): If we want to include an \r before every \n
"""
s = self.comments
s += '[directions={}]'.format(self.table.shape[0]) + "\n"
s += 'CoordinateSystem = {}'.format(self.coordinate_system) + "\n"
s += 'Normalisation = {}'.format(self.normalisation) + "\n"
for i in range(self.table.shape[0]):
s += 'Vector[{0}] = ( {1}, {2}, {3} )'.format(i, *self.table[i, :]) + "\n"
if windows_line_endings:
            s = s.replace("\n", "\r\n")
        return s
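# --- Usage sketch (not part of base.py) --------------------------------------
# Writes a three-direction table in the DVS format produced by the classes
# above; the table values and comment strings are illustrative.
import numpy as np

table = DVSDirectionTable(
    table=np.eye(3),
    comments='# example direction table\n')
dvs = DVS(comments='# example DVS file', dvs_tables=[table])
print(dvs.get_file_string(windows_line_endings=False))
# Expected shape of the output:
#   [directions=3]
#   CoordinateSystem = xyz
#   Normalisation = none
#   Vector[0] = ( 1.0, 0.0, 0.0 )
#   ...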
server.js
'use strict';
var gulp = require('gulp');
var paths = gulp.paths;
var util = require('util');
var browserSync = require('browser-sync');
var middleware = require('./proxy');
function browserSyncInit(baseDir, files, browser) {
  browser = browser === undefined ? 'default' : browser;
  var routes = null;
  if(baseDir === paths.src || (util.isArray(baseDir) && baseDir.indexOf(paths.src) !== -1)) {
    routes = {
      '/bower_components': 'bower_components'
    };
  }
  browserSync.instance = browserSync.init(files, {
    startPath: '/',
    server: {
      baseDir: baseDir,
      middleware: middleware,
      routes: routes
    },
    browser: browser
  });
}
gulp.task('serve', ['watch'], function () {
browserSyncInit([
paths.tmp + '/serve',
paths.src,
paths.lib
], [
paths.tmp + '/serve/app/**/*.css',
paths.src + '/app/**/*.js',
paths.lib + '/**/*.js',
paths.src + 'src/assets/images/**/*',
paths.tmp + '/serve/*.html',
paths.tmp + '/serve/app/**/*.html',
paths.src + '/app/**/*.html'
]);
});
gulp.task('serve:dist', ['build'], function () {
browserSyncInit(paths.dist.demo);
});
gulp.task('serve:e2e', ['inject'], function () {
browserSyncInit([paths.tmp + '/serve', paths.src], null, []);
});
gulp.task('serve:e2e-dist', ['build'], function () {
browserSyncInit(paths.dist, null, []);
});
SessionStore.js
/*
Copyright 2017 Vector Creations Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import dis from '../dispatcher';
import {Store} from 'flux/utils';
const INITIAL_STATE = {
cachedPassword: localStorage.getItem('mx_pass'),
};
/**
* A class for storing application state to do with the session. This is a simple flux
* store that listens for actions and updates its state accordingly, informing any
* listeners (views) of state changes.
*
* Usage:
* ```
* sessionStore.addListener(() => {
* this.setState({ cachedPassword: sessionStore.getCachedPassword() })
* })
* ```
*/
class SessionStore extends Store {
constructor() {
super(dis);
// Initialise state
this._state = INITIAL_STATE;
}
_update() {
// Persist state to localStorage
if (this._state.cachedPassword) {
localStorage.setItem('mx_pass', this._state.cachedPassword);
} else {
            localStorage.removeItem('mx_pass');
}
this.__emitChange();
}
_setState(newState) {
this._state = Object.assign(this._state, newState);
this._update();
}
__onDispatch(payload) {
switch (payload.action) {
case 'cached_password':
this._setState({
cachedPassword: payload.cachedPassword,
});
break;
case 'password_changed':
this._setState({
cachedPassword: null,
});
break;
case 'on_logged_out':
this._setState({
cachedPassword: null,
});
break;
}
}
getCachedPassword() {
return this._state.cachedPassword;
}
}
let singletonSessionStore = null;
if (!singletonSessionStore) {
singletonSessionStore = new SessionStore();
}
module.exports = singletonSessionStore;
mod.rs
// Copyright 2015-2017 Intecture Developers.
//
// Licensed under the Mozilla Public License 2.0 <LICENSE or
// https://www.tldrlegal.com/l/mpl-2.0>. This file may not be copied,
// modified, or distributed except according to those terms.
//! OS abstractions for `Telemetry`.
mod centos;
mod debian;
mod fedora;
mod freebsd;
mod macos;
mod nixos;
mod ubuntu;
pub use self::centos::Centos;
pub use self::debian::Debian;
pub use self::fedora::Fedora;
pub use self::freebsd::Freebsd;
pub use self::macos::Macos;
pub use self::nixos::Nixos;
pub use self::ubuntu::Ubuntu;
use errors::*;
use futures::Future;
use super::Telemetry;
pub trait TelemetryProvider {
fn available() -> bool where Self: Sized;
fn load(&self) -> Box<Future<Item = Telemetry, Error = Error>>;
}
#[doc(hidden)]
pub fn factory() -> Result<Box<TelemetryProvider>> {
if Centos::available() {
Ok(Box::new(Centos))
}
else if Debian::available() {
Ok(Box::new(Debian))
}
else if Fedora::available() {
Ok(Box::new(Fedora))
}
else if Freebsd::available() {
Ok(Box::new(Freebsd))
}
else if Macos::available() {
Ok(Box::new(Macos))
}
else if Nixos::available() {
Ok(Box::new(Nixos))
}
else if Ubuntu::available() {
Ok(Box::new(Ubuntu))
} else {
Err(ErrorKind::ProviderUnavailable("Telemetry").into())
}
}
Helpers.API.Establishments.js
(function (exports, $) {
const Module = exports.Helpers.API || {}
// Internal cache
let internalCache = {}
let formControls
// Default config
const settings = {
// This is the ajax config object
ajax: {
url: '/establishments.json',
type: 'GET',
dataType: 'json'
},
// The init requires 2 values to be set on a page:
// $(init.selector).data(init.dataAttr)
//
init: {
// Context selector to obtain the feature flag value
selector: '#expenses',
// This attr should contain: true | false
dataAttr: 'featureDistance'
},
// Success / Fail events via $.publish()
events: {
cacheLoaded: '/API/establishments/loaded/',
cacheLoadError: '/API/establishments/load/error/'
}
}
// Init
  function loadData (ajaxConfig) {
// This module is a simple cache of data
// It will self init and publish success and failure
// events when the promise resolves
return query(ajaxConfig).then(function (results) {
// Load the results into the internal cache
internalCache = results.sort(function (a, b) {
const nameA = a.name.toUpperCase() // ignore upper and lowercase
const nameB = b.name.toUpperCase() // ignore upper and lowercase
if (nameA < nameB) {
return -1
}
if (nameA > nameB) {
return 1
}
return 0
})
// Publish the success event
$.publish(settings.events.cacheLoaded)
}, function (status, error) {
// Load the error status and response
// in the the internal cache
internalCache = {
error: error,
status: status
}
// Publish the success settings.events.cacheLoadError
$.publish(settings.events.cacheLoadError, internalCache)
})
}
// Query will merge the settings
// and delegate to ..API.CORE
function query (ajaxConfig) {
const mergedSettings = $.extend(settings.ajax, ajaxConfig)
return moj.Helpers.API._CORE.query(mergedSettings)
}
// Filter by category
function getLocationByCategory (category) {
    // If no category is given, return the entire internalCache
if (!category) {
return internalCache
}
// Filter the internalCache on category
// Examples: 'crown_court', 'prison', etc
return internalCache.filter(function (obj) {
return obj.category.indexOf(category) > -1
})
}
// init with jquery based on a dom selector
function init () {
// Checking DOM for feature flag value
if ($(settings.init.selector).data(settings.init.dataAttr)) {
formControls = moj.Helpers.FormControls
return loadData()
}
}
// leaving in place for possible refactor
function getAsSelectWithOptions (a, b) {
return getAsOptions(a, b)
}
// This method will return an array of <option> tags
// It is also wrapped in a promise to ensure
// the entire operation completes before other
// events are triggered
function getAsOptions (category, selected) {
const results = getLocationByCategory(category)
if (results.length === 0) throw Error('Missing results: no data to build options with')
const def = $.Deferred()
formControls.getOptions(results, selected).then(function (els) {
def.resolve(els)
}, function () {
def.reject(arguments)
})
return def.promise()
}
Module.Establishments = {
init: init,
loadData: loadData,
getLocationByCategory: getLocationByCategory,
getAsOptions: getAsOptions,
getAsSelectWithOptions: getAsSelectWithOptions
}
$(document).ready(init)
exports.Helpers.API = Module
}(moj, jQuery))
geonetwork.py
import os
import csv
import tempfile
import codecs
from urlparse import urlsplit
from os.path import abspath
import requests
from metadown.utils.etree import etree
namespaces = {
"gmx": "http://www.isotc211.org/2005/gmx",
"gsr": "http://www.isotc211.org/2005/gsr",
"gss": "http://www.isotc211.org/2005/gss",
"gts": "http://www.isotc211.org/2005/gts",
"xs": "http://www.w3.org/2001/XMLSchema",
"gml": "http://www.opengis.net/gml/3.2",
"xlink": "http://www.w3.org/1999/xlink",
"xsi": "http://www.w3.org/2001/XMLSchema-instance",
"gco": "http://www.isotc211.org/2005/gco",
"gmd": "http://www.isotc211.org/2005/gmd",
"gmi": "http://www.isotc211.org/2005/gmi",
"srv": "http://www.isotc211.org/2005/srv",
"geonet": "http://www.fao.org/geonetwork"
}
class GeoNetworkCollector(object):
def __init__(self, base_url):
self.data = base_url + '/srv/en/csv.search?'
# change to use ISO with extra GeoNetwork metadata
self.download = base_url + '/srv/en/xml.metadata.get?id='
def utf_8_encoder(self, unicode_csv_data):
for line in unicode_csv_data:
yield line.encode('utf-8')
def | (self):
isos = []
o, t = tempfile.mkstemp()
os.close(o)  # close the descriptor from mkstemp; the path is reopened below
with codecs.open(t, "w+", "utf-8") as h:
h.write(requests.get(self.data).text)
with codecs.open(t, "rb", "utf-8") as f:
reader = csv.DictReader(self.utf_8_encoder(f))
for row in reader:
if row.get('schema') != 'iso19139':
continue
download_url = self.download + row.get('id')
isos.append(download_url)
os.unlink(f.name)
return isos
@staticmethod
def namer(url, **kwargs):
uid = urlsplit(url).query
uid = uid[uid.index("=")+1:]
return "GeoNetwork-" + uid + ".xml"
@staticmethod
def uuid_namer(url, **kwargs):
root = etree.parse(url).getroot()
x_res = root.xpath(
'/gmd:MD_Metadata/gmd:fileIdentifier/gco:CharacterString',
namespaces=namespaces
)
uuid = "GeoNetwork-" + x_res[0].text + ".xml"
return uuid
@staticmethod
def modifier(url, **kwargs):
# translate ISO19139 to ISO19115
gmi_ns = "http://www.isotc211.org/2005/gmi"
etree.register_namespace("gmi",gmi_ns)
new_root = etree.Element("{%s}MI_Metadata" % gmi_ns)
old_root = etree.parse(url).getroot()
# carry over any attributes we need
[new_root.set(k,v) for k,v in old_root.attrib.items()]
# carry over children
[new_root.append(e) for e in old_root]
return etree.tostring(new_root, encoding="UTF-8", pretty_print=True, xml_declaration=True)
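# Hedged usage sketch (not part of the module): crawl a GeoNetwork endpoint
# and print the download URL plus target filename for each ISO19139 record.
# The base URL is a placeholder.
if __name__ == '__main__':
    collector = GeoNetworkCollector('http://example.com/geonetwork')
    for url in collector.run():
        print '%s -> %s' % (url, GeoNetworkCollector.namer(url))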
| run | identifier_name |
geonetwork.py | import os
import csv
import tempfile
import codecs
from urlparse import urlsplit
from os.path import abspath  # abspath lives in os.path, not shutil
import requests
from metadown.utils.etree import etree
namespaces = {
"gmx": "http://www.isotc211.org/2005/gmx",
"gsr": "http://www.isotc211.org/2005/gsr",
"gss": "http://www.isotc211.org/2005/gss",
"gts": "http://www.isotc211.org/2005/gts",
"xs": "http://www.w3.org/2001/XMLSchema",
"gml": "http://www.opengis.net/gml/3.2",
"xlink": "http://www.w3.org/1999/xlink",
"xsi": "http://www.w3.org/2001/XMLSchema-instance",
"gco": "http://www.isotc211.org/2005/gco",
"gmd": "http://www.isotc211.org/2005/gmd",
"gmi": "http://www.isotc211.org/2005/gmi",
"srv": "http://www.isotc211.org/2005/srv",
"geonet": "http://www.fao.org/geonetwork"
}
class GeoNetworkCollector(object):
def __init__(self, base_url):
self.data = base_url + '/srv/en/csv.search?' | yield line.encode('utf-8')
def run(self):
isos = []
o, t = tempfile.mkstemp()
os.close(o)  # close the descriptor from mkstemp; the path is reopened below
with codecs.open(t, "w+", "utf-8") as h:
h.write(requests.get(self.data).text)
with codecs.open(t, "rb", "utf-8") as f:
reader = csv.DictReader(self.utf_8_encoder(f))
for row in reader:
if row.get('schema') != 'iso19139':
continue
download_url = self.download + row.get('id')
isos.append(download_url)
os.unlink(f.name)
return isos
@staticmethod
def namer(url, **kwargs):
uid = urlsplit(url).query
uid = uid[uid.index("=")+1:]
return "GeoNetwork-" + uid + ".xml"
@staticmethod
def uuid_namer(url, **kwargs):
root = etree.parse(url).getroot()
x_res = root.xpath(
'/gmd:MD_Metadata/gmd:fileIdentifier/gco:CharacterString',
namespaces=namespaces
)
uuid = "GeoNetwork-" + x_res[0].text + ".xml"
return uuid
@staticmethod
def modifier(url, **kwargs):
# translate ISO19139 to ISO19115
gmi_ns = "http://www.isotc211.org/2005/gmi"
etree.register_namespace("gmi",gmi_ns)
new_root = etree.Element("{%s}MI_Metadata" % gmi_ns)
old_root = etree.parse(url).getroot()
# carry over any attributes we need
[new_root.set(k,v) for k,v in old_root.attrib.items()]
# carry over children
[new_root.append(e) for e in old_root]
return etree.tostring(new_root, encoding="UTF-8", pretty_print=True, xml_declaration=True) | # change to use ISO with extra GeoNetwork metadata
self.download = base_url + '/srv/en/xml.metadata.get?id='
def utf_8_encoder(self, unicode_csv_data):
for line in unicode_csv_data: | random_line_split |
geonetwork.py | import os
import csv
import tempfile
import codecs
from urlparse import urlsplit
from os.path import abspath  # abspath lives in os.path, not shutil
import requests
from metadown.utils.etree import etree
namespaces = {
"gmx": "http://www.isotc211.org/2005/gmx",
"gsr": "http://www.isotc211.org/2005/gsr",
"gss": "http://www.isotc211.org/2005/gss",
"gts": "http://www.isotc211.org/2005/gts",
"xs": "http://www.w3.org/2001/XMLSchema",
"gml": "http://www.opengis.net/gml/3.2",
"xlink": "http://www.w3.org/1999/xlink",
"xsi": "http://www.w3.org/2001/XMLSchema-instance",
"gco": "http://www.isotc211.org/2005/gco",
"gmd": "http://www.isotc211.org/2005/gmd",
"gmi": "http://www.isotc211.org/2005/gmi",
"srv": "http://www.isotc211.org/2005/srv",
"geonet": "http://www.fao.org/geonetwork"
}
class GeoNetworkCollector(object):
def __init__(self, base_url):
self.data = base_url + '/srv/en/csv.search?'
# change to use ISO with extra GeoNetwork metadata
self.download = base_url + '/srv/en/xml.metadata.get?id='
def utf_8_encoder(self, unicode_csv_data):
for line in unicode_csv_data:
yield line.encode('utf-8')
def run(self):
isos = []
o, t = tempfile.mkstemp()
os.close(o)  # close the descriptor from mkstemp; the path is reopened below
with codecs.open(t, "w+", "utf-8") as h:
h.write(requests.get(self.data).text)
with codecs.open(t, "rb", "utf-8") as f:
reader = csv.DictReader(self.utf_8_encoder(f))
for row in reader:
if row.get('schema') != 'iso19139':
continue
download_url = self.download + row.get('id')
isos.append(download_url)
os.unlink(f.name)
return isos
@staticmethod
def namer(url, **kwargs):
uid = urlsplit(url).query
uid = uid[uid.index("=")+1:]
return "GeoNetwork-" + uid + ".xml"
@staticmethod
def uuid_namer(url, **kwargs):
|
@staticmethod
def modifier(url, **kwargs):
# translate ISO19139 to ISO19115
gmi_ns = "http://www.isotc211.org/2005/gmi"
etree.register_namespace("gmi",gmi_ns)
new_root = etree.Element("{%s}MI_Metadata" % gmi_ns)
old_root = etree.parse(url).getroot()
# carry over any attributes we need
[new_root.set(k,v) for k,v in old_root.attrib.items()]
# carry over children
[new_root.append(e) for e in old_root]
return etree.tostring(new_root, encoding="UTF-8", pretty_print=True, xml_declaration=True)
| root = etree.parse(url).getroot()
x_res = root.xpath(
'/gmd:MD_Metadata/gmd:fileIdentifier/gco:CharacterString',
namespaces=namespaces
)
uuid = "GeoNetwork-" + x_res[0].text + ".xml"
return uuid | identifier_body |
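# Hedged note (not part of the module): namer() keys files by the numeric
# GeoNetwork id in the URL, while uuid_namer() fetches the record and keys by
# its gmd:fileIdentifier. For example (hypothetical URL):
#   GeoNetworkCollector.namer('http://example.com/srv/en/xml.metadata.get?id=42')
#   # -> 'GeoNetwork-42.xml'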
geonetwork.py | import os
import csv
import tempfile
import codecs
from urlparse import urlsplit
from os.path import abspath  # abspath lives in os.path, not shutil
import requests
from metadown.utils.etree import etree
namespaces = {
"gmx": "http://www.isotc211.org/2005/gmx",
"gsr": "http://www.isotc211.org/2005/gsr",
"gss": "http://www.isotc211.org/2005/gss",
"gts": "http://www.isotc211.org/2005/gts",
"xs": "http://www.w3.org/2001/XMLSchema",
"gml": "http://www.opengis.net/gml/3.2",
"xlink": "http://www.w3.org/1999/xlink",
"xsi": "http://www.w3.org/2001/XMLSchema-instance",
"gco": "http://www.isotc211.org/2005/gco",
"gmd": "http://www.isotc211.org/2005/gmd",
"gmi": "http://www.isotc211.org/2005/gmi",
"srv": "http://www.isotc211.org/2005/srv",
"geonet": "http://www.fao.org/geonetwork"
}
class GeoNetworkCollector(object):
def __init__(self, base_url):
self.data = base_url + '/srv/en/csv.search?'
# change to use ISO with extra GeoNetwork metadata
self.download = base_url + '/srv/en/xml.metadata.get?id='
def utf_8_encoder(self, unicode_csv_data):
for line in unicode_csv_data:
|
def run(self):
isos = []
o, t = tempfile.mkstemp()
os.close(o)  # close the descriptor from mkstemp; the path is reopened below
with codecs.open(t, "w+", "utf-8") as h:
h.write(requests.get(self.data).text)
with codecs.open(t, "rb", "utf-8") as f:
reader = csv.DictReader(self.utf_8_encoder(f))
for row in reader:
if row.get('schema') != 'iso19139':
continue
download_url = self.download + row.get('id')
isos.append(download_url)
os.unlink(f.name)
return isos
@staticmethod
def namer(url, **kwargs):
uid = urlsplit(url).query
uid = uid[uid.index("=")+1:]
return "GeoNetwork-" + uid + ".xml"
@staticmethod
def uuid_namer(url, **kwargs):
root = etree.parse(url).getroot()
x_res = root.xpath(
'/gmd:MD_Metadata/gmd:fileIdentifier/gco:CharacterString',
namespaces=namespaces
)
uuid = "GeoNetwork-" + x_res[0].text + ".xml"
return uuid
@staticmethod
def modifier(url, **kwargs):
# translate ISO19139 to ISO19115
gmi_ns = "http://www.isotc211.org/2005/gmi"
etree.register_namespace("gmi",gmi_ns)
new_root = etree.Element("{%s}MI_Metadata" % gmi_ns)
old_root = etree.parse(url).getroot()
# carry over any attributes we need
[new_root.set(k,v) for k,v in old_root.attrib.items()]
# carry over children
[new_root.append(e) for e in old_root]
return etree.tostring(new_root, encoding="UTF-8", pretty_print=True, xml_declaration=True)
| yield line.encode('utf-8') | conditional_block |
bootstrap-collapse.js | /* =============================================================
* bootstrap-collapse.js v2.2.2
* http://twitter.github.com/bootstrap/javascript.html#collapse
* =============================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============================================================ */
!function ($) {
"use strict"; // jshint ;_;
/* COLLAPSE PUBLIC CLASS DEFINITION
* ================================ */
var Collapse = function (element, options) {
this.$element = $(element)
this.options = $.extend({}, $.fn.collapse.defaults, options)
if (this.options.parent) {
this.$parent = $(this.options.parent)
}
this.options.toggle && this.toggle()
}
Collapse.prototype = {
constructor: Collapse
, dimension: function () {
var hasWidth = this.$element.hasClass('width')
return hasWidth ? 'width' : 'height'
}
, show: function () {
var dimension
, scroll
, actives
, hasData
if (this.transitioning) return
dimension = this.dimension()
scroll = $.camelCase(['scroll', dimension].join('-'))
actives = this.$parent && this.$parent.find('> .accordion-group > .in')
if (actives && actives.length) {
hasData = actives.data('collapse')
if (hasData && hasData.transitioning) return
actives.collapse('hide')
hasData || actives.data('collapse', null)
}
this.$element[dimension](0)
this.transition('addClass', $.Event('show'), 'shown')
$.support.transition && this.$element[dimension](this.$element[0][scroll])
}
, hide: function () {
var dimension
if (this.transitioning) return
dimension = this.dimension()
this.reset(this.$element[dimension]())
this.transition('removeClass', $.Event('hide'), 'hidden')
this.$element[dimension](0)
}
, reset: function (size) {
var dimension = this.dimension()
this.$element
.removeClass('collapse')
[dimension](size || 'auto')
[0].offsetWidth
this.$element[size !== null ? 'addClass' : 'removeClass']('collapse')
return this
}
, transition: function (method, startEvent, completeEvent) {
var that = this
, complete = function () {
if (startEvent.type == 'show') that.reset()
that.transitioning = 0
that.$element.trigger(completeEvent)
}
this.$element.trigger(startEvent)
if (startEvent.isDefaultPrevented()) return
this.transitioning = 1
this.$element[method]('in')
$.support.transition && this.$element.hasClass('collapse') ?
this.$element.one($.support.transition.end, complete) :
complete()
} | }
/* COLLAPSE PLUGIN DEFINITION
* ========================== */
var old = $.fn.collapse
$.fn.collapse = function (option) {
return this.each(function () {
var $this = $(this)
, data = $this.data('collapse')
, options = typeof option == 'object' && option
if (!data) $this.data('collapse', (data = new Collapse(this, options)))
if (typeof option == 'string') data[option]()
})
}
$.fn.collapse.defaults = {
toggle: true
}
$.fn.collapse.Constructor = Collapse
/* COLLAPSE NO CONFLICT
* ==================== */
$.fn.collapse.noConflict = function () {
$.fn.collapse = old
return this
}
/* COLLAPSE DATA-API
* ================= */
$(document).on('click.collapse.data-api', '[data-toggle=collapse]', function (e) {
var $this = $(this), href
, target = $this.attr('data-target')
|| e.preventDefault()
|| (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') //strip for ie7
, option = $(target).data('collapse') ? 'toggle' : $this.data()
$this[$(target).hasClass('in') ? 'addClass' : 'removeClass']('collapsed')
$(target).collapse(option)
})
}(window.jQuery); |
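// Hedged usage sketch (not part of the plugin; element IDs are made up):
// markup-driven: <a data-toggle="collapse" data-target="#details">toggle</a>
// programmatic:
//   $('#details').collapse({ toggle: false })  // initialize without toggling
//   $('#details').collapse('show')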
, toggle: function () {
this[this.$element.hasClass('in') ? 'hide' : 'show']()
}
| random_line_split |
Client.js | const uuid = require('uuid/v4')
const zmq = require('zeromq')
const events = require('events')
const Handle = function (id, callback) {
this.id = id
this.callback = callback
}
Handle.prototype = Object.create(events.EventEmitter.prototype)
/**
* A client submits tasks to a broker.
*/
const Client = function (options = {}) {
this._message = this._message.bind(this)
this.options = options
this.handles = {}
this._connect()
}
Object.assign(Client.prototype, {
/**
* Closes the connection to the broker.
*/
close () { this.socket.close() },
/**
* Submits a task with the given name and data.
*/
submitTask (name, data, callback) {
const handle = this._addHandle(uuid(), callback)
const payload = JSON.stringify({ id: handle.id, request: name, data })
this.socket.send([Buffer.alloc(0), payload])
return handle
},
_connect () {
const endpoint = this.options.router || 'ipc:///tmp/bokeh-router'
this.socket = zmq.socket('dealer')
this.socket.on('message', this._message)
this.socket.connect(endpoint)
},
_message (...args) {
// zmq 'dealer' sockets deliver multipart messages; the payload is the last frame
const adjustedLength = Math.max(args.length, 1)
const payload = args[adjustedLength - 1]
const task = JSON.parse(payload)
switch (task.response) {
case 'submitted':
this._submitted(task)
break
case 'completed':
this._completed(task)
break
case 'failed':
this._failed(task)
break
default:
throw new Error(`Unknown response '${task.response}'`)
}
},
_submitted (task) {
const handle = this._getHandle(task.id)
handle.emit('submit')
},
_completed (task) {
const handle = this._getHandle(task.id)
if (typeof handle.callback === 'function') {
handle.callback(null, task.data)
}
handle.emit('complete', task.data)
this._removeHandle(handle)
},
_failed (task) {
const handle = this._getHandle(task.id)
if (typeof handle.callback === 'function') {
handle.callback(task.data)
}
if (handle.listeners('error').length !== 0) { handle.emit('error', task.data) }
this._removeHandle(handle)
},
_getHandle (id) | ,
_addHandle (id, callback) {
const handle = new Handle(id, callback)
this.handles[id] = handle
return handle
},
_removeHandle (handle) {
delete this.handles[handle.id]
}
})
module.exports = Client
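// Hedged usage sketch (run from a separate file; the task name, payload and
// router endpoint are made up):
// const Client = require('./Client')
// const client = new Client({ router: 'ipc:///tmp/bokeh-router' })
// const handle = client.submitTask('thumbnail', { src: 'a.png' }, (err, data) => {
//   if (err) console.error('task failed', err)
//   else console.log('task result', data)
// })
// handle.on('submit', () => console.log('broker accepted', handle.id))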
| { return this.handles[id] } | identifier_body |
Client.js | const uuid = require('uuid/v4')
const zmq = require('zeromq')
const events = require('events')
const Handle = function (id, callback) {
this.id = id
this.callback = callback
}
Handle.prototype = Object.create(events.EventEmitter.prototype)
/**
* A client submits tasks to a broker.
*/
const Client = function (options = {}) {
this._message = this._message.bind(this)
this.options = options
this.handles = {}
this._connect()
}
Object.assign(Client.prototype, {
/**
* Closes the connection to the broker.
*/
close () { this.socket.close() },
/**
* Submits a task with the given name and data.
*/
submitTask (name, data, callback) {
const handle = this._addHandle(uuid(), callback)
const payload = JSON.stringify({ id: handle.id, request: name, data })
this.socket.send([Buffer.alloc(0), payload])
return handle
},
_connect () {
const endpoint = this.options.router || 'ipc:///tmp/bokeh-router'
this.socket = zmq.socket('dealer')
this.socket.on('message', this._message)
this.socket.connect(endpoint)
},
_message (...args) {
const adjustedLength = Math.max(args.length, 1)
const payload = args[adjustedLength - 1]
const task = JSON.parse(payload)
switch (task.response) {
case 'submitted':
this._submitted(task)
break
case 'completed':
this._completed(task)
break
case 'failed':
this._failed(task)
break
default:
throw new Error(`Unknown response '${task.response}'`)
}
},
_submitted (task) {
const handle = this._getHandle(task.id)
handle.emit('submit')
},
_completed (task) {
const handle = this._getHandle(task.id)
if (typeof handle.callback === 'function') {
handle.callback(null, task.data)
}
handle.emit('complete', task.data)
this._removeHandle(handle)
},
_failed (task) {
const handle = this._getHandle(task.id)
if (typeof handle.callback === 'function') |
if (handle.listeners('error').length !== 0) { handle.emit('error', task.data) }
this._removeHandle(handle)
},
_getHandle (id) { return this.handles[id] },
_addHandle (id, callback) {
const handle = new Handle(id, callback)
this.handles[id] = handle
return handle
},
_removeHandle (handle) {
delete this.handles[handle.id]
}
})
module.exports = Client
| {
handle.callback(task.data)
} | conditional_block |
Client.js | const uuid = require('uuid/v4')
const zmq = require('zeromq')
const events = require('events')
const Handle = function (id, callback) {
this.id = id
this.callback = callback
}
Handle.prototype = Object.create(events.EventEmitter.prototype)
/**
* A client submits tasks to a broker.
*/
const Client = function (options = {}) {
this._message = this._message.bind(this)
this.options = options
this.handles = {}
this._connect()
}
Object.assign(Client.prototype, {
/**
* Closes the connection to the broker.
*/
close () { this.socket.close() },
/**
* Submits a task with the given name and data.
*/
submitTask (name, data, callback) {
const handle = this._addHandle(uuid(), callback)
const payload = JSON.stringify({ id: handle.id, request: name, data })
this.socket.send([Buffer.alloc(0), payload])
return handle
},
_connect () {
const endpoint = this.options.router || 'ipc:///tmp/bokeh-router'
this.socket = zmq.socket('dealer')
this.socket.on('message', this._message)
this.socket.connect(endpoint)
},
_message (...args) {
const adjustedLength = Math.max(args.length, 1)
const payload = args[adjustedLength - 1]
const task = JSON.parse(payload)
switch (task.response) {
case 'submitted':
this._submitted(task)
break
case 'completed':
this._completed(task)
break
case 'failed':
this._failed(task)
break
default:
throw new Error(`Unknown response '${task.response}'`)
}
},
_submitted (task) {
const handle = this._getHandle(task.id)
handle.emit('submit')
},
_completed (task) {
const handle = this._getHandle(task.id)
if (typeof handle.callback === 'function') {
handle.callback(null, task.data)
}
handle.emit('complete', task.data)
this._removeHandle(handle)
},
| (task) {
const handle = this._getHandle(task.id)
if (typeof handle.callback === 'function') {
handle.callback(task.data)
}
if (handle.listeners('error').length !== 0) { handle.emit('error', task.data) }
this._removeHandle(handle)
},
_getHandle (id) { return this.handles[id] },
_addHandle (id, callback) {
const handle = new Handle(id, callback)
this.handles[id] = handle
return handle
},
_removeHandle (handle) {
delete this.handles[handle.id]
}
})
module.exports = Client
| _failed | identifier_name |
Client.js | const uuid = require('uuid/v4')
const zmq = require('zeromq')
const events = require('events')
const Handle = function (id, callback) {
this.id = id
this.callback = callback
}
Handle.prototype = Object.create(events.EventEmitter.prototype)
/**
* A client submits tasks to a broker.
*/
const Client = function (options = {}) {
this._message = this._message.bind(this)
this.options = options
this.handles = {}
this._connect()
}
Object.assign(Client.prototype, {
/**
* Closes the connection to the broker.
*/
close () { this.socket.close() },
/**
* Submits a task with the given name and data.
*/
submitTask (name, data, callback) {
const handle = this._addHandle(uuid(), callback)
const payload = JSON.stringify({ id: handle.id, request: name, data })
this.socket.send([Buffer.alloc(0), payload])
return handle
},
_connect () {
const endpoint = this.options.router || 'ipc:///tmp/bokeh-router'
this.socket = zmq.socket('dealer')
this.socket.on('message', this._message)
this.socket.connect(endpoint) | _message (...args) {
const adjustedLength = Math.max(args.length, 1)
const payload = args[adjustedLength - 1]
const task = JSON.parse(payload)
switch (task.response) {
case 'submitted':
this._submitted(task)
break
case 'completed':
this._completed(task)
break
case 'failed':
this._failed(task)
break
default:
throw new Error(`Unknown response '${task.response}'`)
}
},
_submitted (task) {
const handle = this._getHandle(task.id)
handle.emit('submit')
},
_completed (task) {
const handle = this._getHandle(task.id)
if (typeof handle.callback === 'function') {
handle.callback(null, task.data)
}
handle.emit('complete', task.data)
this._removeHandle(handle)
},
_failed (task) {
const handle = this._getHandle(task.id)
if (typeof handle.callback === 'function') {
handle.callback(task.data)
}
if (handle.listeners('error').length !== 0) { handle.emit('error', task.data) }
this._removeHandle(handle)
},
_getHandle (id) { return this.handles[id] },
_addHandle (id, callback) {
const handle = new Handle(id, callback)
this.handles[id] = handle
return handle
},
_removeHandle (handle) {
delete this.handles[handle.id]
}
})
module.exports = Client | },
| random_line_split |
regions-mock-tcx.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-fast `use` standards don't resolve
// Test a sample usage pattern for regions. Makes use of the
// following features:
//
// - Multiple lifetime parameters
// - Arenas
extern mod extra;
use extra::arena;
use extra::arena::Arena;
use std::hashmap::HashMap;
use std::cast;
use std::libc;
use std::mem;
type Type<'tcx> = &'tcx TypeStructure<'tcx>;
#[deriving(Eq)]
enum TypeStructure<'tcx> {
TypeInt,
TypeFunction(Type<'tcx>, Type<'tcx>),
}
struct TypeContext<'tcx, 'ast> {
ty_arena: &'tcx Arena,
types: ~[Type<'tcx>],
type_table: HashMap<NodeId, Type<'tcx>>,
ast_arena: &'ast Arena,
ast_counter: uint,
}
impl<'tcx,'ast> TypeContext<'tcx, 'ast> {
fn | (ty_arena: &'tcx Arena, ast_arena: &'ast Arena)
-> TypeContext<'tcx, 'ast> {
TypeContext { ty_arena: ty_arena,
types: ~[],
type_table: HashMap::new(),
ast_arena: ast_arena,
ast_counter: 0 }
}
fn add_type(&mut self, s: TypeStructure<'tcx>) -> Type<'tcx> {
for &ty in self.types.iter() {
if *ty == s {
return ty;
}
}
let ty = self.ty_arena.alloc(|| s);
self.types.push(ty);
ty
}
fn set_type(&mut self, id: NodeId, ty: Type<'tcx>) -> Type<'tcx> {
self.type_table.insert(id, ty);
ty
}
fn ast(&mut self, a: AstKind<'ast>) -> Ast<'ast> {
let id = self.ast_counter;
self.ast_counter += 1;
self.ast_arena.alloc(|| AstStructure { id: NodeId {id:id}, kind: a })
}
}
#[deriving(Eq, IterBytes)]
struct NodeId {
id: uint
}
type Ast<'ast> = &'ast AstStructure<'ast>;
struct AstStructure<'ast> {
id: NodeId,
kind: AstKind<'ast>
}
enum AstKind<'ast> {
ExprInt,
ExprVar(uint),
ExprLambda(Ast<'ast>),
}
fn compute_types<'tcx,'ast>(tcx: &mut TypeContext<'tcx,'ast>,
ast: Ast<'ast>) -> Type<'tcx>
{
match ast.kind {
ExprInt | ExprVar(_) => {
let ty = tcx.add_type(TypeInt);
tcx.set_type(ast.id, ty)
}
ExprLambda(ast) => {
let arg_ty = tcx.add_type(TypeInt);
let body_ty = compute_types(tcx, ast);
let lambda_ty = tcx.add_type(TypeFunction(arg_ty, body_ty));
tcx.set_type(ast.id, lambda_ty)
}
}
}
pub fn main() {
let ty_arena = arena::Arena::new();
let ast_arena = arena::Arena::new();
let mut tcx = TypeContext::new(&ty_arena, &ast_arena);
let ast = tcx.ast(ExprInt);
let ty = compute_types(&mut tcx, ast);
assert_eq!(*ty, TypeInt);
}
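// Hedged follow-up sketch (not in the original test): the same context can
// type-check the identity lambda; `compute_types` interns int -> int once.
fn lambda_demo() {
    let ty_arena = arena::Arena::new();
    let ast_arena = arena::Arena::new();
    let mut tcx = TypeContext::new(&ty_arena, &ast_arena);
    let body = tcx.ast(ExprVar(0));
    let lam = tcx.ast(ExprLambda(body));
    match *compute_types(&mut tcx, lam) {
        TypeFunction(arg, ret) => {
            assert_eq!(*arg, TypeInt);
            assert_eq!(*ret, TypeInt);
        }
        _ => fail!("expected a function type")
    }
}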
| new | identifier_name |
regions-mock-tcx.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-fast `use` standards don't resolve
// Test a sample usage pattern for regions. Makes use of the
// following features:
//
// - Multiple lifetime parameters
// - Arenas
extern mod extra;
use extra::arena;
use extra::arena::Arena;
use std::hashmap::HashMap;
use std::cast;
use std::libc;
use std::mem;
type Type<'tcx> = &'tcx TypeStructure<'tcx>;
#[deriving(Eq)]
enum TypeStructure<'tcx> {
TypeInt,
TypeFunction(Type<'tcx>, Type<'tcx>),
}
struct TypeContext<'tcx, 'ast> {
ty_arena: &'tcx Arena,
types: ~[Type<'tcx>],
type_table: HashMap<NodeId, Type<'tcx>>,
ast_arena: &'ast Arena,
ast_counter: uint,
}
impl<'tcx,'ast> TypeContext<'tcx, 'ast> {
fn new(ty_arena: &'tcx Arena, ast_arena: &'ast Arena)
-> TypeContext<'tcx, 'ast> {
TypeContext { ty_arena: ty_arena,
types: ~[],
type_table: HashMap::new(),
ast_arena: ast_arena,
ast_counter: 0 }
}
fn add_type(&mut self, s: TypeStructure<'tcx>) -> Type<'tcx> {
for &ty in self.types.iter() {
if *ty == s {
return ty;
}
}
let ty = self.ty_arena.alloc(|| s);
self.types.push(ty);
ty
}
fn set_type(&mut self, id: NodeId, ty: Type<'tcx>) -> Type<'tcx> {
self.type_table.insert(id, ty);
ty
}
fn ast(&mut self, a: AstKind<'ast>) -> Ast<'ast> {
let id = self.ast_counter;
self.ast_counter += 1;
self.ast_arena.alloc(|| AstStructure { id: NodeId {id:id}, kind: a })
}
}
#[deriving(Eq, IterBytes)]
struct NodeId {
id: uint
}
type Ast<'ast> = &'ast AstStructure<'ast>;
struct AstStructure<'ast> {
id: NodeId,
kind: AstKind<'ast>
}
enum AstKind<'ast> {
ExprInt,
ExprVar(uint),
ExprLambda(Ast<'ast>),
}
fn compute_types<'tcx,'ast>(tcx: &mut TypeContext<'tcx,'ast>,
ast: Ast<'ast>) -> Type<'tcx>
{
match ast.kind {
ExprInt | ExprVar(_) => {
let ty = tcx.add_type(TypeInt);
tcx.set_type(ast.id, ty)
}
ExprLambda(ast) => {
let arg_ty = tcx.add_type(TypeInt);
let body_ty = compute_types(tcx, ast);
let lambda_ty = tcx.add_type(TypeFunction(arg_ty, body_ty));
tcx.set_type(ast.id, lambda_ty)
} | }
pub fn main() {
let ty_arena = arena::Arena::new();
let ast_arena = arena::Arena::new();
let mut tcx = TypeContext::new(&ty_arena, &ast_arena);
let ast = tcx.ast(ExprInt);
let ty = compute_types(&mut tcx, ast);
assert_eq!(*ty, TypeInt);
} | } | random_line_split |
regions-mock-tcx.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-fast `use` standards don't resolve
// Test a sample usage pattern for regions. Makes use of the
// following features:
//
// - Multiple lifetime parameters
// - Arenas
extern mod extra;
use extra::arena;
use extra::arena::Arena;
use std::hashmap::HashMap;
use std::cast;
use std::libc;
use std::mem;
type Type<'tcx> = &'tcx TypeStructure<'tcx>;
#[deriving(Eq)]
enum TypeStructure<'tcx> {
TypeInt,
TypeFunction(Type<'tcx>, Type<'tcx>),
}
struct TypeContext<'tcx, 'ast> {
ty_arena: &'tcx Arena,
types: ~[Type<'tcx>],
type_table: HashMap<NodeId, Type<'tcx>>,
ast_arena: &'ast Arena,
ast_counter: uint,
}
impl<'tcx,'ast> TypeContext<'tcx, 'ast> {
fn new(ty_arena: &'tcx Arena, ast_arena: &'ast Arena)
-> TypeContext<'tcx, 'ast> {
TypeContext { ty_arena: ty_arena,
types: ~[],
type_table: HashMap::new(),
ast_arena: ast_arena,
ast_counter: 0 }
}
fn add_type(&mut self, s: TypeStructure<'tcx>) -> Type<'tcx> {
for &ty in self.types.iter() {
if *ty == s {
return ty;
}
}
let ty = self.ty_arena.alloc(|| s);
self.types.push(ty);
ty
}
fn set_type(&mut self, id: NodeId, ty: Type<'tcx>) -> Type<'tcx> {
self.type_table.insert(id, ty);
ty
}
fn ast(&mut self, a: AstKind<'ast>) -> Ast<'ast> {
let id = self.ast_counter;
self.ast_counter += 1;
self.ast_arena.alloc(|| AstStructure { id: NodeId {id:id}, kind: a })
}
}
#[deriving(Eq, IterBytes)]
struct NodeId {
id: uint
}
type Ast<'ast> = &'ast AstStructure<'ast>;
struct AstStructure<'ast> {
id: NodeId,
kind: AstKind<'ast>
}
enum AstKind<'ast> {
ExprInt,
ExprVar(uint),
ExprLambda(Ast<'ast>),
}
fn compute_types<'tcx,'ast>(tcx: &mut TypeContext<'tcx,'ast>,
ast: Ast<'ast>) -> Type<'tcx>
{
match ast.kind {
ExprInt | ExprVar(_) => |
ExprLambda(ast) => {
let arg_ty = tcx.add_type(TypeInt);
let body_ty = compute_types(tcx, ast);
let lambda_ty = tcx.add_type(TypeFunction(arg_ty, body_ty));
tcx.set_type(ast.id, lambda_ty)
}
}
}
pub fn main() {
let ty_arena = arena::Arena::new();
let ast_arena = arena::Arena::new();
let mut tcx = TypeContext::new(&ty_arena, &ast_arena);
let ast = tcx.ast(ExprInt);
let ty = compute_types(&mut tcx, ast);
assert_eq!(*ty, TypeInt);
}
| {
let ty = tcx.add_type(TypeInt);
tcx.set_type(ast.id, ty)
} | conditional_block |
2357b6b3d76_.py | """Add service_type and source columns to citizen_complaints.
Revision ID: 2357b6b3d76
Revises: fecca96b9d
Create Date: 2015-10-27 10:26:52.074526
"""
# revision identifiers, used by Alembic.
revision = '2357b6b3d76'
down_revision = 'fecca96b9d'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
|
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('citizen_complaints', 'source')
op.drop_column('citizen_complaints', 'service_type')
### end Alembic commands ###
| op.add_column('citizen_complaints', sa.Column('service_type', sa.String(length=255), nullable=True))
op.add_column('citizen_complaints', sa.Column('source', sa.String(length=255), nullable=True))
### end Alembic commands ### | identifier_body |
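# Roughly the DDL this upgrade emits (illustrative; exact SQL depends on the
# configured dialect):
#   ALTER TABLE citizen_complaints ADD COLUMN service_type VARCHAR(255);
#   ALTER TABLE citizen_complaints ADD COLUMN source VARCHAR(255);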
2357b6b3d76_.py | """Add service_type and source columns to citizen_complaints.
Revision ID: 2357b6b3d76
Revises: fecca96b9d
Create Date: 2015-10-27 10:26:52.074526
"""
# revision identifiers, used by Alembic.
revision = '2357b6b3d76'
down_revision = 'fecca96b9d'
from alembic import op
import sqlalchemy as sa
def | ():
### commands auto generated by Alembic - please adjust! ###
op.add_column('citizen_complaints', sa.Column('service_type', sa.String(length=255), nullable=True))
op.add_column('citizen_complaints', sa.Column('source', sa.String(length=255), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('citizen_complaints', 'source')
op.drop_column('citizen_complaints', 'service_type')
### end Alembic commands ###
| upgrade | identifier_name |
2357b6b3d76_.py | """Add service_type and source columns to citizen_complaints.
Revision ID: 2357b6b3d76 |
"""
# revision identifiers, used by Alembic.
revision = '2357b6b3d76'
down_revision = 'fecca96b9d'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('citizen_complaints', sa.Column('service_type', sa.String(length=255), nullable=True))
op.add_column('citizen_complaints', sa.Column('source', sa.String(length=255), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('citizen_complaints', 'source')
op.drop_column('citizen_complaints', 'service_type')
### end Alembic commands ### | Revises: fecca96b9d
Create Date: 2015-10-27 10:26:52.074526 | random_line_split |
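# Typical CLI invocation for this revision (standard Alembic workflow):
#   alembic upgrade 2357b6b3d76    # apply
#   alembic downgrade fecca96b9d   # revert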
RegressionFunction.js | var clover = new Object();
| // JSON: {classes : [{name, id, sl, el, methods : [{sl, el}, ...]}, ...]}
clover.pageData = {"classes":[{"el":119,"id":216512,"methods":[{"el":48,"sc":2,"sl":47},{"el":53,"sc":2,"sl":50},{"el":58,"sc":2,"sl":55},{"el":62,"sc":2,"sl":60},{"el":66,"sc":2,"sl":64},{"el":71,"sc":2,"sl":68},{"el":76,"sc":2,"sl":73},{"el":81,"sc":2,"sl":78},{"el":90,"sc":2,"sl":83},{"el":108,"sc":2,"sl":92},{"el":113,"sc":2,"sl":110},{"el":118,"sc":2,"sl":115}],"name":"RegressionFunction","sl":37}]}
// JSON: {test_ID : {"methods": [ID1, ID2, ID3...], "name" : "testXXX() void"}, ...};
clover.testTargets = {}
// JSON: { lines : [{tests : [testid1, testid2, testid3, ...]}, ...]};
clover.srcFileLines = [[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []] | random_line_split |
|
install-plugins.ts | import { reporter } from "./utils/reporter"
import path from "path"
import { PluginConfigMap } from "."
import { requireResolve } from "./utils/require-utils"
const resolveGatsbyPath = (rootPath: string): string | never => {
try {
const gatsbyPath = requireResolve(`gatsby/package.json`, {
paths: [rootPath],
})
if (!gatsbyPath) throw new Error()
return gatsbyPath
} catch (e) {
throw new Error(
`Could not find "gatsby" in ${rootPath}. Perhaps it wasn't installed properly?`
)
}
}
const resolveGatsbyCliPath = (
rootPath: string,
gatsbyPath: string
): string | never => {
try {
let installPluginCommand
try {
installPluginCommand = requireResolve(
`gatsby-cli/lib/handlers/plugin-add`,
{
// Try to find gatsby-cli in the site root, or in the site's gatsby dir
paths: [rootPath, path.dirname(gatsbyPath)],
}
)
} catch (e) {
// We'll error out later
}
try {
if (!installPluginCommand) {
// Older location
installPluginCommand = requireResolve(`gatsby-cli/lib/plugin-add`, {
paths: [rootPath, path.dirname(gatsbyPath)],
})
}
} catch (e) {
// We'll error out later
}
if (!installPluginCommand) {
throw new Error()
}
return installPluginCommand
} catch (e) {
throw new Error(
`Could not find a suitable version of gatsby-cli. Please report this issue at https://www.github.com/gatsbyjs/gatsby/issues`
)
}
}
const addPluginsToProject = async (
installPluginCommand: string,
plugins: Array<string>,
pluginOptions: PluginConfigMap = {},
rootPath: string,
packages: Array<string>
): Promise<void> => {
try {
const { addPlugins } = require(installPluginCommand)
await addPlugins(plugins, pluginOptions, rootPath, packages)
} catch (e) {
throw new Error(
`Something went wrong when trying to add the plugins to the project: ${
(e as Error).message
}`
)
}
}
export async function installPlugins(
plugins: Array<string>,
pluginOptions: PluginConfigMap = {},
rootPath: string,
packages: Array<string>
): Promise<void> | {
try {
const gatsbyPath = resolveGatsbyPath(rootPath)
const installPluginCommand = resolveGatsbyCliPath(rootPath, gatsbyPath)
await addPluginsToProject(
installPluginCommand,
plugins,
pluginOptions,
rootPath,
packages
)
} catch (e) {
reporter.error((e as Error).message)
return
}
} | identifier_body |
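// Hedged usage sketch (not part of the module): the plugin name and options
// are illustrative; rootPath would normally be the site directory.
async function demoInstall(): Promise<void> {
  await installPlugins(
    [`gatsby-plugin-sharp`],
    { "gatsby-plugin-sharp": {} },
    process.cwd(),
    [`gatsby-plugin-sharp`]
  )
}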
|
install-plugins.ts | import { reporter } from "./utils/reporter"
import path from "path"
import { PluginConfigMap } from "."
import { requireResolve } from "./utils/require-utils"
const resolveGatsbyPath = (rootPath: string): string | never => {
try {
const gatsbyPath = requireResolve(`gatsby/package.json`, {
paths: [rootPath],
})
if (!gatsbyPath) throw new Error()
return gatsbyPath
} catch (e) {
throw new Error(
`Could not find "gatsby" in ${rootPath}. Perhaps it wasn't installed properly?`
)
}
}
const resolveGatsbyCliPath = (
rootPath: string,
gatsbyPath: string
): string | never => {
try {
let installPluginCommand
try {
installPluginCommand = requireResolve(
`gatsby-cli/lib/handlers/plugin-add`,
{
// Try to find gatsby-cli in the site root, or in the site's gatsby dir
paths: [rootPath, path.dirname(gatsbyPath)],
}
)
} catch (e) {
// We'll error out later
}
try {
if (!installPluginCommand) {
// Older location
installPluginCommand = requireResolve(`gatsby-cli/lib/plugin-add`, {
paths: [rootPath, path.dirname(gatsbyPath)],
})
}
} catch (e) {
// We'll error out later
}
if (!installPluginCommand) {
throw new Error()
}
return installPluginCommand
} catch (e) {
throw new Error(
`Could not find a suitable version of gatsby-cli. Please report this issue at https://www.github.com/gatsbyjs/gatsby/issues`
)
}
}
const addPluginsToProject = async (
installPluginCommand: string,
plugins: Array<string>,
pluginOptions: PluginConfigMap = {},
rootPath: string,
packages: Array<string>
): Promise<void> => {
try {
const { addPlugins } = require(installPluginCommand)
await addPlugins(plugins, pluginOptions, rootPath, packages)
} catch (e) {
throw new Error(
`Something went wrong when trying to add the plugins to the project: ${
(e as Error).message
}`
)
}
}
export async function | (
plugins: Array<string>,
pluginOptions: PluginConfigMap = {},
rootPath: string,
packages: Array<string>
): Promise<void> {
try {
const gatsbyPath = resolveGatsbyPath(rootPath)
const installPluginCommand = resolveGatsbyCliPath(rootPath, gatsbyPath)
await addPluginsToProject(
installPluginCommand,
plugins,
pluginOptions,
rootPath,
packages
)
} catch (e) {
reporter.error((e as Error).message)
return
}
}
| installPlugins | identifier_name |
install-plugins.ts | import { reporter } from "./utils/reporter"
import path from "path"
import { PluginConfigMap } from "."
import { requireResolve } from "./utils/require-utils"
const resolveGatsbyPath = (rootPath: string): string | never => {
try {
const gatsbyPath = requireResolve(`gatsby/package.json`, {
paths: [rootPath],
})
if (!gatsbyPath) throw new Error()
return gatsbyPath
} catch (e) {
throw new Error(
`Could not find "gatsby" in ${rootPath}. Perhaps it wasn't installed properly?`
)
}
}
const resolveGatsbyCliPath = (
rootPath: string,
gatsbyPath: string
): string | never => {
try {
let installPluginCommand
try {
installPluginCommand = requireResolve(
`gatsby-cli/lib/handlers/plugin-add`,
{
// Try to find gatsby-cli in the site root, or in the site's gatsby dir
paths: [rootPath, path.dirname(gatsbyPath)],
}
)
} catch (e) {
// We'll error out later
}
try {
if (!installPluginCommand) {
// Older location
installPluginCommand = requireResolve(`gatsby-cli/lib/plugin-add`, {
paths: [rootPath, path.dirname(gatsbyPath)],
})
}
} catch (e) {
// We'll error out later
}
if (!installPluginCommand) |
return installPluginCommand
} catch (e) {
throw new Error(
`Could not find a suitable version of gatsby-cli. Please report this issue at https://www.github.com/gatsbyjs/gatsby/issues`
)
}
}
const addPluginsToProject = async (
installPluginCommand: string,
plugins: Array<string>,
pluginOptions: PluginConfigMap = {},
rootPath: string,
packages: Array<string>
): Promise<void> => {
try {
const { addPlugins } = require(installPluginCommand)
await addPlugins(plugins, pluginOptions, rootPath, packages)
} catch (e) {
throw new Error(
`Something went wrong when trying to add the plugins to the project: ${
(e as Error).message
}`
)
}
}
export async function installPlugins(
plugins: Array<string>,
pluginOptions: PluginConfigMap = {},
rootPath: string,
packages: Array<string>
): Promise<void> {
try {
const gatsbyPath = resolveGatsbyPath(rootPath)
const installPluginCommand = resolveGatsbyCliPath(rootPath, gatsbyPath)
await addPluginsToProject(
installPluginCommand,
plugins,
pluginOptions,
rootPath,
packages
)
} catch (e) {
reporter.error((e as Error).message)
return
}
}
| {
throw new Error()
} | conditional_block |
install-plugins.ts | import { reporter } from "./utils/reporter"
import path from "path"
import { PluginConfigMap } from "."
import { requireResolve } from "./utils/require-utils"
const resolveGatsbyPath = (rootPath: string): string | never => {
try {
const gatsbyPath = requireResolve(`gatsby/package.json`, {
paths: [rootPath],
})
if (!gatsbyPath) throw new Error()
return gatsbyPath
} catch (e) {
throw new Error(
`Could not find "gatsby" in ${rootPath}. Perhaps it wasn't installed properly?`
)
}
}
const resolveGatsbyCliPath = (
rootPath: string,
gatsbyPath: string
): string | never => {
try {
let installPluginCommand | try {
installPluginCommand = requireResolve(
`gatsby-cli/lib/handlers/plugin-add`,
{
// Try to find gatsby-cli in the site root, or in the site's gatsby dir
paths: [rootPath, path.dirname(gatsbyPath)],
}
)
} catch (e) {
// We'll error out later
}
try {
if (!installPluginCommand) {
// Older location
console.log(`looking in old place`)
installPluginCommand = requireResolve(`gatsby-cli/lib/plugin-add`, {
paths: [rootPath, path.dirname(gatsbyPath)],
})
}
} catch (e) {
// We'll error out later
}
if (!installPluginCommand) {
throw new Error()
}
return installPluginCommand
} catch (e) {
throw new Error(
`Could not find a suitable version of gatsby-cli. Please report this issue at https://www.github.com/gatsbyjs/gatsby/issues`
)
}
}
const addPluginsToProject = async (
installPluginCommand: string,
plugins: Array<string>,
pluginOptions: PluginConfigMap = {},
rootPath: string,
packages: Array<string>
): Promise<void> => {
try {
const { addPlugins } = require(installPluginCommand)
await addPlugins(plugins, pluginOptions, rootPath, packages)
} catch (e) {
throw new Error(
`Something went wrong when trying to add the plugins to the project: ${
(e as Error).message
}`
)
}
}
export async function installPlugins(
plugins: Array<string>,
pluginOptions: PluginConfigMap = {},
rootPath: string,
packages: Array<string>
): Promise<void> {
try {
const gatsbyPath = resolveGatsbyPath(rootPath)
const installPluginCommand = resolveGatsbyCliPath(rootPath, gatsbyPath)
await addPluginsToProject(
installPluginCommand,
plugins,
pluginOptions,
rootPath,
packages
)
} catch (e) {
reporter.error((e as Error).message)
return
}
} | random_line_split |
|
unix.rs | use super::RW;
use super::evented::{Evented, EventedImpl, MioAdapter};
use std::io;
use std::path::Path;
use std::os::unix::io::RawFd;
use mio_orig;
/// Unix pipe reader
pub type PipeReader = MioAdapter<mio_orig::unix::PipeReader>;
/// Unix pipe writer
pub type PipeWriter = MioAdapter<mio_orig::unix::PipeWriter>;
/// Unix listener
pub type UnixListener = MioAdapter<mio_orig::unix::UnixListener>;
impl UnixListener {
/// Bind to a port
pub fn bind<P: AsRef<Path> + ?Sized>(addr: &P) -> io::Result<Self> |
/// Try cloning the socket descriptor.
pub fn try_clone(&self) -> io::Result<Self> {
self.shared().io_ref().try_clone().map(MioAdapter::new)
}
}
/// Unix socket
pub type UnixSocket = MioAdapter<mio_orig::unix::UnixSocket>;
impl UnixSocket {
/// Returns a new, unbound, Unix domain socket
pub fn stream() -> io::Result<UnixSocket> {
mio_orig::unix::UnixSocket::stream().map(MioAdapter::new)
}
/// Connect the socket to the specified address
pub fn connect<P: AsRef<Path> + ?Sized>(self, addr: &P) -> io::Result<(UnixStream, bool)> {
self.shared()
.io_ref()
.try_clone()
.and_then(|t| mio_orig::unix::UnixSocket::connect(t, addr))
.map(|(t, b)| (MioAdapter::new(t), b))
}
/// Bind the socket to the specified address
pub fn bind<P: AsRef<Path> + ?Sized>(&self, addr: &P) -> io::Result<()> {
self.shared().io_ref().bind(addr)
}
/// Clone
pub fn try_clone(&self) -> io::Result<Self> {
self.shared().io_ref().try_clone().map(MioAdapter::new)
}
}
/// Unix stream
pub type UnixStream = MioAdapter<mio_orig::unix::UnixStream>;
impl UnixStream {
/// Connect UnixStream to `path`
pub fn connect<P: AsRef<Path> + ?Sized>(path: &P) -> io::Result<UnixStream> {
mio_orig::unix::UnixStream::connect(path).map(MioAdapter::new)
}
/// Clone
pub fn try_clone(&self) -> io::Result<Self> {
self.shared().io_ref().try_clone().map(MioAdapter::new)
}
/// Try reading data into a buffer.
///
/// This will not block.
pub fn try_read_recv_fd(&mut self,
buf: &mut [u8])
-> io::Result<Option<(usize, Option<RawFd>)>> {
self.shared().io_mut().try_read_recv_fd(buf)
}
/// Block on read.
pub fn read_recv_fd(&mut self, buf: &mut [u8]) -> io::Result<(usize, Option<RawFd>)> {
loop {
let res = self.try_read_recv_fd(buf);
match res {
Ok(None) => self.block_on(RW::read()),
Ok(Some(r)) => {
return Ok(r);
}
Err(e) => return Err(e),
}
}
}
/// Try writing data from the buffer.
///
/// This will not block.
pub fn try_write_send_fd(&self, buf: &[u8], fd: RawFd) -> io::Result<Option<usize>> {
self.shared().io_mut().try_write_send_fd(buf, fd)
}
/// Block on write
pub fn write_send_fd(&mut self, buf: &[u8], fd: RawFd) -> io::Result<usize> {
loop {
let res = self.try_write_send_fd(buf, fd);
match res {
Ok(None) => self.block_on(RW::write()),
Ok(Some(r)) => {
return Ok(r);
}
Err(e) => return Err(e),
}
}
}
}
/// Create a pair of unix pipe (reader and writer)
pub fn pipe() -> io::Result<(PipeReader, PipeWriter)> {
let (raw_reader, raw_writer) = try!(mio_orig::unix::pipe());
Ok((MioAdapter::new(raw_reader), MioAdapter::new(raw_writer)))
}
| {
mio_orig::unix::UnixListener::bind(addr).map(MioAdapter::new)
} | identifier_body |
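// Hedged usage sketch (not part of the module; assumes a mioco coroutine
// context and that the adapters expose std-style Read/Write):
// let (mut rx, mut tx) = pipe().unwrap();
// let _ = tx.write(b"ping").unwrap();
// let mut buf = [0u8; 4];
// let n = rx.read(&mut buf).unwrap();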
unix.rs | use super::RW;
use super::evented::{Evented, EventedImpl, MioAdapter};
use std::io;
use std::path::Path;
use std::os::unix::io::RawFd;
use mio_orig;
/// Unix pipe reader
pub type PipeReader = MioAdapter<mio_orig::unix::PipeReader>;
/// Unix pipe writer
pub type PipeWriter = MioAdapter<mio_orig::unix::PipeWriter>;
/// Unix listener
pub type UnixListener = MioAdapter<mio_orig::unix::UnixListener>;
impl UnixListener {
/// Bind to a port
pub fn bind<P: AsRef<Path> + ?Sized>(addr: &P) -> io::Result<Self> {
mio_orig::unix::UnixListener::bind(addr).map(MioAdapter::new)
}
/// Try cloning the socket descriptor.
pub fn try_clone(&self) -> io::Result<Self> {
self.shared().io_ref().try_clone().map(MioAdapter::new)
}
}
/// Unix socket
pub type UnixSocket = MioAdapter<mio_orig::unix::UnixSocket>;
impl UnixSocket {
/// Returns a new, unbound, Unix domain socket
pub fn stream() -> io::Result<UnixSocket> {
mio_orig::unix::UnixSocket::stream().map(MioAdapter::new)
}
/// Connect the socket to the specified address
pub fn | <P: AsRef<Path> + ?Sized>(self, addr: &P) -> io::Result<(UnixStream, bool)> {
self.shared()
.io_ref()
.try_clone()
.and_then(|t| mio_orig::unix::UnixSocket::connect(t, addr))
.map(|(t, b)| (MioAdapter::new(t), b))
}
/// Bind the socket to the specified address
pub fn bind<P: AsRef<Path> + ?Sized>(&self, addr: &P) -> io::Result<()> {
self.shared().io_ref().bind(addr)
}
/// Clone
pub fn try_clone(&self) -> io::Result<Self> {
self.shared().io_ref().try_clone().map(MioAdapter::new)
}
}
/// Unix stream
pub type UnixStream = MioAdapter<mio_orig::unix::UnixStream>;
impl UnixStream {
/// Connect UnixStream to `path`
pub fn connect<P: AsRef<Path> + ?Sized>(path: &P) -> io::Result<UnixStream> {
mio_orig::unix::UnixStream::connect(path).map(MioAdapter::new)
}
/// Clone
pub fn try_clone(&self) -> io::Result<Self> {
self.shared().io_ref().try_clone().map(MioAdapter::new)
}
/// Try reading data into a buffer.
///
/// This will not block.
pub fn try_read_recv_fd(&mut self,
buf: &mut [u8])
-> io::Result<Option<(usize, Option<RawFd>)>> {
self.shared().io_mut().try_read_recv_fd(buf)
}
/// Block on read.
pub fn read_recv_fd(&mut self, buf: &mut [u8]) -> io::Result<(usize, Option<RawFd>)> {
loop {
let res = self.try_read_recv_fd(buf);
match res {
Ok(None) => self.block_on(RW::read()),
Ok(Some(r)) => {
return Ok(r);
}
Err(e) => return Err(e),
}
}
}
/// Try writing data from the buffer.
///
/// This will not block.
pub fn try_write_send_fd(&self, buf: &[u8], fd: RawFd) -> io::Result<Option<usize>> {
self.shared().io_mut().try_write_send_fd(buf, fd)
}
/// Block on write
pub fn write_send_fd(&mut self, buf: &[u8], fd: RawFd) -> io::Result<usize> {
loop {
let res = self.try_write_send_fd(buf, fd);
match res {
Ok(None) => self.block_on(RW::write()),
Ok(Some(r)) => {
return Ok(r);
}
Err(e) => return Err(e),
}
}
}
}
/// Create a pair of unix pipe (reader and writer)
pub fn pipe() -> io::Result<(PipeReader, PipeWriter)> {
let (raw_reader, raw_writer) = try!(mio_orig::unix::pipe());
Ok((MioAdapter::new(raw_reader), MioAdapter::new(raw_writer)))
}
| connect | identifier_name |
unix.rs | use super::RW;
use super::evented::{Evented, EventedImpl, MioAdapter};
use std::io;
use std::path::Path;
use std::os::unix::io::RawFd;
use mio_orig;
/// Unix pipe reader
pub type PipeReader = MioAdapter<mio_orig::unix::PipeReader>;
/// Unix pipe writer
pub type PipeWriter = MioAdapter<mio_orig::unix::PipeWriter>;
/// Unix listener
pub type UnixListener = MioAdapter<mio_orig::unix::UnixListener>;
impl UnixListener {
/// Bind to a port
pub fn bind<P: AsRef<Path> + ?Sized>(addr: &P) -> io::Result<Self> {
mio_orig::unix::UnixListener::bind(addr).map(MioAdapter::new)
}
/// Try cloning the socket descriptor.
pub fn try_clone(&self) -> io::Result<Self> {
self.shared().io_ref().try_clone().map(MioAdapter::new)
}
}
/// Unix socket
pub type UnixSocket = MioAdapter<mio_orig::unix::UnixSocket>;
impl UnixSocket {
/// Returns a new, unbound, Unix domain socket
pub fn stream() -> io::Result<UnixSocket> {
mio_orig::unix::UnixSocket::stream().map(MioAdapter::new)
}
/// Connect the socket to the specified address
pub fn connect<P: AsRef<Path> + ?Sized>(self, addr: &P) -> io::Result<(UnixStream, bool)> {
self.shared()
.io_ref()
.try_clone()
.and_then(|t| mio_orig::unix::UnixSocket::connect(t, addr))
.map(|(t, b)| (MioAdapter::new(t), b))
}
/// Bind the socket to the specified address
pub fn bind<P: AsRef<Path> + ?Sized>(&self, addr: &P) -> io::Result<()> {
self.shared().io_ref().bind(addr)
}
/// Clone
pub fn try_clone(&self) -> io::Result<Self> {
self.shared().io_ref().try_clone().map(MioAdapter::new)
}
}
/// Unix stream
pub type UnixStream = MioAdapter<mio_orig::unix::UnixStream>;
| pub fn connect<P: AsRef<Path> + ?Sized>(path: &P) -> io::Result<UnixStream> {
mio_orig::unix::UnixStream::connect(path).map(MioAdapter::new)
}
/// Clone
pub fn try_clone(&self) -> io::Result<Self> {
self.shared().io_ref().try_clone().map(MioAdapter::new)
}
/// Try reading data into a buffer.
///
/// This will not block.
pub fn try_read_recv_fd(&mut self,
buf: &mut [u8])
-> io::Result<Option<(usize, Option<RawFd>)>> {
self.shared().io_mut().try_read_recv_fd(buf)
}
/// Block on read.
pub fn read_recv_fd(&mut self, buf: &mut [u8]) -> io::Result<(usize, Option<RawFd>)> {
loop {
let res = self.try_read_recv_fd(buf);
match res {
Ok(None) => self.block_on(RW::read()),
Ok(Some(r)) => {
return Ok(r);
}
Err(e) => return Err(e),
}
}
}
/// Try writing data from the buffer.
///
/// This will not block.
pub fn try_write_send_fd(&self, buf: &[u8], fd: RawFd) -> io::Result<Option<usize>> {
self.shared().io_mut().try_write_send_fd(buf, fd)
}
/// Block on write
pub fn write_send_fd(&mut self, buf: &[u8], fd: RawFd) -> io::Result<usize> {
loop {
let res = self.try_write_send_fd(buf, fd);
match res {
Ok(None) => self.block_on(RW::write()),
Ok(Some(r)) => {
return Ok(r);
}
Err(e) => return Err(e),
}
}
}
}
/// Create a pair of unix pipe (reader and writer)
pub fn pipe() -> io::Result<(PipeReader, PipeWriter)> {
let (raw_reader, raw_writer) = try!(mio_orig::unix::pipe());
Ok((MioAdapter::new(raw_reader), MioAdapter::new(raw_writer)))
} |
impl UnixStream {
/// Connect UnixStream to `path` | random_line_split |
gh-ed-editor.js | import Ember from 'ember';
import EditorAPI from 'ghost-admin/mixins/ed-editor-api';
import EditorShortcuts from 'ghost-admin/mixins/ed-editor-shortcuts';
import EditorScroll from 'ghost-admin/mixins/ed-editor-scroll';
import {invokeAction} from 'ember-invoke-action';
const {TextArea, run} = Ember;
export default TextArea.extend(EditorAPI, EditorShortcuts, EditorScroll, {
focus: false,
/**
* Tell the controller about focusIn events; this will trigger an autosave on a new document
*/
focusIn() {
this.sendAction('onFocusIn');
},
/**
* Sets the focus of the textarea if needed
*/
setFocus() {
if (this.get('focus')) {
this.$().val(this.$().val()).focus();
}
},
/**
* Sets up properties at render time
*/
didInsertElement() { |
run.scheduleOnce('afterRender', this, this.afterRenderEvent);
},
afterRenderEvent() {
if (this.get('focus') && this.get('focusCursorAtEnd')) {
this.setSelection('end');
}
},
actions: {
toggleCopyHTMLModal(generatedHTML) {
invokeAction(this, 'toggleCopyHTMLModal', generatedHTML);
}
}
}); | this._super(...arguments);
this.setFocus();
invokeAction(this, 'setEditor', this); | random_line_split |
gh-ed-editor.js | import Ember from 'ember';
import EditorAPI from 'ghost-admin/mixins/ed-editor-api';
import EditorShortcuts from 'ghost-admin/mixins/ed-editor-shortcuts';
import EditorScroll from 'ghost-admin/mixins/ed-editor-scroll';
import {invokeAction} from 'ember-invoke-action';
const {TextArea, run} = Ember;
export default TextArea.extend(EditorAPI, EditorShortcuts, EditorScroll, {
focus: false,
/**
* Tell the controller about focusIn events; this will trigger an autosave on a new document
*/
focusIn() {
this.sendAction('onFocusIn');
},
/**
* Sets the focus of the textarea if needed
*/
setFocus() {
if (this.get('focus')) {
this.$().val(this.$().val()).focus();
}
},
/**
* Sets up properties at render time
*/
didInsertElement() {
this._super(...arguments);
this.setFocus();
invokeAction(this, 'setEditor', this);
run.scheduleOnce('afterRender', this, this.afterRenderEvent);
},
afterRenderEvent() {
if (this.get('focus') && this.get('focusCursorAtEnd')) |
},
actions: {
toggleCopyHTMLModal(generatedHTML) {
invokeAction(this, 'toggleCopyHTMLModal', generatedHTML);
}
}
});
| {
this.setSelection('end');
} | conditional_block |
gh-ed-editor.js | import Ember from 'ember';
import EditorAPI from 'ghost-admin/mixins/ed-editor-api';
import EditorShortcuts from 'ghost-admin/mixins/ed-editor-shortcuts';
import EditorScroll from 'ghost-admin/mixins/ed-editor-scroll';
import {invokeAction} from 'ember-invoke-action';
const {TextArea, run} = Ember;
export default TextArea.extend(EditorAPI, EditorShortcuts, EditorScroll, {
focus: false,
/**
* Tell the controller about focusIn events; this will trigger an autosave on a new document
*/
focusIn() {
this.sendAction('onFocusIn');
},
/**
* Sets the focus of the textarea if needed
*/
setFocus() {
if (this.get('focus')) {
this.$().val(this.$().val()).focus();
}
},
/**
* Sets up properties at render time
*/
didInsertElement() | ,
afterRenderEvent() {
if (this.get('focus') && this.get('focusCursorAtEnd')) {
this.setSelection('end');
}
},
actions: {
toggleCopyHTMLModal(generatedHTML) {
invokeAction(this, 'toggleCopyHTMLModal', generatedHTML);
}
}
});
| {
this._super(...arguments);
this.setFocus();
invokeAction(this, 'setEditor', this);
run.scheduleOnce('afterRender', this, this.afterRenderEvent);
} | identifier_body |
gh-ed-editor.js | import Ember from 'ember';
import EditorAPI from 'ghost-admin/mixins/ed-editor-api';
import EditorShortcuts from 'ghost-admin/mixins/ed-editor-shortcuts';
import EditorScroll from 'ghost-admin/mixins/ed-editor-scroll';
import {invokeAction} from 'ember-invoke-action';
const {TextArea, run} = Ember;
export default TextArea.extend(EditorAPI, EditorShortcuts, EditorScroll, {
focus: false,
/**
* Tell the controller about focusIn events; this will trigger an autosave on a new document
*/
focusIn() {
this.sendAction('onFocusIn');
},
/**
* Sets the focus of the textarea if needed
*/
setFocus() {
if (this.get('focus')) {
this.$().val(this.$().val()).focus();
}
},
/**
* Sets up properties at render time
*/
didInsertElement() {
this._super(...arguments);
this.setFocus();
invokeAction(this, 'setEditor', this);
run.scheduleOnce('afterRender', this, this.afterRenderEvent);
},
| () {
if (this.get('focus') && this.get('focusCursorAtEnd')) {
this.setSelection('end');
}
},
actions: {
toggleCopyHTMLModal(generatedHTML) {
invokeAction(this, 'toggleCopyHTMLModal', generatedHTML);
}
}
});
| afterRenderEvent | identifier_name |
proxy.py | """An HTTP proxy that supports IPv6 as well as the HTTP CONNECT method, among
other things."""
# Standard library imports
import socket
import thread
import select
__version__ = '0.1.0 Draft 1'
BUFFER_LENGTH = 8192
VERSION = 'Python Proxy/{}'.format(__version__)
HTTP_VERSION = 'HTTP/1.1'
class ConnectionHandler(object):
"""Handles connections between the HTTP client and HTTP server."""
def __init__(self, connection, _, timeout):
self.client = connection
self.client_buffer = ''
self.timeout = timeout
self.target = None
method, path, protocol = self.get_base_header()
if method == 'CONNECT':
self.method_connect(path)
else:
self.method_others(method, path, protocol)
def get_base_header(self):
"""Return a tuple of (method, path, protocol) from the recieved
message."""
while 1:
self.client_buffer += self.client.recv(BUFFER_LENGTH)
end = self.client_buffer.find('\n')
if end != -1:
break
print '{}'.format(self.client_buffer[:end])
data = (self.client_buffer[:end+1]).split()
self.client_buffer = self.client_buffer[end+1:]
return data
def method_connect(self, path):
"""Handle HTTP CONNECT messages."""
self._connect_target(path)
self.client.send('{http_version} 200 Connection established\n'
'Proxy-agent: {version}\n\n'.format(
http_version=HTTP_VERSION,
version=VERSION))
self.client_buffer = ''
self._read_write()
def method_others(self, method, path, protocol):
"""Handle all non-HTTP CONNECT messages."""
path = path[7:]
i = path.find('/')
host = path[:i]
path = path[i:]
self._connect_target(host)
self.target.send('{method} {path} {protocol}\n{client_buffer}'.format(
method=method,
path=path,
protocol=protocol,
client_buffer=self.client_buffer))
self.client_buffer = ''
self._read_write()
def _connect_target(self, host):
"""Create a connection to the HTTP server specified by *host*."""
i = host.find(':')
if i != -1:
port = int(host[i+1:])
host = host[:i]
else:
port = 80
(soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
self.target = socket.socket(soc_family)
self.target.connect(address)
def _read_write(self):
"""Read data from client connection and forward to server
connection."""
time_out_max = self.timeout/3
socs = [self.client, self.target]
count = 0
while 1:
|
self.client.close()
self.target.close()
def start_server(host='localhost', port=8080, ipv_6=False, timeout=60,
handler=ConnectionHandler):
"""Start the HTTP proxy server."""
if ipv_6:
soc_type = socket.AF_INET6
else:
soc_type = socket.AF_INET
soc = socket.socket(soc_type)
soc.bind((host, port))
print 'Serving on {0}:{1}.'.format(host, port)
soc.listen(0)
while 1:
thread.start_new_thread(handler, soc.accept()+(timeout,))
if __name__ == '__main__':
start_server()
| count += 1
(recv, _, error) = select.select(socs, [], socs, 3)
if error:
break
if recv:
for in_ in recv:
data = in_.recv(BUFFER_LENGTH)
if in_ is self.client:
out = self.target
else:
out = self.client
if data:
out.send(data)
count = 0
if count == time_out_max:
break | conditional_block |
proxy.py | """An HTTP proxy that supports IPv6 as well as the HTTP CONNECT method, among
other things."""
# Standard library imports
import socket
import thread
import select
__version__ = '0.1.0 Draft 1'
BUFFER_LENGTH = 8192
VERSION = 'Python Proxy/{}'.format(__version__)
HTTP_VERSION = 'HTTP/1.1'
class ConnectionHandler(object):
"""Handles connections between the HTTP client and HTTP server."""
def __init__(self, connection, _, timeout):
self.client = connection
self.client_buffer = ''
self.timeout = timeout
self.target = None
method, path, protocol = self.get_base_header()
if method == 'CONNECT':
self.method_connect(path)
else:
self.method_others(method, path, protocol)
def get_base_header(self):
"""Return a tuple of (method, path, protocol) from the recieved
message."""
while 1:
self.client_buffer += self.client.recv(BUFFER_LENGTH)
end = self.client_buffer.find('\n')
if end != -1:
break
print '{}'.format(self.client_buffer[:end])
data = (self.client_buffer[:end+1]).split()
self.client_buffer = self.client_buffer[end+1:]
return data
def method_connect(self, path):
"""Handle HTTP CONNECT messages."""
self._connect_target(path)
self.client.send('{http_version} 200 Connection established\n'
'Proxy-agent: {version}\n\n'.format(
http_version=HTTP_VERSION,
version=VERSION))
self.client_buffer = ''
self._read_write()
def method_others(self, method, path, protocol):
"""Handle all non-HTTP CONNECT messages."""
path = path[7:]
i = path.find('/')
host = path[:i]
path = path[i:]
self._connect_target(host)
self.target.send('{method} {path} {protocol}\n{client_buffer}'.format(
method=method,
path=path,
protocol=protocol,
client_buffer=self.client_buffer))
self.client_buffer = ''
self._read_write()
def _connect_target(self, host):
"""Create a connection to the HTTP server specified by *host*."""
i = host.find(':')
if i != -1:
port = int(host[i+1:])
host = host[:i]
else:
port = 80
(soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
self.target = socket.socket(soc_family)
self.target.connect(address)
def _read_write(self):
|
def start_server(host='localhost', port=8080, ipv_6=False, timeout=60,
handler=ConnectionHandler):
"""Start the HTTP proxy server."""
if ipv_6:
soc_type = socket.AF_INET6
else:
soc_type = socket.AF_INET
soc = socket.socket(soc_type)
soc.bind((host, port))
print 'Serving on {0}:{1}.'.format(host, port)
soc.listen(0)
while 1:
thread.start_new_thread(handler, soc.accept()+(timeout,))
if __name__ == '__main__':
start_server()
| """Read data from client connection and forward to server
connection."""
time_out_max = self.timeout/3
socs = [self.client, self.target]
count = 0
while 1:
count += 1
(recv, _, error) = select.select(socs, [], socs, 3)
if error:
break
if recv:
for in_ in recv:
data = in_.recv(BUFFER_LENGTH)
if in_ is self.client:
out = self.target
else:
out = self.client
if data:
out.send(data)
count = 0
if count == time_out_max:
break
self.client.close()
self.target.close() | identifier_body |
proxy.py | """An HTTP proxy that supports IPv6 as well as the HTTP CONNECT method, among
other things."""
# Standard library imports
import socket
import thread
import select
__version__ = '0.1.0 Draft 1'
BUFFER_LENGTH = 8192
VERSION = 'Python Proxy/{}'.format(__version__)
HTTP_VERSION = 'HTTP/1.1'
class ConnectionHandler(object):
"""Handles connections between the HTTP client and HTTP server."""
def __init__(self, connection, _, timeout):
self.client = connection
self.client_buffer = ''
self.timeout = timeout
self.target = None
method, path, protocol = self.get_base_header()
if method == 'CONNECT':
self.method_connect(path)
else:
self.method_others(method, path, protocol)
def get_base_header(self):
"""Return a tuple of (method, path, protocol) from the recieved
message."""
while 1:
self.client_buffer += self.client.recv(BUFFER_LENGTH)
end = self.client_buffer.find('\n')
if end != -1:
break
print '{}'.format(self.client_buffer[:end])
data = (self.client_buffer[:end+1]).split()
self.client_buffer = self.client_buffer[end+1:]
return data
def method_connect(self, path):
"""Handle HTTP CONNECT messages."""
self._connect_target(path)
self.client.send('{http_version} 200 Connection established\n'
'Proxy-agent: {version}\n\n'.format(
http_version=HTTP_VERSION,
version=VERSION))
self.client_buffer = ''
self._read_write()
def method_others(self, method, path, protocol):
"""Handle all non-HTTP CONNECT messages."""
path = path[7:]
i = path.find('/')
host = path[:i]
path = path[i:]
self._connect_target(host)
self.target.send('{method} {path} {protocol}\n{client_buffer}'.format(
method=method,
path=path,
protocol=protocol,
client_buffer=self.client_buffer))
self.client_buffer = ''
self._read_write()
def _connect_target(self, host):
"""Create a connection to the HTTP server specified by *host*."""
i = host.find(':')
if i != -1:
port = int(host[i+1:])
host = host[:i]
else:
port = 80
(soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
self.target = socket.socket(soc_family)
self.target.connect(address)
def _read_write(self):
"""Read data from client connection and forward to server
connection."""
time_out_max = self.timeout/3
socs = [self.client, self.target]
count = 0
while 1:
count += 1
(recv, _, error) = select.select(socs, [], socs, 3)
if error:
break
if recv:
for in_ in recv:
data = in_.recv(BUFFER_LENGTH)
if in_ is self.client:
out = self.target
else:
out = self.client
if data:
out.send(data)
count = 0
if count == time_out_max:
break
self.client.close()
self.target.close()
def start_server(host='localhost', port=8080, ipv_6=False, timeout=60,
handler=ConnectionHandler):
"""Start the HTTP proxy server."""
if ipv_6:
soc_type = socket.AF_INET6
else:
soc_type = socket.AF_INET | soc.listen(0)
while 1:
thread.start_new_thread(handler, soc.accept()+(timeout,))
if __name__ == '__main__':
start_server() | soc = socket.socket(soc_type)
soc.bind((host, port))
print 'Serving on {0}:{1}.'.format(host, port) | random_line_split |
proxy.py | """An HTTP proxy that supports IPv6 as well as the HTTP CONNECT method, among
other things."""
# Standard library imports
import socket
import thread
import select
__version__ = '0.1.0 Draft 1'
BUFFER_LENGTH = 8192
VERSION = 'Python Proxy/{}'.format(__version__)
HTTP_VERSION = 'HTTP/1.1'
class | (object):
"""Handles connections between the HTTP client and HTTP server."""
def __init__(self, connection, _, timeout):
self.client = connection
self.client_buffer = ''
self.timeout = timeout
self.target = None
method, path, protocol = self.get_base_header()
if method == 'CONNECT':
self.method_connect(path)
else:
self.method_others(method, path, protocol)
def get_base_header(self):
"""Return a tuple of (method, path, protocol) from the recieved
message."""
while 1:
self.client_buffer += self.client.recv(BUFFER_LENGTH)
end = self.client_buffer.find('\n')
if end != -1:
break
print '{}'.format(self.client_buffer[:end])
data = (self.client_buffer[:end+1]).split()
self.client_buffer = self.client_buffer[end+1:]
return data
def method_connect(self, path):
"""Handle HTTP CONNECT messages."""
self._connect_target(path)
self.client.send('{http_version} 200 Connection established\n'
'Proxy-agent: {version}\n\n'.format(
http_version=HTTP_VERSION,
version=VERSION))
self.client_buffer = ''
self._read_write()
def method_others(self, method, path, protocol):
"""Handle all non-HTTP CONNECT messages."""
path = path[7:]
i = path.find('/')
host = path[:i]
path = path[i:]
self._connect_target(host)
self.target.send('{method} {path} {protocol}\n{client_buffer}'.format(
method=method,
path=path,
protocol=protocol,
client_buffer=self.client_buffer))
self.client_buffer = ''
self._read_write()
def _connect_target(self, host):
"""Create a connection to the HTTP server specified by *host*."""
i = host.find(':')
if i != -1:
port = int(host[i+1:])
host = host[:i]
else:
port = 80
(soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
self.target = socket.socket(soc_family)
self.target.connect(address)
def _read_write(self):
"""Read data from client connection and forward to server
connection."""
time_out_max = self.timeout/3
socs = [self.client, self.target]
count = 0
while 1:
count += 1
(recv, _, error) = select.select(socs, [], socs, 3)
if error:
break
if recv:
for in_ in recv:
data = in_.recv(BUFFER_LENGTH)
if in_ is self.client:
out = self.target
else:
out = self.client
if data:
out.send(data)
count = 0
if count == time_out_max:
break
self.client.close()
self.target.close()
def start_server(host='localhost', port=8080, ipv_6=False, timeout=60,
handler=ConnectionHandler):
"""Start the HTTP proxy server."""
if ipv_6:
soc_type = socket.AF_INET6
else:
soc_type = socket.AF_INET
soc = socket.socket(soc_type)
soc.bind((host, port))
print 'Serving on {0}:{1}.'.format(host, port)
soc.listen(0)
while 1:
thread.start_new_thread(handler, soc.accept()+(timeout,))
if __name__ == '__main__':
start_server()
| ConnectionHandler | identifier_name |
matching.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! High-level interface to CSS selector matching.
#![allow(unsafe_code)]
#![deny(missing_docs)]
use context::{ElementCascadeInputs, QuirksMode, SelectorFlagsMap};
use context::{SharedStyleContext, StyleContext};
use data::ElementData;
use dom::TElement;
use invalidation::element::restyle_hints::RestyleHint;
use properties::ComputedValues;
use properties::longhands::display::computed_value::T as Display;
use rule_tree::{CascadeLevel, StrongRuleNode};
use selector_parser::{PseudoElement, RestyleDamage};
use selectors::matching::ElementSelectorFlags;
use servo_arc::{Arc, ArcBorrow};
use style_resolver::ResolvedElementStyles;
use traversal_flags::TraversalFlags;
/// Represents the result of comparing an element's old and new style.
#[derive(Debug)]
pub struct StyleDifference {
/// The resulting damage.
pub damage: RestyleDamage,
/// Whether any styles changed.
pub change: StyleChange,
}
/// Represents whether or not the style of an element has changed.
#[derive(Clone, Copy, Debug)]
pub enum StyleChange {
/// The style hasn't changed.
Unchanged,
/// The style has changed.
Changed {
/// Whether only reset structs changed.
reset_only: bool,
},
}
/// Whether or not newly computed values for an element need to be cascaded
/// to children.
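///
/// Variants are ordered from weakest to strongest so requirements can be
/// combined with `cmp::max` (see `finish_restyle`).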
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub enum ChildCascadeRequirement {
/// Old and new computed values were the same, or we otherwise know that
/// we won't bother recomputing style for children, so we can skip cascading
/// the new values into child elements.
CanSkipCascade = 0,
/// The same as `MustCascadeChildren`, but we only need to actually
/// recascade if the child inherits any explicit reset style.
MustCascadeChildrenIfInheritResetStyle = 1,
/// Old and new computed values were different, so we must cascade the
/// new values to children.
MustCascadeChildren = 2,
/// The same as `MustCascadeChildren`, but for the entire subtree. This is
/// used to handle root font-size updates needing to recascade the whole
/// document.
MustCascadeDescendants = 3,
}
impl ChildCascadeRequirement {
/// Whether we can unconditionally skip the cascade.
pub fn can_skip_cascade(&self) -> bool {
matches!(*self, ChildCascadeRequirement::CanSkipCascade)
}
}
/// Determines which styles are being cascaded currently.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CascadeVisitedMode {
/// Cascade the regular, unvisited styles.
Unvisited,
/// Cascade the styles used when an element's relevant link is visited. A
/// "relevant link" is the element being matched if it is a link or the
/// nearest ancestor link.
Visited,
}
trait PrivateMatchMethods: TElement {
/// Updates the rule nodes without re-running selector matching, using just
/// the rule tree, for a specific visited mode.
///
/// Returns true if an !important rule was replaced.
fn replace_rules_internal(
&self,
replacements: RestyleHint,
context: &mut StyleContext<Self>,
cascade_visited: CascadeVisitedMode,
cascade_inputs: &mut ElementCascadeInputs,
) -> bool {
use properties::PropertyDeclarationBlock;
use shared_lock::Locked;
debug_assert!(replacements.intersects(RestyleHint::replacements()) &&
(replacements & !RestyleHint::replacements()).is_empty());
let stylist = &context.shared.stylist;
let guards = &context.shared.guards;
let primary_rules =
match cascade_visited {
CascadeVisitedMode::Unvisited => cascade_inputs.primary.rules.as_mut(),
CascadeVisitedMode::Visited => cascade_inputs.primary.visited_rules.as_mut(),
};
let primary_rules = match primary_rules {
Some(r) => r,
None => return false,
};
let replace_rule_node = |level: CascadeLevel,
pdb: Option<ArcBorrow<Locked<PropertyDeclarationBlock>>>,
path: &mut StrongRuleNode| -> bool {
let mut important_rules_changed = false;
let new_node =
stylist.rule_tree().update_rule_at_level(
level,
pdb,
path,
guards,
&mut important_rules_changed,
);
if let Some(n) = new_node {
*path = n;
}
important_rules_changed
};
if !context.shared.traversal_flags.for_animation_only() {
let mut result = false;
if replacements.contains(RestyleHint::RESTYLE_STYLE_ATTRIBUTE) {
let style_attribute = self.style_attribute();
result |= replace_rule_node(
CascadeLevel::StyleAttributeNormal,
style_attribute,
primary_rules,
);
result |= replace_rule_node(
CascadeLevel::StyleAttributeImportant,
style_attribute,
primary_rules,
);
// FIXME(emilio): Still a hack!
self.unset_dirty_style_attribute();
}
return result;
}
// Animation restyle hints are processed prior to other restyle
// hints in the animation-only traversal.
//
// Non-animation restyle hints will be processed in a subsequent
// normal traversal.
if replacements.intersects(RestyleHint::for_animations()) {
debug_assert!(context.shared.traversal_flags.for_animation_only());
if replacements.contains(RestyleHint::RESTYLE_SMIL) {
replace_rule_node(
CascadeLevel::SMILOverride,
self.smil_override(),
primary_rules,
);
}
if replacements.contains(RestyleHint::RESTYLE_CSS_TRANSITIONS) {
replace_rule_node(
CascadeLevel::Transitions,
self.transition_rule().as_ref().map(|a| a.borrow_arc()),
primary_rules,
);
}
if replacements.contains(RestyleHint::RESTYLE_CSS_ANIMATIONS) {
replace_rule_node(
CascadeLevel::Animations,
self.animation_rule().as_ref().map(|a| a.borrow_arc()),
primary_rules,
);
}
}
false
}
/// If there is no transition rule in the ComputedValues, it returns None.
#[cfg(feature = "gecko")]
fn after_change_style(
&self,
context: &mut StyleContext<Self>,
primary_style: &Arc<ComputedValues>
) -> Option<Arc<ComputedValues>> {
use context::CascadeInputs;
use style_resolver::{PseudoElementResolution, StyleResolverForElement};
use stylist::RuleInclusion;
let rule_node = primary_style.rules();
let without_transition_rules =
context.shared.stylist.rule_tree().remove_transition_rule_if_applicable(rule_node);
if without_transition_rules == *rule_node {
// We don't have transition rule in this case, so return None to let
// the caller use the original ComputedValues.
return None;
}
// FIXME(bug 868975): We probably need to transition visited style as
// well.
let inputs =
CascadeInputs {
rules: Some(without_transition_rules),
visited_rules: primary_style.visited_rules().cloned()
};
// Actually `PseudoElementResolution` doesn't really matter.
let style =
StyleResolverForElement::new(*self, context, RuleInclusion::All, PseudoElementResolution::IfApplicable)
.cascade_style_and_visited_with_default_parents(inputs);
Some(style.0)
}
#[cfg(feature = "gecko")]
fn needs_animations_update(
&self,
context: &mut StyleContext<Self>,
old_values: Option<&ComputedValues>,
new_values: &ComputedValues,
) -> bool {
let new_box_style = new_values.get_box();
let has_new_animation_style = new_box_style.specifies_animations();
let old = match old_values {
Some(old) => old,
None => return has_new_animation_style,
};
let old_box_style = old.get_box();
let keyframes_could_have_changed =
context.shared.traversal_flags.contains(TraversalFlags::ForCSSRuleChanges);
// If the traversal is triggered due to changes in CSS rules changes, we
// need to try to update all CSS animations on the element if the
// element has or will have CSS animation style regardless of whether
// the animation is running or not.
//
// TODO: We should check which @keyframes were added/changed/deleted and
// update only animations corresponding to those @keyframes.
if keyframes_could_have_changed &&
(has_new_animation_style || self.has_css_animations())
{
return true;
}
// If the animations changed, well...
if !old_box_style.animations_equals(new_box_style) {
return true;
}
let old_display = old_box_style.clone_display();
let new_display = new_box_style.clone_display();
// If we were display: none, we may need to trigger animations.
if old_display == Display::None && new_display != Display::None {
return has_new_animation_style;
}
// If we are becoming display: none, we may need to stop animations.
if old_display != Display::None && new_display == Display::None {
return self.has_css_animations();
}
false
}
/// Create a SequentialTask for resolving descendants in a SMIL display property
/// animation if the display property changed from none.
#[cfg(feature = "gecko")]
fn handle_display_change_for_smil_if_needed(
&self,
context: &mut StyleContext<Self>,
old_values: Option<&ComputedValues>,
new_values: &ComputedValues,
restyle_hints: RestyleHint
) {
use context::PostAnimationTasks;
if !restyle_hints.intersects(RestyleHint::RESTYLE_SMIL) {
return;
}
if new_values.is_display_property_changed_from_none(old_values) {
// When display value is changed from none to other, we need to
// traverse descendant elements in a subsequent normal
// traversal (we can't traverse them in this animation-only restyle
// since we have no way to know whether the descendants
// need to be traversed at the beginning of the animation-only
// restyle).
let task = ::context::SequentialTask::process_post_animation(
*self,
PostAnimationTasks::DISPLAY_CHANGED_FROM_NONE_FOR_SMIL,
);
context.thread_local.tasks.push(task);
}
}
#[cfg(feature = "gecko")]
fn process_animations(
&self,
context: &mut StyleContext<Self>,
old_values: &mut Option<Arc<ComputedValues>>,
new_values: &mut Arc<ComputedValues>,
restyle_hint: RestyleHint,
important_rules_changed: bool,
) {
use context::UpdateAnimationsTasks;
if context.shared.traversal_flags.for_animation_only() {
self.handle_display_change_for_smil_if_needed(
context,
old_values.as_ref().map(|v| &**v),
new_values,
restyle_hint,
);
return;
}
// Bug 868975: These steps should examine and update the visited styles
// in addition to the unvisited styles.
let mut tasks = UpdateAnimationsTasks::empty();
if self.needs_animations_update(context, old_values.as_ref().map(|s| &**s), new_values) {
tasks.insert(UpdateAnimationsTasks::CSS_ANIMATIONS);
}
let before_change_style = if self.might_need_transitions_update(old_values.as_ref().map(|s| &**s),
new_values) {
let after_change_style = if self.has_css_transitions() {
self.after_change_style(context, new_values)
} else {
None
};
// In order to avoid creating a SequentialTask for transitions which
// may not be updated, we check it per property to make sure Gecko
// side will really update transition.
let needs_transitions_update = {
// We borrow new_values here, so need to add a scope to make
// sure we release it before assigning a new value to it.
let after_change_style_ref =
after_change_style.as_ref().unwrap_or(&new_values);
self.needs_transitions_update(
old_values.as_ref().unwrap(),
after_change_style_ref,
)
};
if needs_transitions_update {
if let Some(values_without_transitions) = after_change_style {
*new_values = values_without_transitions;
}
tasks.insert(UpdateAnimationsTasks::CSS_TRANSITIONS);
// We need to clone old_values into SequentialTask, so we can
// use it later.
old_values.clone()
} else {
None
}
} else {
None
};
if self.has_animations() {
tasks.insert(UpdateAnimationsTasks::EFFECT_PROPERTIES);
if important_rules_changed {
tasks.insert(UpdateAnimationsTasks::CASCADE_RESULTS);
}
if new_values.is_display_property_changed_from_none(old_values.as_ref().map(|s| &**s)) {
tasks.insert(UpdateAnimationsTasks::DISPLAY_CHANGED_FROM_NONE);
}
}
if !tasks.is_empty() {
let task = ::context::SequentialTask::update_animations(*self,
before_change_style,
tasks);
context.thread_local.tasks.push(task);
}
}
#[cfg(feature = "servo")]
fn process_animations(
&self,
context: &mut StyleContext<Self>,
old_values: &mut Option<Arc<ComputedValues>>,
new_values: &mut Arc<ComputedValues>,
_restyle_hint: RestyleHint,
_important_rules_changed: bool,
) {
use animation;
use dom::TNode;
let mut possibly_expired_animations = vec![];
let shared_context = context.shared;
if let Some(ref mut old) = *old_values {
// FIXME(emilio, #20116): This makes no sense.
self.update_animations_for_cascade(
shared_context,
old,
&mut possibly_expired_animations,
&context.thread_local.font_metrics_provider,
);
}
let new_animations_sender = &context.thread_local.new_animations_sender;
let this_opaque = self.as_node().opaque();
// Trigger any present animations if necessary.
animation::maybe_start_animations(
&shared_context,
new_animations_sender,
this_opaque,
&new_values,
);
// Trigger transitions if necessary. This will reset `new_values` back
// to its old value if it did trigger a transition.
if let Some(ref values) = *old_values {
animation::start_transitions_if_applicable(
new_animations_sender,
this_opaque,
&values,
new_values,
&shared_context.timer,
&possibly_expired_animations,
);
}
}
/// Computes and applies non-redundant damage.
fn accumulate_damage_for(
&self,
shared_context: &SharedStyleContext,
damage: &mut RestyleDamage,
old_values: &ComputedValues,
new_values: &ComputedValues,
pseudo: Option<&PseudoElement>,
) -> ChildCascadeRequirement {
debug!("accumulate_damage_for: {:?}", self);
debug_assert!(!shared_context.traversal_flags.contains(TraversalFlags::Forgetful));
let difference =
self.compute_style_difference(old_values, new_values, pseudo);
*damage |= difference.damage;
debug!(" > style difference: {:?}", difference);
// We need to cascade the children in order to ensure the correct
// propagation of inherited computed value flags.
if old_values.flags.maybe_inherited() != new_values.flags.maybe_inherited() {
debug!(" > flags changed: {:?} != {:?}", old_values.flags, new_values.flags);
return ChildCascadeRequirement::MustCascadeChildren;
}
match difference.change {
StyleChange::Unchanged => {
return ChildCascadeRequirement::CanSkipCascade
},
StyleChange::Changed { reset_only } => {
// If inherited properties changed, the best we can do is
// cascade the children.
if !reset_only {
return ChildCascadeRequirement::MustCascadeChildren
}
}
}
let old_display = old_values.get_box().clone_display();
let new_display = new_values.get_box().clone_display();
// If we used to be a display: none element, and no longer are,
// our children need to be restyled because they're unstyled.
//
// NOTE(emilio): Gecko has the special case of -moz-binding, but
// that gets handled on the frame constructor when processing
// the reframe, so no need to handle that here.
if old_display == Display::None && old_display != new_display {
return ChildCascadeRequirement::MustCascadeChildren
}
// Blockification of children may depend on our display value,
// so we need to actually do the recascade. We could potentially
// do better, but it doesn't seem worth it.
if old_display.is_item_container() != new_display.is_item_container() {
return ChildCascadeRequirement::MustCascadeChildren
}
// Line break suppression may also be affected if the display
// type changes from ruby to non-ruby.
#[cfg(feature = "gecko")]
{
if old_display.is_ruby_type() != new_display.is_ruby_type() {
return ChildCascadeRequirement::MustCascadeChildren
}
}
// Children with justify-items: auto may depend on our
// justify-items property value.
//
// Similarly, we could potentially do better, but this really
// seems not common enough to care about.
#[cfg(feature = "gecko")]
{
use values::specified::align::AlignFlags;
let old_justify_items =
old_values.get_position().clone_justify_items();
let new_justify_items =
new_values.get_position().clone_justify_items();
let was_legacy_justify_items =
old_justify_items.computed.0.contains(AlignFlags::LEGACY);
let is_legacy_justify_items =
new_justify_items.computed.0.contains(AlignFlags::LEGACY);
if is_legacy_justify_items != was_legacy_justify_items {
return ChildCascadeRequirement::MustCascadeChildren;
}
if was_legacy_justify_items &&
old_justify_items.computed != new_justify_items.computed {
return ChildCascadeRequirement::MustCascadeChildren;
}
}
#[cfg(feature = "servo")]
{
// We may need to set or propagate the CAN_BE_FRAGMENTED bit
// on our children.
if old_values.is_multicol() != new_values.is_multicol() {
return ChildCascadeRequirement::MustCascadeChildren;
}
}
// We could prove that, if our children don't inherit reset
// properties, we can stop the cascade.
ChildCascadeRequirement::MustCascadeChildrenIfInheritResetStyle
}
// FIXME(emilio, #20116): It's not clear to me that the name of this method
// represents anything of what it does.
//
// Also, this function gets the old style, for some reason I don't really
// get, but the functions called (mainly update_style_for_animation) expects
// the new style, wtf?
#[cfg(feature = "servo")]
fn update_animations_for_cascade(
&self,
context: &SharedStyleContext,
style: &mut Arc<ComputedValues>,
possibly_expired_animations: &mut Vec<::animation::PropertyAnimation>,
font_metrics: &::font_metrics::FontMetricsProvider,
) {
use animation::{self, Animation};
use dom::TNode;
// Finish any expired transitions.
let this_opaque = self.as_node().opaque();
animation::complete_expired_transitions(this_opaque, style, context);
// Merge any running animations into the current style, and cancel them.
let had_running_animations =
context.running_animations.read().get(&this_opaque).is_some();
if !had_running_animations {
return;
}
let mut all_running_animations = context.running_animations.write();
for running_animation in all_running_animations.get_mut(&this_opaque).unwrap() {
// This shouldn't happen frequently, but under some circumstances
// (mainly huge load or debug builds) the constellation might be
// delayed in sending the `TickAllAnimations` message to layout.
//
// Thus, we can't assume all the animations have already been
// updated by layout, because another restyle due to script might be
// triggered by layout before the animation tick.
//
// See #12171 and the associated PR for an example where this
// happened while debugging other release panics.
if running_animation.is_expired() {
continue;
}
animation::update_style_for_animation::<Self>(
context,
running_animation,
style,
font_metrics,
);
if let Animation::Transition(_, _, ref frame, _) = *running_animation {
possibly_expired_animations.push(frame.property_animation.clone())
}
}
}
}
impl<E: TElement> PrivateMatchMethods for E {}
/// The public API that elements expose for selector matching.
pub trait MatchMethods : TElement {
/// Returns the closest parent element that doesn't have a display: contents
/// style (and thus generates a box).
///
/// This is needed to correctly handle blockification of flex and grid
/// items.
///
/// Returns itself if the element has no parent. In practice this doesn't
/// happen because the root element is blockified per spec, but it could
/// happen if we decide to not blockify for roots of disconnected subtrees,
/// which is a kind of dubious behavior.
fn layout_parent(&self) -> Self {
let mut current = self.clone();
loop {
current = match current.traversal_parent() {
Some(el) => el,
None => return current,
};
let is_display_contents =
current.borrow_data().unwrap().styles.primary().is_display_contents();
if !is_display_contents {
return current;
}
}
}
/// Updates the styles with the new ones, diffs them, and stores the restyle
/// damage.
fn finish_restyle(
&self,
context: &mut StyleContext<Self>,
data: &mut ElementData,
mut new_styles: ResolvedElementStyles,
important_rules_changed: bool,
) -> ChildCascadeRequirement {
use std::cmp;
self.process_animations(
context,
&mut data.styles.primary,
&mut new_styles.primary.style.0,
data.hint,
important_rules_changed,
);
// First of all, update the styles.
let old_styles = data.set_styles(new_styles);
let new_primary_style = data.styles.primary.as_ref().unwrap();
let mut cascade_requirement = ChildCascadeRequirement::CanSkipCascade;
if self.is_root() && !self.is_native_anonymous() {
let device = context.shared.stylist.device();
let new_font_size = new_primary_style.get_font().clone_font_size();
if old_styles.primary.as_ref().map_or(true, |s| s.get_font().clone_font_size() != new_font_size) {
debug_assert!(self.owner_doc_matches_for_testing(device));
device.set_root_font_size(new_font_size.size());
// If the root font-size changed since last time, and something
// in the document did use rem units, ensure we recascade the
// entire tree.
if device.used_root_font_size() {
cascade_requirement = ChildCascadeRequirement::MustCascadeDescendants;
}
}
}
if context.shared.stylist.quirks_mode() == QuirksMode::Quirks {
if self.is_html_document_body_element() {
// NOTE(emilio): We _could_ handle dynamic changes to it if it
// changes and before we reach our children the cascade stops,
// but we don't track right now whether we use the document body
// color, and nobody else handles that properly anyway.
let device = context.shared.stylist.device();
// Needed for the "inherit from body" quirk.
let text_color = new_primary_style.get_color().clone_color();
device.set_body_text_color(text_color);
}
}
// Don't accumulate damage if we're in a forgetful traversal.
if context.shared.traversal_flags.contains(TraversalFlags::Forgetful) {
return ChildCascadeRequirement::MustCascadeChildren;
}
// Also, don't do anything if there was no style.
let old_primary_style = match old_styles.primary {
Some(s) => s,
None => return ChildCascadeRequirement::MustCascadeChildren,
};
cascade_requirement = cmp::max(
cascade_requirement,
self.accumulate_damage_for(
context.shared,
&mut data.damage,
&old_primary_style,
new_primary_style,
None,
)
);
if data.styles.pseudos.is_empty() && old_styles.pseudos.is_empty() {
// This is the common case; no need to examine pseudos here.
return cascade_requirement; |
let pseudo_styles =
old_styles.pseudos.as_array().iter().zip(
data.styles.pseudos.as_array().iter());
for (i, (old, new)) in pseudo_styles.enumerate() {
match (old, new) {
(&Some(ref old), &Some(ref new)) => {
self.accumulate_damage_for(
context.shared,
&mut data.damage,
old,
new,
Some(&PseudoElement::from_eager_index(i)),
);
}
(&None, &None) => {},
_ => {
// It's possible that we're switching from not having
// ::before/::after at all to having styles for them but not
// actually having a useful pseudo-element. Check for that
// case.
let pseudo = PseudoElement::from_eager_index(i);
let new_pseudo_should_exist =
new.as_ref().map_or(false, |s| pseudo.should_exist(s));
let old_pseudo_should_exist =
old.as_ref().map_or(false, |s| pseudo.should_exist(s));
if new_pseudo_should_exist != old_pseudo_should_exist {
data.damage |= RestyleDamage::reconstruct();
return cascade_requirement;
}
}
}
}
cascade_requirement
}
/// Applies selector flags to an element, deferring mutations of the parent
/// until after the traversal.
///
/// TODO(emilio): This is somewhat inefficient, because it doesn't take
/// advantage of us knowing that the traversal is sequential.
fn apply_selector_flags(
&self,
map: &mut SelectorFlagsMap<Self>,
element: &Self,
flags: ElementSelectorFlags,
) {
// Handle flags that apply to the element.
let self_flags = flags.for_self();
if !self_flags.is_empty() {
if element == self {
// If this is the element we're styling, we have exclusive
// access to the element, and thus it's fine inserting them,
// even from the worker.
unsafe { element.set_selector_flags(self_flags); }
} else {
// Otherwise, this element is an ancestor of the current element
// we're styling, and thus multiple children could write to it
// if we did from here.
//
// Instead, we can read them, and post them if necessary as a
// sequential task in order for them to be processed later.
if !element.has_selector_flags(self_flags) {
map.insert_flags(*element, self_flags);
}
}
}
// Handle flags that apply to the parent.
let parent_flags = flags.for_parent();
if !parent_flags.is_empty() {
if let Some(p) = element.parent_element() {
if !p.has_selector_flags(parent_flags) {
map.insert_flags(p, parent_flags);
}
}
}
}
/// Updates the rule nodes without re-running selector matching, using just
/// the rule tree.
///
/// Returns true if an !important rule was replaced.
fn replace_rules(
&self,
replacements: RestyleHint,
context: &mut StyleContext<Self>,
cascade_inputs: &mut ElementCascadeInputs,
) -> bool {
let mut result = false;
result |= self.replace_rules_internal(
replacements,
context,
CascadeVisitedMode::Unvisited,
cascade_inputs,
);
result |= self.replace_rules_internal(
replacements,
context,
CascadeVisitedMode::Visited,
cascade_inputs
);
result
}
/// Given the old and new style of this element, and whether it's a
/// pseudo-element, compute the restyle damage used to determine which
/// kind of layout or painting operations we'll need.
fn compute_style_difference(
&self,
old_values: &ComputedValues,
new_values: &ComputedValues,
pseudo: Option<&PseudoElement>
) -> StyleDifference {
debug_assert!(pseudo.map_or(true, |p| p.is_eager()));
RestyleDamage::compute_style_difference(old_values, new_values)
}
}
impl<E: TElement> MatchMethods for E {} | } | random_line_split |
matching.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! High-level interface to CSS selector matching.
#![allow(unsafe_code)]
#![deny(missing_docs)]
use context::{ElementCascadeInputs, QuirksMode, SelectorFlagsMap};
use context::{SharedStyleContext, StyleContext};
use data::ElementData;
use dom::TElement;
use invalidation::element::restyle_hints::RestyleHint;
use properties::ComputedValues;
use properties::longhands::display::computed_value::T as Display;
use rule_tree::{CascadeLevel, StrongRuleNode};
use selector_parser::{PseudoElement, RestyleDamage};
use selectors::matching::ElementSelectorFlags;
use servo_arc::{Arc, ArcBorrow};
use style_resolver::ResolvedElementStyles;
use traversal_flags::TraversalFlags;
/// Represents the result of comparing an element's old and new style.
#[derive(Debug)]
pub struct StyleDifference {
/// The resulting damage.
pub damage: RestyleDamage,
/// Whether any styles changed.
pub change: StyleChange,
}
/// Represents whether or not the style of an element has changed.
#[derive(Clone, Copy, Debug)]
pub enum StyleChange {
/// The style hasn't changed.
Unchanged,
/// The style has changed.
Changed {
/// Whether only reset structs changed.
reset_only: bool,
},
}
/// Whether or not newly computed values for an element need to be cascaded
/// to children.
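///
/// Variants are ordered from weakest to strongest so requirements can be
/// combined with `cmp::max` (see `finish_restyle`).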
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub enum ChildCascadeRequirement {
/// Old and new computed values were the same, or we otherwise know that
/// we won't bother recomputing style for children, so we can skip cascading
/// the new values into child elements.
CanSkipCascade = 0,
/// The same as `MustCascadeChildren`, but we only need to actually
/// recascade if the child inherits any explicit reset style.
MustCascadeChildrenIfInheritResetStyle = 1,
/// Old and new computed values were different, so we must cascade the
/// new values to children.
MustCascadeChildren = 2,
/// The same as `MustCascadeChildren`, but for the entire subtree. This is
/// used to handle root font-size updates needing to recascade the whole
/// document.
MustCascadeDescendants = 3,
}
impl ChildCascadeRequirement {
/// Whether we can unconditionally skip the cascade.
pub fn can_skip_cascade(&self) -> bool {
matches!(*self, ChildCascadeRequirement::CanSkipCascade)
}
}
/// Determines which styles are being cascaded currently.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CascadeVisitedMode {
/// Cascade the regular, unvisited styles.
Unvisited,
/// Cascade the styles used when an element's relevant link is visited. A
/// "relevant link" is the element being matched if it is a link or the
/// nearest ancestor link.
Visited,
}
trait PrivateMatchMethods: TElement {
/// Updates the rule nodes without re-running selector matching, using just
/// the rule tree, for a specific visited mode.
///
/// Returns true if an !important rule was replaced.
fn replace_rules_internal(
&self,
replacements: RestyleHint,
context: &mut StyleContext<Self>,
cascade_visited: CascadeVisitedMode,
cascade_inputs: &mut ElementCascadeInputs,
) -> bool {
use properties::PropertyDeclarationBlock;
use shared_lock::Locked;
debug_assert!(replacements.intersects(RestyleHint::replacements()) &&
(replacements & !RestyleHint::replacements()).is_empty());
let stylist = &context.shared.stylist;
let guards = &context.shared.guards;
let primary_rules =
match cascade_visited {
CascadeVisitedMode::Unvisited => cascade_inputs.primary.rules.as_mut(),
CascadeVisitedMode::Visited => cascade_inputs.primary.visited_rules.as_mut(),
};
let primary_rules = match primary_rules {
Some(r) => r,
None => return false,
};
let replace_rule_node = |level: CascadeLevel,
pdb: Option<ArcBorrow<Locked<PropertyDeclarationBlock>>>,
path: &mut StrongRuleNode| -> bool {
let mut important_rules_changed = false;
let new_node =
stylist.rule_tree().update_rule_at_level(
level,
pdb,
path,
guards,
&mut important_rules_changed,
);
if let Some(n) = new_node {
*path = n;
}
important_rules_changed
};
if !context.shared.traversal_flags.for_animation_only() {
let mut result = false;
if replacements.contains(RestyleHint::RESTYLE_STYLE_ATTRIBUTE) {
let style_attribute = self.style_attribute();
result |= replace_rule_node(
CascadeLevel::StyleAttributeNormal,
style_attribute,
primary_rules,
);
result |= replace_rule_node(
CascadeLevel::StyleAttributeImportant,
style_attribute,
primary_rules,
);
// FIXME(emilio): Still a hack!
self.unset_dirty_style_attribute();
}
return result;
}
// Animation restyle hints are processed prior to other restyle
// hints in the animation-only traversal.
//
// Non-animation restyle hints will be processed in a subsequent
// normal traversal.
if replacements.intersects(RestyleHint::for_animations()) {
debug_assert!(context.shared.traversal_flags.for_animation_only());
if replacements.contains(RestyleHint::RESTYLE_SMIL) {
replace_rule_node(
CascadeLevel::SMILOverride,
self.smil_override(),
primary_rules,
);
}
if replacements.contains(RestyleHint::RESTYLE_CSS_TRANSITIONS) {
replace_rule_node(
CascadeLevel::Transitions,
self.transition_rule().as_ref().map(|a| a.borrow_arc()),
primary_rules,
);
}
if replacements.contains(RestyleHint::RESTYLE_CSS_ANIMATIONS) {
replace_rule_node(
CascadeLevel::Animations,
self.animation_rule().as_ref().map(|a| a.borrow_arc()),
primary_rules,
);
}
}
false
}
/// If there is no transition rule in the ComputedValues, it returns None.
#[cfg(feature = "gecko")]
fn after_change_style(
&self,
context: &mut StyleContext<Self>,
primary_style: &Arc<ComputedValues>
) -> Option<Arc<ComputedValues>> {
use context::CascadeInputs;
use style_resolver::{PseudoElementResolution, StyleResolverForElement};
use stylist::RuleInclusion;
let rule_node = primary_style.rules();
let without_transition_rules =
context.shared.stylist.rule_tree().remove_transition_rule_if_applicable(rule_node);
if without_transition_rules == *rule_node {
// We don't have transition rule in this case, so return None to let
// the caller use the original ComputedValues.
return None;
}
// FIXME(bug 868975): We probably need to transition visited style as
// well.
let inputs =
CascadeInputs {
rules: Some(without_transition_rules),
visited_rules: primary_style.visited_rules().cloned()
};
// Actually `PseudoElementResolution` doesn't really matter.
let style =
StyleResolverForElement::new(*self, context, RuleInclusion::All, PseudoElementResolution::IfApplicable)
.cascade_style_and_visited_with_default_parents(inputs);
Some(style.0)
}
#[cfg(feature = "gecko")]
fn needs_animations_update(
&self,
context: &mut StyleContext<Self>,
old_values: Option<&ComputedValues>,
new_values: &ComputedValues,
) -> bool {
let new_box_style = new_values.get_box();
let has_new_animation_style = new_box_style.specifies_animations();
let old = match old_values {
Some(old) => old,
None => return has_new_animation_style,
};
let old_box_style = old.get_box();
let keyframes_could_have_changed =
context.shared.traversal_flags.contains(TraversalFlags::ForCSSRuleChanges);
// If the traversal is triggered due to changes in CSS rules changes, we
// need to try to update all CSS animations on the element if the
// element has or will have CSS animation style regardless of whether
// the animation is running or not.
//
// TODO: We should check which @keyframes were added/changed/deleted and
// update only animations corresponding to those @keyframes.
if keyframes_could_have_changed &&
(has_new_animation_style || self.has_css_animations())
{
return true;
}
// If the animations changed, well...
if !old_box_style.animations_equals(new_box_style) {
return true;
}
let old_display = old_box_style.clone_display();
let new_display = new_box_style.clone_display();
// If we were display: none, we may need to trigger animations.
if old_display == Display::None && new_display != Display::None {
return has_new_animation_style;
}
// If we are becoming display: none, we may need to stop animations.
if old_display != Display::None && new_display == Display::None {
return self.has_css_animations();
}
false
}
/// Create a SequentialTask for resolving descendants in a SMIL display property
/// animation if the display property changed from none.
#[cfg(feature = "gecko")]
fn handle_display_change_for_smil_if_needed(
&self,
context: &mut StyleContext<Self>,
old_values: Option<&ComputedValues>,
new_values: &ComputedValues,
restyle_hints: RestyleHint
) {
use context::PostAnimationTasks;
if !restyle_hints.intersects(RestyleHint::RESTYLE_SMIL) {
return;
}
if new_values.is_display_property_changed_from_none(old_values) {
// When display value is changed from none to other, we need to
// traverse descendant elements in a subsequent normal
// traversal (we can't traverse them in this animation-only restyle
// since we have no way to know whether the descendants
// need to be traversed at the beginning of the animation-only
// restyle).
let task = ::context::SequentialTask::process_post_animation(
*self,
PostAnimationTasks::DISPLAY_CHANGED_FROM_NONE_FOR_SMIL,
);
context.thread_local.tasks.push(task);
}
}
#[cfg(feature = "gecko")]
fn process_animations(
&self,
context: &mut StyleContext<Self>,
old_values: &mut Option<Arc<ComputedValues>>,
new_values: &mut Arc<ComputedValues>,
restyle_hint: RestyleHint,
important_rules_changed: bool,
) {
use context::UpdateAnimationsTasks;
if context.shared.traversal_flags.for_animation_only() {
self.handle_display_change_for_smil_if_needed(
context,
old_values.as_ref().map(|v| &**v),
new_values,
restyle_hint,
);
return;
}
// Bug 868975: These steps should examine and update the visited styles
// in addition to the unvisited styles.
let mut tasks = UpdateAnimationsTasks::empty();
if self.needs_animations_update(context, old_values.as_ref().map(|s| &**s), new_values) {
tasks.insert(UpdateAnimationsTasks::CSS_ANIMATIONS);
}
let before_change_style = if self.might_need_transitions_update(old_values.as_ref().map(|s| &**s),
new_values) {
let after_change_style = if self.has_css_transitions() {
self.after_change_style(context, new_values)
} else {
None
};
// In order to avoid creating a SequentialTask for transitions which
// may not be updated, we check it per property to make sure Gecko
// side will really update transition.
let needs_transitions_update = {
// We borrow new_values here, so need to add a scope to make
// sure we release it before assigning a new value to it.
let after_change_style_ref =
after_change_style.as_ref().unwrap_or(&new_values);
self.needs_transitions_update(
old_values.as_ref().unwrap(),
after_change_style_ref,
)
};
if needs_transitions_update {
if let Some(values_without_transitions) = after_change_style {
*new_values = values_without_transitions;
}
tasks.insert(UpdateAnimationsTasks::CSS_TRANSITIONS);
// We need to clone old_values into SequentialTask, so we can
// use it later.
old_values.clone()
} else {
None
}
} else {
None
};
if self.has_animations() {
tasks.insert(UpdateAnimationsTasks::EFFECT_PROPERTIES);
if important_rules_changed {
tasks.insert(UpdateAnimationsTasks::CASCADE_RESULTS);
}
if new_values.is_display_property_changed_from_none(old_values.as_ref().map(|s| &**s)) {
tasks.insert(UpdateAnimationsTasks::DISPLAY_CHANGED_FROM_NONE);
}
}
if !tasks.is_empty() {
let task = ::context::SequentialTask::update_animations(*self,
before_change_style,
tasks);
context.thread_local.tasks.push(task);
}
}
#[cfg(feature = "servo")]
fn process_animations(
&self,
context: &mut StyleContext<Self>,
old_values: &mut Option<Arc<ComputedValues>>,
new_values: &mut Arc<ComputedValues>,
_restyle_hint: RestyleHint,
_important_rules_changed: bool,
) {
use animation;
use dom::TNode;
let mut possibly_expired_animations = vec![];
let shared_context = context.shared;
if let Some(ref mut old) = *old_values {
// FIXME(emilio, #20116): This makes no sense.
self.update_animations_for_cascade(
shared_context,
old,
&mut possibly_expired_animations,
&context.thread_local.font_metrics_provider,
);
}
let new_animations_sender = &context.thread_local.new_animations_sender;
let this_opaque = self.as_node().opaque();
// Trigger any present animations if necessary.
animation::maybe_start_animations(
&shared_context,
new_animations_sender,
this_opaque,
&new_values,
);
// Trigger transitions if necessary. This will reset `new_values` back
// to its old value if it did trigger a transition.
if let Some(ref values) = *old_values {
animation::start_transitions_if_applicable(
new_animations_sender,
this_opaque,
&values,
new_values,
&shared_context.timer,
&possibly_expired_animations,
);
}
}
/// Computes and applies non-redundant damage.
fn accumulate_damage_for(
&self,
shared_context: &SharedStyleContext,
damage: &mut RestyleDamage,
old_values: &ComputedValues,
new_values: &ComputedValues,
pseudo: Option<&PseudoElement>,
) -> ChildCascadeRequirement {
debug!("accumulate_damage_for: {:?}", self);
debug_assert!(!shared_context.traversal_flags.contains(TraversalFlags::Forgetful));
let difference =
self.compute_style_difference(old_values, new_values, pseudo);
*damage |= difference.damage;
debug!(" > style difference: {:?}", difference);
// We need to cascade the children in order to ensure the correct
// propagation of inherited computed value flags.
if old_values.flags.maybe_inherited() != new_values.flags.maybe_inherited() {
debug!(" > flags changed: {:?} != {:?}", old_values.flags, new_values.flags);
return ChildCascadeRequirement::MustCascadeChildren;
}
match difference.change {
StyleChange::Unchanged => {
return ChildCascadeRequirement::CanSkipCascade
},
StyleChange::Changed { reset_only } => {
// If inherited properties changed, the best we can do is
// cascade the children.
if !reset_only {
return ChildCascadeRequirement::MustCascadeChildren
}
}
}
let old_display = old_values.get_box().clone_display();
let new_display = new_values.get_box().clone_display();
// If we used to be a display: none element, and no longer are,
// our children need to be restyled because they're unstyled.
//
// NOTE(emilio): Gecko has the special case of -moz-binding, but
// that gets handled on the frame constructor when processing
// the reframe, so no need to handle that here.
if old_display == Display::None && old_display != new_display {
return ChildCascadeRequirement::MustCascadeChildren
}
// Blockification of children may depend on our display value,
// so we need to actually do the recascade. We could potentially
// do better, but it doesn't seem worth it.
if old_display.is_item_container() != new_display.is_item_container() {
return ChildCascadeRequirement::MustCascadeChildren
}
// Line break suppression may also be affected if the display
// type changes from ruby to non-ruby.
#[cfg(feature = "gecko")]
{
if old_display.is_ruby_type() != new_display.is_ruby_type() {
return ChildCascadeRequirement::MustCascadeChildren
}
}
// Children with justify-items: auto may depend on our
// justify-items property value.
//
// Similarly, we could potentially do better, but this really
// seems not common enough to care about.
#[cfg(feature = "gecko")]
{
use values::specified::align::AlignFlags;
let old_justify_items =
old_values.get_position().clone_justify_items();
let new_justify_items =
new_values.get_position().clone_justify_items();
let was_legacy_justify_items =
old_justify_items.computed.0.contains(AlignFlags::LEGACY);
let is_legacy_justify_items =
new_justify_items.computed.0.contains(AlignFlags::LEGACY);
if is_legacy_justify_items != was_legacy_justify_items {
return ChildCascadeRequirement::MustCascadeChildren;
}
if was_legacy_justify_items &&
old_justify_items.computed != new_justify_items.computed {
return ChildCascadeRequirement::MustCascadeChildren;
}
}
#[cfg(feature = "servo")]
{
// We may need to set or propagate the CAN_BE_FRAGMENTED bit
// on our children.
if old_values.is_multicol() != new_values.is_multicol() {
return ChildCascadeRequirement::MustCascadeChildren;
}
}
// We could prove that, if our children don't inherit reset
// properties, we can stop the cascade.
ChildCascadeRequirement::MustCascadeChildrenIfInheritResetStyle
}
// FIXME(emilio, #20116): It's not clear to me that the name of this method
// represents anything of what it does.
//
// Also, this function gets the old style, for some reason I don't really
// get, but the functions called (mainly update_style_for_animation) expects
// the new style, wtf?
#[cfg(feature = "servo")]
fn update_animations_for_cascade(
&self,
context: &SharedStyleContext,
style: &mut Arc<ComputedValues>,
possibly_expired_animations: &mut Vec<::animation::PropertyAnimation>,
font_metrics: &::font_metrics::FontMetricsProvider,
) |
}
impl<E: TElement> PrivateMatchMethods for E {}
/// The public API that elements expose for selector matching.
pub trait MatchMethods : TElement {
/// Returns the closest parent element that doesn't have a display: contents
/// style (and thus generates a box).
///
/// This is needed to correctly handle blockification of flex and grid
/// items.
///
/// Returns itself if the element has no parent. In practice this doesn't
/// happen because the root element is blockified per spec, but it could
/// happen if we decide to not blockify for roots of disconnected subtrees,
/// which is a kind of dubious behavior.
fn layout_parent(&self) -> Self {
let mut current = self.clone();
loop {
matching.rs
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! High-level interface to CSS selector matching.
#![allow(unsafe_code)]
#![deny(missing_docs)]
use context::{ElementCascadeInputs, QuirksMode, SelectorFlagsMap};
use context::{SharedStyleContext, StyleContext};
use data::ElementData;
use dom::TElement;
use invalidation::element::restyle_hints::RestyleHint;
use properties::ComputedValues;
use properties::longhands::display::computed_value::T as Display;
use rule_tree::{CascadeLevel, StrongRuleNode};
use selector_parser::{PseudoElement, RestyleDamage};
use selectors::matching::ElementSelectorFlags;
use servo_arc::{Arc, ArcBorrow};
use style_resolver::ResolvedElementStyles;
use traversal_flags::TraversalFlags;
/// Represents the result of comparing an element's old and new style.
#[derive(Debug)]
pub struct StyleDifference {
/// The resulting damage.
pub damage: RestyleDamage,
/// Whether any styles changed.
pub change: StyleChange,
}
/// Represents whether or not the style of an element has changed.
#[derive(Clone, Copy, Debug)]
pub enum StyleChange {
/// The style hasn't changed.
Unchanged,
/// The style has changed.
Changed {
/// Whether only reset structs changed.
reset_only: bool,
},
}
/// Whether or not newly computed values for an element need to be cascaded
/// to children.
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub enum ChildCascadeRequirement {
/// Old and new computed values were the same, or we otherwise know that
/// we won't bother recomputing style for children, so we can skip cascading
/// the new values into child elements.
CanSkipCascade = 0,
/// The same as `MustCascadeChildren`, but we only need to actually
/// recascade if the child inherits any explicit reset style.
MustCascadeChildrenIfInheritResetStyle = 1,
/// Old and new computed values were different, so we must cascade the
/// new values to children.
MustCascadeChildren = 2,
/// The same as `MustCascadeChildren`, but for the entire subtree. This is
/// used to handle root font-size updates needing to recascade the whole
/// document.
MustCascadeDescendants = 3,
}
impl ChildCascadeRequirement {
/// Whether we can unconditionally skip the cascade.
pub fn can_skip_cascade(&self) -> bool {
matches!(*self, ChildCascadeRequirement::CanSkipCascade)
}
}
/// Determines which styles are being cascaded currently.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CascadeVisitedMode {
/// Cascade the regular, unvisited styles.
Unvisited,
/// Cascade the styles used when an element's relevant link is visited. A
/// "relevant link" is the element being matched if it is a link or the
/// nearest ancestor link.
Visited,
}
trait PrivateMatchMethods: TElement {
/// Updates the rule nodes without re-running selector matching, using just
/// the rule tree, for a specific visited mode.
///
/// Returns true if an !important rule was replaced.
fn replace_rules_internal(
&self,
replacements: RestyleHint,
context: &mut StyleContext<Self>,
cascade_visited: CascadeVisitedMode,
cascade_inputs: &mut ElementCascadeInputs,
) -> bool {
use properties::PropertyDeclarationBlock;
use shared_lock::Locked;
debug_assert!(replacements.intersects(RestyleHint::replacements()) &&
(replacements & !RestyleHint::replacements()).is_empty());
let stylist = &context.shared.stylist;
let guards = &context.shared.guards;
let primary_rules =
match cascade_visited {
CascadeVisitedMode::Unvisited => cascade_inputs.primary.rules.as_mut(),
CascadeVisitedMode::Visited => cascade_inputs.primary.visited_rules.as_mut(),
};
let primary_rules = match primary_rules {
Some(r) => r,
None => return false,
};
let replace_rule_node = |level: CascadeLevel,
pdb: Option<ArcBorrow<Locked<PropertyDeclarationBlock>>>,
path: &mut StrongRuleNode| -> bool {
let mut important_rules_changed = false;
let new_node =
stylist.rule_tree().update_rule_at_level(
level,
pdb,
path,
guards,
&mut important_rules_changed,
);
if let Some(n) = new_node {
*path = n;
}
important_rules_changed
};
if !context.shared.traversal_flags.for_animation_only() {
let mut result = false;
if replacements.contains(RestyleHint::RESTYLE_STYLE_ATTRIBUTE) {
let style_attribute = self.style_attribute();
result |= replace_rule_node(
CascadeLevel::StyleAttributeNormal,
style_attribute,
primary_rules,
);
result |= replace_rule_node(
CascadeLevel::StyleAttributeImportant,
style_attribute,
primary_rules,
);
// FIXME(emilio): Still a hack!
self.unset_dirty_style_attribute();
}
return result;
}
// Animation restyle hints are processed prior to other restyle
// hints in the animation-only traversal.
//
// Non-animation restyle hints will be processed in a subsequent
// normal traversal.
if replacements.intersects(RestyleHint::for_animations()) {
debug_assert!(context.shared.traversal_flags.for_animation_only());
if replacements.contains(RestyleHint::RESTYLE_SMIL) {
replace_rule_node(
CascadeLevel::SMILOverride,
self.smil_override(),
primary_rules,
);
}
if replacements.contains(RestyleHint::RESTYLE_CSS_TRANSITIONS) {
replace_rule_node(
CascadeLevel::Transitions,
self.transition_rule().as_ref().map(|a| a.borrow_arc()),
primary_rules,
);
}
if replacements.contains(RestyleHint::RESTYLE_CSS_ANIMATIONS) {
replace_rule_node(
CascadeLevel::Animations,
self.animation_rule().as_ref().map(|a| a.borrow_arc()),
primary_rules,
);
}
}
false
}
/// If there is no transition rule in the ComputedValues, it returns None.
#[cfg(feature = "gecko")]
fn after_change_style(
&self,
context: &mut StyleContext<Self>,
primary_style: &Arc<ComputedValues>
) -> Option<Arc<ComputedValues>> {
use context::CascadeInputs;
use style_resolver::{PseudoElementResolution, StyleResolverForElement};
use stylist::RuleInclusion;
let rule_node = primary_style.rules();
let without_transition_rules =
context.shared.stylist.rule_tree().remove_transition_rule_if_applicable(rule_node);
if without_transition_rules == *rule_node {
// We don't have a transition rule in this case, so return None to let
// the caller use the original ComputedValues.
return None;
}
// FIXME(bug 868975): We probably need to transition visited style as
// well.
let inputs =
CascadeInputs {
rules: Some(without_transition_rules),
visited_rules: primary_style.visited_rules().cloned()
};
// Actually `PseudoElementResolution` doesn't really matter.
let style =
StyleResolverForElement::new(*self, context, RuleInclusion::All, PseudoElementResolution::IfApplicable)
.cascade_style_and_visited_with_default_parents(inputs);
Some(style.0)
}
#[cfg(feature = "gecko")]
fn needs_animations_update(
&self,
context: &mut StyleContext<Self>,
old_values: Option<&ComputedValues>,
new_values: &ComputedValues,
) -> bool {
let new_box_style = new_values.get_box();
let has_new_animation_style = new_box_style.specifies_animations();
let old = match old_values {
Some(old) => old,
None => return has_new_animation_style,
};
let old_box_style = old.get_box();
let keyframes_could_have_changed =
context.shared.traversal_flags.contains(TraversalFlags::ForCSSRuleChanges);
// If the traversal is triggered by CSS rule changes, we
// need to try to update all CSS animations on the element if the
// element has or will have CSS animation style regardless of whether
// the animation is running or not.
//
// TODO: We should check which @keyframes were added/changed/deleted and
// update only animations corresponding to those @keyframes.
if keyframes_could_have_changed &&
(has_new_animation_style || self.has_css_animations())
{
return true;
}
// If the animations changed, well...
if !old_box_style.animations_equals(new_box_style) {
return true;
}
let old_display = old_box_style.clone_display();
let new_display = new_box_style.clone_display();
// If we were display: none, we may need to trigger animations.
if old_display == Display::None && new_display != Display::None {
return has_new_animation_style;
}
// If we are becoming display: none, we may need to stop animations.
if old_display != Display::None && new_display == Display::None {
return self.has_css_animations();
}
false
}
/// Create a SequentialTask for resolving descendants in a SMIL display property
/// animation if the display property changed from none.
#[cfg(feature = "gecko")]
fn handle_display_change_for_smil_if_needed(
&self,
context: &mut StyleContext<Self>,
old_values: Option<&ComputedValues>,
new_values: &ComputedValues,
restyle_hints: RestyleHint
) {
use context::PostAnimationTasks;
if !restyle_hints.intersects(RestyleHint::RESTYLE_SMIL) {
return;
}
if new_values.is_display_property_changed_from_none(old_values) {
// When display value is changed from none to other, we need to
// traverse descendant elements in a subsequent normal
// traversal (we can't traverse them in this animation-only restyle
// since we have no way to know whether the descendants
// need to be traversed at the beginning of the animation-only
// restyle).
let task = ::context::SequentialTask::process_post_animation(
*self,
PostAnimationTasks::DISPLAY_CHANGED_FROM_NONE_FOR_SMIL,
);
context.thread_local.tasks.push(task);
}
}
#[cfg(feature = "gecko")]
fn process_animations(
&self,
context: &mut StyleContext<Self>,
old_values: &mut Option<Arc<ComputedValues>>,
new_values: &mut Arc<ComputedValues>,
restyle_hint: RestyleHint,
important_rules_changed: bool,
) {
use context::UpdateAnimationsTasks;
if context.shared.traversal_flags.for_animation_only() {
self.handle_display_change_for_smil_if_needed(
context,
old_values.as_ref().map(|v| &**v),
new_values,
restyle_hint,
);
return;
}
// Bug 868975: These steps should examine and update the visited styles
// in addition to the unvisited styles.
let mut tasks = UpdateAnimationsTasks::empty();
if self.needs_animations_update(context, old_values.as_ref().map(|s| &**s), new_values) {
tasks.insert(UpdateAnimationsTasks::CSS_ANIMATIONS);
}
let before_change_style = if self.might_need_transitions_update(old_values.as_ref().map(|s| &**s),
new_values) {
let after_change_style = if self.has_css_transitions() {
self.after_change_style(context, new_values)
} else {
None
};
// In order to avoid creating a SequentialTask for transitions which
// may not be updated, we check it per property to make sure Gecko
// side will really update transition.
let needs_transitions_update = {
// We borrow new_values here, so need to add a scope to make
// sure we release it before assigning a new value to it.
let after_change_style_ref =
after_change_style.as_ref().unwrap_or(&new_values);
self.needs_transitions_update(
old_values.as_ref().unwrap(),
after_change_style_ref,
)
};
if needs_transitions_update {
if let Some(values_without_transitions) = after_change_style {
*new_values = values_without_transitions;
}
tasks.insert(UpdateAnimationsTasks::CSS_TRANSITIONS);
// We need to clone old_values into SequentialTask, so we can
// use it later.
old_values.clone()
} else {
None
}
} else {
None
};
if self.has_animations() {
tasks.insert(UpdateAnimationsTasks::EFFECT_PROPERTIES);
if important_rules_changed {
tasks.insert(UpdateAnimationsTasks::CASCADE_RESULTS);
}
if new_values.is_display_property_changed_from_none(old_values.as_ref().map(|s| &**s)) {
tasks.insert(UpdateAnimationsTasks::DISPLAY_CHANGED_FROM_NONE);
}
}
if !tasks.is_empty() {
let task = ::context::SequentialTask::update_animations(*self,
before_change_style,
tasks);
context.thread_local.tasks.push(task);
}
}
#[cfg(feature = "servo")]
fn process_animations(
&self,
context: &mut StyleContext<Self>,
old_values: &mut Option<Arc<ComputedValues>>,
new_values: &mut Arc<ComputedValues>,
_restyle_hint: RestyleHint,
_important_rules_changed: bool,
) {
use animation;
use dom::TNode;
let mut possibly_expired_animations = vec![];
let shared_context = context.shared;
if let Some(ref mut old) = *old_values {
// FIXME(emilio, #20116): This makes no sense.
self.update_animations_for_cascade(
shared_context,
old,
&mut possibly_expired_animations,
&context.thread_local.font_metrics_provider,
);
}
let new_animations_sender = &context.thread_local.new_animations_sender;
let this_opaque = self.as_node().opaque();
// Trigger any present animations if necessary.
animation::maybe_start_animations(
&shared_context,
new_animations_sender,
this_opaque,
&new_values,
);
// Trigger transitions if necessary. This will reset `new_values` back
// to its old value if it did trigger a transition.
if let Some(ref values) = *old_values {
animation::start_transitions_if_applicable(
new_animations_sender,
this_opaque,
&values,
new_values,
&shared_context.timer,
&possibly_expired_animations,
);
}
}
/// Computes and applies non-redundant damage.
fn accumulate_damage_for(
&self,
shared_context: &SharedStyleContext,
damage: &mut RestyleDamage,
old_values: &ComputedValues,
new_values: &ComputedValues,
pseudo: Option<&PseudoElement>,
) -> ChildCascadeRequirement {
debug!("accumulate_damage_for: {:?}", self);
debug_assert!(!shared_context.traversal_flags.contains(TraversalFlags::Forgetful));
let difference =
self.compute_style_difference(old_values, new_values, pseudo);
*damage |= difference.damage;
debug!(" > style difference: {:?}", difference);
// We need to cascade the children in order to ensure the correct
// propagation of inherited computed value flags.
if old_values.flags.maybe_inherited() != new_values.flags.maybe_inherited() {
debug!(" > flags changed: {:?} != {:?}", old_values.flags, new_values.flags);
return ChildCascadeRequirement::MustCascadeChildren;
}
match difference.change {
StyleChange::Unchanged => {
return ChildCascadeRequirement::CanSkipCascade
},
StyleChange::Changed { reset_only } => {
// If inherited properties changed, the best we can do is
// cascade the children.
if !reset_only {
return ChildCascadeRequirement::MustCascadeChildren
}
}
}
let old_display = old_values.get_box().clone_display();
let new_display = new_values.get_box().clone_display();
// If we used to be a display: none element, and no longer are,
// our children need to be restyled because they're unstyled.
//
// NOTE(emilio): Gecko has the special-case of -moz-binding, but
// that gets handled on the frame constructor when processing
// the reframe, so no need to handle that here.
if old_display == Display::None && old_display != new_display {
return ChildCascadeRequirement::MustCascadeChildren
}
// Blockification of children may depend on our display value,
// so we need to actually do the recascade. We could potentially
// do better, but it doesn't seem worth it.
if old_display.is_item_container() != new_display.is_item_container() {
return ChildCascadeRequirement::MustCascadeChildren
}
// Line break suppression may also be affected if the display
// type changes from ruby to non-ruby.
#[cfg(feature = "gecko")]
{
if old_display.is_ruby_type() != new_display.is_ruby_type() {
return ChildCascadeRequirement::MustCascadeChildren
}
}
// Children with justify-items: auto may depend on our
// justify-items property value.
//
// Similarly, we could potentially do better, but this really
// seems not common enough to care about.
#[cfg(feature = "gecko")]
{
use values::specified::align::AlignFlags;
let old_justify_items =
old_values.get_position().clone_justify_items();
let new_justify_items =
new_values.get_position().clone_justify_items();
let was_legacy_justify_items =
old_justify_items.computed.0.contains(AlignFlags::LEGACY);
let is_legacy_justify_items =
new_justify_items.computed.0.contains(AlignFlags::LEGACY);
if is_legacy_justify_items != was_legacy_justify_items {
return ChildCascadeRequirement::MustCascadeChildren;
}
if was_legacy_justify_items &&
old_justify_items.computed != new_justify_items.computed {
return ChildCascadeRequirement::MustCascadeChildren;
}
}
#[cfg(feature = "servo")]
{
// We may need to set or propagate the CAN_BE_FRAGMENTED bit
// on our children.
if old_values.is_multicol() != new_values.is_multicol() {
return ChildCascadeRequirement::MustCascadeChildren;
}
}
// We could prove that, if our children don't inherit reset
// properties, we can stop the cascade.
ChildCascadeRequirement::MustCascadeChildrenIfInheritResetStyle
}
// FIXME(emilio, #20116): It's not clear to me that the name of this method
// represents anything of what it does.
//
// Also, this function gets the old style, for some reason I don't really
// get, but the functions called (mainly update_style_for_animation) expect
// the new style, wtf?
#[cfg(feature = "servo")]
fn update_animations_for_cascade(
&self,
context: &SharedStyleContext,
style: &mut Arc<ComputedValues>,
possibly_expired_animations: &mut Vec<::animation::PropertyAnimation>,
font_metrics: &::font_metrics::FontMetricsProvider,
) {
use animation::{self, Animation};
use dom::TNode;
// Finish any expired transitions.
let this_opaque = self.as_node().opaque();
animation::complete_expired_transitions(this_opaque, style, context);
// Merge any running animations into the current style, and cancel them.
let had_running_animations =
context.running_animations.read().get(&this_opaque).is_some();
if !had_running_animations {
return;
}
let mut all_running_animations = context.running_animations.write();
for running_animation in all_running_animations.get_mut(&this_opaque).unwrap() {
// This shouldn't happen frequently, but under some circumstances
// mainly huge load or debug builds, the constellation might be
// delayed in sending the `TickAllAnimations` message to layout.
//
// Thus, we can't assume all the animations have already been
// updated by layout, because another restyle due to script might be
// triggered by layout before the animation tick.
//
// See #12171 and the associated PR for an example where this
// happened while debugging another release panic.
if running_animation.is_expired() {
continue;
}
animation::update_style_for_animation::<Self>(
context,
running_animation,
style,
font_metrics,
);
if let Animation::Transition(_, _, ref frame, _) = *running_animation {
possibly_expired_animations.push(frame.property_animation.clone())
}
}
}
}
impl<E: TElement> PrivateMatchMethods for E {}
/// The public API that elements expose for selector matching.
pub trait MatchMethods : TElement {
/// Returns the closest parent element that doesn't have a display: contents
/// style (and thus generates a box).
///
/// This is needed to correctly handle blockification of flex and grid
/// items.
///
/// Returns itself if the element has no parent. In practice this doesn't
/// happen because the root element is blockified per spec, but it could
/// happen if we decide not to blockify roots of disconnected subtrees,
/// which is a kind of dubious behavior.
fn layout_parent(&self) -> Self {
let mut current = self.clone();
loop {
current = match current.traversal_parent() {
Some(el) => el,
None => return current,
};
let is_display_contents =
current.borrow_data().unwrap().styles.primary().is_display_contents();
if !is_display_contents {
return current;
}
}
}
/// Updates the styles with the new ones, diffs them, and stores the restyle
/// damage.
fn finish_restyle(
&self,
context: &mut StyleContext<Self>,
data: &mut ElementData,
mut new_styles: ResolvedElementStyles,
important_rules_changed: bool,
) -> ChildCascadeRequirement {
use std::cmp;
self.process_animations(
context,
&mut data.styles.primary,
&mut new_styles.primary.style.0,
data.hint,
important_rules_changed,
);
// First of all, update the styles.
let old_styles = data.set_styles(new_styles);
let new_primary_style = data.styles.primary.as_ref().unwrap();
let mut cascade_requirement = ChildCascadeRequirement::CanSkipCascade;
if self.is_root() && !self.is_native_anonymous() {
let device = context.shared.stylist.device();
let new_font_size = new_primary_style.get_font().clone_font_size();
if old_styles.primary.as_ref().map_or(true, |s| s.get_font().clone_font_size() != new_font_size) {
debug_assert!(self.owner_doc_matches_for_testing(device));
device.set_root_font_size(new_font_size.size());
// If the root font-size changed since last time, and something
// in the document did use rem units, ensure we recascade the
// entire tree.
if device.used_root_font_size() {
cascade_requirement = ChildCascadeRequirement::MustCascadeDescendants;
}
}
}
if context.shared.stylist.quirks_mode() == QuirksMode::Quirks {
if self.is_html_document_body_element() {
// NOTE(emilio): We _could_ handle dynamic changes to it if it
// changes and before we reach our children the cascade stops,
// but we don't track right now whether we use the document body
// color, and nobody else handles that properly anyway.
let device = context.shared.stylist.device();
// Needed for the "inherit from body" quirk.
let text_color = new_primary_style.get_color().clone_color();
device.set_body_text_color(text_color);
}
}
// Don't accumulate damage if we're in a forgetful traversal.
if context.shared.traversal_flags.contains(TraversalFlags::Forgetful) {
return ChildCascadeRequirement::MustCascadeChildren;
}
// Also, don't do anything if there was no style.
let old_primary_style = match old_styles.primary {
Some(s) => s,
None => return ChildCascadeRequirement::MustCascadeChildren,
};
cascade_requirement = cmp::max(
cascade_requirement,
self.accumulate_damage_for(
context.shared,
&mut data.damage,
&old_primary_style,
new_primary_style,
None,
)
);
if data.styles.pseudos.is_empty() && old_styles.pseudos.is_empty() {
// This is the common case; no need to examine pseudos here.
return cascade_requirement;
}
let pseudo_styles =
old_styles.pseudos.as_array().iter().zip(
data.styles.pseudos.as_array().iter());
for (i, (old, new)) in pseudo_styles.enumerate() {
match (old, new) {
(&Some(ref old), &Some(ref new)) => {
self.accumulate_damage_for(
context.shared,
&mut data.damage,
old,
new,
Some(&PseudoElement::from_eager_index(i)),
);
}
(&None, &None) => {},
_ => {
// It's possible that we're switching from not having
// ::before/::after at all to having styles for them but not
// actually having a useful pseudo-element. Check for that
// case.
let pseudo = PseudoElement::from_eager_index(i);
let new_pseudo_should_exist =
new.as_ref().map_or(false, |s| pseudo.should_exist(s));
let old_pseudo_should_exist =
old.as_ref().map_or(false, |s| pseudo.should_exist(s));
if new_pseudo_should_exist != old_pseudo_should_exist {
data.damage |= RestyleDamage::reconstruct();
return cascade_requirement;
}
}
}
}
cascade_requirement
}
/// Applies selector flags to an element, deferring mutations of the parent
/// until after the traversal.
///
/// TODO(emilio): This is somewhat inefficient, because it doesn't take
/// advantage of us knowing that the traversal is sequential.
fn apply_selector_flags(
&self,
map: &mut SelectorFlagsMap<Self>,
element: &Self,
flags: ElementSelectorFlags,
) {
// Handle flags that apply to the element.
let self_flags = flags.for_self();
if !self_flags.is_empty() {
if element == self {
// If this is the element we're styling, we have exclusive
// access to the element, and thus it's fine inserting them,
// even from the worker.
unsafe { element.set_selector_flags(self_flags); }
} else {
// Otherwise, this element is an ancestor of the current element
// we're styling, and thus multiple children could write to it
// if we did from here.
//
// Instead, we can read them, and post them if necessary as a
// sequential task in order for them to be processed later.
if !element.has_selector_flags(self_flags) {
map.insert_flags(*element, self_flags);
}
}
}
// Handle flags that apply to the parent.
let parent_flags = flags.for_parent();
if !parent_flags.is_empty() {
if let Some(p) = element.parent_element() {
if !p.has_selector_flags(parent_flags) {
map.insert_flags(p, parent_flags);
}
}
}
}
/// Updates the rule nodes without re-running selector matching, using just
/// the rule tree.
///
/// Returns true if an !important rule was replaced.
fn replace_rules(
&self,
replacements: RestyleHint,
context: &mut StyleContext<Self>,
cascade_inputs: &mut ElementCascadeInputs,
) -> bool {
let mut result = false;
result |= self.replace_rules_internal(
replacements,
context,
CascadeVisitedMode::Unvisited,
cascade_inputs,
);
result |= self.replace_rules_internal(
replacements,
context,
CascadeVisitedMode::Visited,
cascade_inputs
);
result
}
/// Given the old and new style of this element, and whether it's a
/// pseudo-element, compute the restyle damage used to determine which
/// kind of layout or painting operations we'll need.
fn compute_style_difference(
&self,
old_values: &ComputedValues,
new_values: &ComputedValues,
pseudo: Option<&PseudoElement>
) -> StyleDifference {
debug_assert!(pseudo.map_or(true, |p| p.is_eager()));
RestyleDamage::compute_style_difference(old_values, new_values)
}
}
impl<E: TElement> MatchMethods for E {}
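A note on the enum near the top of matching.rs: ChildCascadeRequirement derives Ord with its discriminants ordered by strictness (CanSkipCascade = 0 up to MustCascadeDescendants = 3), which is what lets finish_restyle fold per-style damage results together with cmp::max and keep only the strictest answer. Below is a minimal standalone sketch of that accumulation pattern; the enum mirrors the one above (renamed Requirement here), and the list of per-pseudo results is hypothetical.

use std::cmp;

#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
enum Requirement {
    CanSkipCascade = 0,
    MustCascadeChildrenIfInheritResetStyle = 1,
    MustCascadeChildren = 2,
    MustCascadeDescendants = 3,
}

fn main() {
    // Hypothetical damage results from the primary style and two pseudos.
    let results = [
        Requirement::CanSkipCascade,
        Requirement::MustCascadeChildren,
        Requirement::MustCascadeChildrenIfInheritResetStyle,
    ];
    // Start from the weakest requirement; max() keeps the strictest seen.
    let mut requirement = Requirement::CanSkipCascade;
    for r in &results {
        requirement = cmp::max(requirement, *r);
    }
    assert_eq!(requirement, Requirement::MustCascadeChildren);
}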
serializers.py
from rest_framework.serializers import (
HyperlinkedIdentityField,
ModelSerializer,
SerializerMethodField,
)
from comments.api.serializers import CommentSerializer
from accounts.api.serializers import UserDetailSerializer
from comments.models import Comment
from posts.models import Post
class PostCreateUpdateSerializer(ModelSerializer):
class Meta:
model = Post
fields = [
#'id',
'title',
#'slug',
'content',
'publish',
]
post_detail_url = HyperlinkedIdentityField(
view_name='posts-api:detail',
lookup_field='slug',
)
class PostDetailSerializer(ModelSerializer):
url = post_detail_url
user = UserDetailSerializer(read_only=True)
image = SerializerMethodField()
html = SerializerMethodField()
comments = SerializerMethodField()
class Meta:
model = Post
fields = [
'url',
'id',
'user',
'title',
'slug',
'content',
'html',
'publish',
'image',
'comments',
]
def get_html(self, obj):
return obj.get_markdown()
def get_image(self, obj):
try:
image = obj.image.url
except Exception:  # e.g. no image file associated with this post
image = None
return image
def get_comments(self, obj):
#content_type = obj.get_content_type
#object_id = obj.id
c_qs = Comment.objects.filter_by_instance(obj)
comments = CommentSerializer(c_qs, many=True).data
return comments
class PostListSerializer(ModelSerializer):
url = post_detail_url
user = UserDetailSerializer(read_only=True)
class Meta:
model = Post
fields = [
'url',
'user',
'title',
'slug',
'content',
'publish',
]
mod.rs
// we try to parse these as windows keycodes
mod keys;
pub use self::keys::Keys as Key;
mod linux;
mod qcode;
#[repr(u32)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum Modifier {
Alt = 0x0001,
Ctrl = 0x0002,
Shift = 0x0004,
Win = 0x0008,
}
const NOREPEAT: u32 = 0x4000;
impl Key {
fn modifier(&self) -> Option<Modifier> {
Some(match *self {
Key::LMenu | Key::RMenu => Modifier::Alt,
Key::LControlKey | Key::RControlKey => Modifier::Ctrl,
Key::LShiftKey | Key::RShiftKey => Modifier::Shift,
Key::LWin | Key::RWin => Modifier::Win,
_ => return None,
})
}
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct KeyBinding {
modifiers: Vec<Modifier>,
no_repeat: bool, // FIXME: implement this
key: Key,
}
impl KeyBinding {
pub fn new(modifiers: Vec<Modifier>, key: Key, no_repeat: bool) -> KeyBinding {
KeyBinding { modifiers, no_repeat, key }
}
pub fn matches(&self, modifiers: &[Modifier], key: Key) -> bool {
key == self.key && self.modifiers.iter().all(|x| modifiers.contains(x))
}
pub fn to_windows(&self) -> (u32, u32) {
let base = if self.no_repeat { NOREPEAT } else { 0 };
(self.modifiers.iter().fold(base, |sum, &x| (sum | (x as u32))), self.key as u32)
}
}
pub struct KeyResolution {
pub hotkeys: Vec<usize>,
pub qcode: Option<&'static str>,
}
pub struct KeyboardState<'a> {
modifiers: Vec<Modifier>,
bindings: &'a [KeyBinding],
}
impl<'a> KeyboardState<'a> {
pub fn new(bindings: &'a [KeyBinding]) -> KeyboardState {
KeyboardState {
modifiers: Vec::new(),
bindings,
}
}
pub fn input_linux(&mut self, code: u32, down: bool) -> Option<KeyResolution> {
linux::key_convert(code).map(|k| {
let mut bindings = Vec::new();
if let Some(m) = k.modifier() {
if down {
if !self.modifiers.contains(&m) {
self.modifiers.push(m);
}
} else {
if let Some(i) = self.modifiers.iter().position(|&x| x == m) {
self.modifiers.swap_remove(i);
}
}
} else if down {
bindings.extend(self.bindings.iter().enumerate()
.filter(|&(_, b)| b.matches(&self.modifiers, k))
.map(|(i, _)| i));
}
KeyResolution {
hotkeys: bindings,
qcode: qcode::key_convert(k),
}
})
}
}
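A second sketch, this time of the input_linux state machine: modifier keys only mutate the held-modifier set (on both the down and up edges), and bindings are checked only on the down edge of a non-modifier key. The types below are simplified stand-ins for Modifier/KeyBinding (the real code first maps raw evdev codes through linux::key_convert); they only demonstrate the shape.

#[derive(Clone, Copy, PartialEq)]
enum Mod { Ctrl }

struct State { mods: Vec<Mod> }

impl State {
    // Returns true when a (hypothetical) Ctrl-based binding fires.
    fn input(&mut self, modifier: Option<Mod>, down: bool) -> bool {
        match modifier {
            Some(m) if down => {
                if !self.mods.contains(&m) { self.mods.push(m); }
                false
            }
            Some(m) => {
                if let Some(i) = self.mods.iter().position(|&x| x == m) {
                    self.mods.swap_remove(i);
                }
                false
            }
            // Non-modifier keys only fire bindings on the down edge.
            None => down && self.mods.contains(&Mod::Ctrl),
        }
    }
}

fn main() {
    let mut s = State { mods: Vec::new() };
    assert!(!s.input(Some(Mod::Ctrl), true));  // Ctrl pressed: just tracked
    assert!(s.input(None, true));              // key down while Ctrl held
    assert!(!s.input(None, false));            // key up never fires
    assert!(!s.input(Some(Mod::Ctrl), false)); // Ctrl released: tracked
    assert!(!s.input(None, true));             // no modifiers held now
}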
shopcustomers.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand
from django.utils.translation import ugettext_lazy as _
class Command(BaseCommand):
help = _("Collect information about all customers who accessed this shop.")
option_list = BaseCommand.option_list + (
make_option("--delete-expired", action='store_true', dest='delete_expired',
help=_("Delete customers with expired sessions.")),
)
def handle(self, verbosity, delete_expired, *args, **options):
from shop.models.customer import CustomerModel
data = dict(total=0, anonymous=0, active=0, staff=0, guests=0, registered=0, expired=0)
for customer in CustomerModel.objects.iterator():
data['total'] += 1
if customer.user.is_active:
data['active'] += 1
if customer.user.is_staff:
data['staff'] += 1
if customer.is_registered():
data['registered'] += 1
elif customer.is_guest():
data['guests'] += 1
elif customer.is_anonymous():
data['anonymous'] += 1
if customer.is_expired():
data['expired'] += 1
if delete_expired:
customer.delete()
msg = _("Customers in this shop: total={total}, anonymous={anonymous}, expired={expired}, active={active}, guests={guests}, registered={registered}, staff={staff}.")
self.stdout.write(msg.format(**data))