file_name (large_string, lengths 4-140) | prefix (large_string, lengths 0-12.1k) | suffix (large_string, lengths 0-12k) | middle (large_string, lengths 0-7.51k) | fim_type (large_string, 4 classes, values) |
---|---|---|---|---|
thread.go | next = newMessageHandlerCont(c)
}
next.Push(t.Runtime, ErrorValue(err))
}
c = next
}
return
}
// This is to be able to close a suspended coroutine without completing it, but
// still allow cleaning up the to-be-closed variables. If this is put on the
// resume channel of a running thread, yield will cause a panic in the goroutine
// and that will be caught in the defer() clause below.
type threadClose struct{}
//
// Coroutine management
//
// Start starts the thread in a goroutine, giving it the callable c to run. The
// t.Resume() method needs to be called to provide arguments to the callable.
func (t *Thread) Start(c Callable) {
t.RequireBytes(2 << 10) // A goroutine starts off with 2k stack
go func() {
var (
args []Value
err error
)
// If there was a panic due to an exceeded quota, we need to end the
// thread and propagate that panic to the calling thread
defer func() {
r := recover()
if r != nil {
switch r.(type) {
case ContextTerminationError:
case threadClose:
// This means we want to close the coroutine, so no panic!
r = nil
default:
panic(r)
}
}
t.end(args, err, r)
}()
args, err = t.getResumeValues()
if err == nil {
next := NewTerminationWith(t.CurrentCont(), 0, true)
err = t.call(c, args, next)
args = next.Etc()
}
}()
}
// Status returns the status of a thread (suspended, running or dead).
func (t *Thread) Status() ThreadStatus {
return t.status
}
// Resume execution of a suspended thread. Its status switches to
// running while its caller's status switches to suspended.
func (t *Thread) Resume(caller *Thread, args []Value) ([]Value, error) {
t.mux.Lock()
if t.status != ThreadSuspended {
t.mux.Unlock()
switch t.status {
case ThreadDead:
return nil, errors.New("cannot resume dead thread")
default:
return nil, errors.New("cannot resume running thread")
}
}
caller.mux.Lock()
if caller.status != ThreadOK {
panic("Caller of thread to resume is not running")
}
t.caller = caller
t.status = ThreadOK
t.mux.Unlock()
caller.mux.Unlock()
t.sendResumeValues(args, nil, nil)
return caller.getResumeValues()
}
// Close a suspended thread. If successful, its status switches to dead. The
// boolean returned is true if it was possible to close the thread (i.e. it was
// suspended or already dead). The error is non-nil if there was an error in
// the cleanup process, or if the thread had already stopped with an error
// previously.
func (t *Thread) Close(caller *Thread) (bool, error) {
t.mux.Lock()
if t.status != ThreadSuspended {
t.mux.Unlock()
switch t.status {
case ThreadDead:
return true, t.closeErr
default:
return false, nil
}
}
caller.mux.Lock()
if caller.status != ThreadOK {
panic("Caller of thread to close is not running")
}
// The thread needs to go back to running to empty its close stack, before
// becoming dead.
t.caller = caller
t.status = ThreadOK
t.mux.Unlock()
caller.mux.Unlock()
t.sendResumeValues(nil, nil, threadClose{})
_, err := caller.getResumeValues()
return true, err
}
// Yield to the caller thread. The yielding thread's status switches to
// suspended. The caller's status must be OK.
func (t *Thread) Yield(args []Value) ([]Value, error) {
t.mux.Lock()
if t.status != ThreadOK {
panic("Thread to yield is not running")
}
caller := t.caller
if caller == nil {
t.mux.Unlock()
return nil, errors.New("cannot yield from main thread")
}
caller.mux.Lock()
if caller.status != ThreadOK {
panic("Caller of thread to yield is not OK")
}
t.status = ThreadSuspended
t.caller = nil
t.mux.Unlock()
caller.mux.Unlock()
caller.sendResumeValues(args, nil, nil)
return t.getResumeValues()
}
// This turns off the thread, cleaning up its close stack. The thread must be
// running.
func (t *Thread) end(args []Value, err error, exception interface{}) {
caller := t.caller
t.mux.Lock()
caller.mux.Lock()
defer t.mux.Unlock()
defer caller.mux.Unlock()
switch {
case t.status != ThreadOK:
panic("Called Thread.end on a non-running thread")
case caller.status != ThreadOK:
panic("Caller thread of ending thread is not OK")
}
close(t.resumeCh)
t.status = ThreadDead
t.caller = nil
err = t.cleanupCloseStack(nil, 0, err) // TODO: not nil
t.closeErr = err
caller.sendResumeValues(args, err, exception)
t.ReleaseBytes(2 << 10) // The goroutine will terminate after this
}
func (t *Thread) call(c Callable, args []Value, next Cont) error {
cont := c.Continuation(t, next)
t.Push(cont, args...)
return t.RunContinuation(cont)
}
func (t *Thread) getResumeValues() ([]Value, error) {
res := <-t.resumeCh
if res.exception != nil {
panic(res.exception)
}
return res.args, res.err
}
func (t *Thread) sendResumeValues(args []Value, err error, exception interface{}) {
t.resumeCh <- valuesError{args: args, err: err, exception: exception}
}
//
// Calling
//
// CallContext pushes a new runtime context on the thread's runtime and attempts
// to run f() in the thread. If the context runs out of resources while f() is
// running, all operations should abort and CallContext should return
// immediately without finalizing pending to-be-closed values.
//
// Otherwise (even if f() returns an error), pending to-be-closed values should
// be finalized.
//
// See quotas.md for details about this API.
func (t *Thread) CallContext(def RuntimeContextDef, f func() error) (ctx RuntimeContext, err error) {
t.PushContext(def)
c, h := t.CurrentCont(), t.closeStack.size()
defer func() {
ctx = t.PopContext()
if r := recover(); r != nil {
t.closeStack.truncate(h) // No resources to run that, so just discard it.
termErr, ok := r.(ContextTerminationError)
if !ok {
panic(r)
}
err = termErr
}
}()
err = t.cleanupCloseStack(c, h, f())
if t.GCPolicy() == IsolateGCPolicy {
t.runFinalizers(t.weakRefPool.ExtractAllMarkedFinalize())
}
if err != nil {
t.setStatus(StatusError)
}
return
}
//
// close stack operations
//
type closeStack struct {
stack []Value
}
func (s closeStack) size() int {
return len(s.stack)
}
func (s *closeStack) push(v Value) {
s.stack = append(s.stack, v)
}
func (s *closeStack) pop() (Value, bool) {
sz := len(s.stack)
if sz == 0 {
return NilValue, false
}
sz--
v := s.stack[sz]
s.stack = s.stack[:sz]
return v, true
}
func (s *closeStack) truncate(h int) {
sz := len(s.stack)
if sz > h {
s.stack = s.stack[:h]
}
}
// Truncate the close stack to size h, calling the __close metamethods in the
// context of the given continuation c and feeding them with the given error.
func (t *Thread) cleanupCloseStack(c Cont, h int, err error) error {
closeStack := &t.closeStack
for closeStack.size() > h {
v, _ := closeStack.pop()
if Truth(v) {
closeErr, ok := Metacall(t, v, "__close", []Value{v, ErrorValue(err)}, NewTerminationWith(c, 0, false))
if !ok {
return errors.New("to be closed value missing a __close metamethod")
}
if closeErr != nil {
err = closeErr
}
}
}
return err
}
//
// messageHandlerCont is a continuation that handles an error message (i.e.
// marks it as handled).
//
type messageHandlerCont struct {
c Cont
err Value
done bool
}
func newMessageHandlerCont(c Cont) *messageHandlerCont {
return &messageHandlerCont{c: c}
}
var _ Cont = (*messageHandlerCont)(nil)
func (c *messageHandlerCont) DebugInfo() *DebugInfo {
return c.c.DebugInfo()
}
func (c *messageHandlerCont) Next() Cont {
return c.c.Next()
}
func (c *messageHandlerCont) | Parent | identifier_name |
|
campaign_loader.rs | //pub fn load_all_campaign_metadata(asset_db: &mut AssetDatabase) {//-> Config {
//}
//loads all data for a given campaign
pub fn load_campaign_data(path: &str, gpu: &mut Gpu, asset_db: &mut AssetDatabase) {
//load all config files under the campaign folder
let mut campaign_config_paths = find_config_files(path);
//for each config file, load it then load the associated asset
while let Some(config_path) = campaign_config_paths.pop() {
let config =
match load_config_task(&config_path).run(gpu) {
Ok(config) => config,
Err(e) => {
warn!("[Asset Loading] Could not load config file. Following error returned: {}", e);
continue //skip this config file. TODO never gets to the error message at end of loop
},
};
//TODO make type case insensitive
let asset_was_loaded = match config.get_str("type").unwrap_or("".to_string()).as_str() {
"sprite sheet" => load_sprite_sheet(&config, &config_path, gpu, asset_db),
"audio clip" => load_audio_clip(&config, &config_path, asset_db),
_ => {
warn!("[Asset Loading] 'Type' key does not exist or value is not supported. Config File Path: {}",
config_path.to_str().unwrap());
false
},
};
//do some extra logging to help bring errors to people's attention.
if asset_was_loaded | else {
error!("[Asset Loading] Failed to load asset relating to config file {}. {}",
config_path.to_str().unwrap(),
"Please review previous warnings."
);
}
}
}
//make sure file is one of the right formats for configuration files
fn is_config_ext(file_path: &Path) -> bool {
match file_path.extension().and_then(OsStr::to_str) {
Some("yml") => true,
_ => false
}
}
//locates all config files under a given path recursively
fn find_config_files(path: &str) -> Vec<PathBuf> {
let mut config_file_paths = vec![];
for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) {
if is_config_ext( entry.path() )
&& entry.path().file_stem().unwrap_or(OsStr::new("")) != "campaign" {
config_file_paths.push(entry.into_path());
}
}
config_file_paths
}
//utility function to create a coffee error since it's a bit of a pain.
fn make_coffee_err_from_str(msg: &str) -> coffee::Error {
coffee::Error::IO(
std::io::Error::new( std::io::ErrorKind::Other, msg )
)
}
//creates a task for loading a config file and its resources
fn load_config_task(file_path: &PathBuf) -> Task<Config> {
//needed so closure below can capture
let path = file_path.clone();
Task::new(move || {
//coerce into string value or return error
let str_path = match path.to_str() {
Some(string) => string,
//Will be logged in the function that runs the task.
None => return Err(
make_coffee_err_from_str("Config path cannot be converted to string.")
),
};
//create the config struct and load in the given file either returning a populated
// config file or a relevant error
let mut config_data = Config::default();
match config_data.merge(File::with_name(&str_path)) {
Ok(_) => Ok(config_data),
//Coerce err to an error type we can return.
//Will be logged in the function that runs the task.
Err(err) => Err( make_coffee_err_from_str( err.to_string().as_str() ) ),
}
})
}
//load sprite sheets
//TODO, maybe should make this return a task also?
fn load_sprite_sheet(config: &Config, config_path: &PathBuf,
gpu: &mut Gpu, asset_db: &mut AssetDatabase) -> bool {
//pull data we need and validate
let file = config.get_str("file");
let rows = config.get_int("rows");
let columns = config.get_int("columns");
let animations = config.get_table("animations");
if file.is_err() || rows.is_err() || columns.is_err() {
let err_msg_head = format!("{} {} {}. {}",
"[Asset Loading]",
"Could not find required config value for sprite sheet type in config file",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
"Error follows: ");
if let Err(err) = file { warn!("{} {}", err_msg_head, err); }
if let Err(err) = rows { warn!("{} {}", err_msg_head, err); }
if let Err(err) = columns { warn!("{} {}", err_msg_head, err); }
return false //config missing required values
}
//process the file path and asset name to the right types
// assume image path is given as relative to config path hence taking the parent as a starting point.
let image_path = match config_path.parent() {
Some(dir_path) => dir_path.join(file.ok().expect("File value is missing while loading.")),
//getting parent from path failed somehow. Shouldn't ever happen naturally.
None => {
warn!("{} {}",
"[Asset Loading] Parent missing from config path when processing",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
);
return false;
},
};
let asset_name = match image_path.clone().into_os_string().into_string() {
Ok(name) => name,
Err(err) => {
warn!("[Asset Loading] {}",
err.into_string().unwrap_or("<Could not convert OsString err into string>".to_string()));
return false //name is not UTF-8 compatible so abort
}
};
//try to load image
let image = match Image::load( image_path.clone() ).run(gpu) {
Ok(image) => image,
Err(err) => {
warn!("[Asset Loading] Could not load Image at {} related to config file {}. Following error returned: {}",
image_path.clone().to_str().unwrap_or("<error could not convert image path to str>"),
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
err,
);
return false //load image failed.
}
};
//create sprite sheet, add animations, then add the new asset to the database
let mut spritesheet = SpriteSheet::new(
image,
rows.ok().expect("row convert error") as u16,
columns.ok().expect("column convert error") as u16,
);
if animations.is_ok() {
for (animation_name, tuple_list) in animations.ok().unwrap().iter() {
match tuple_list.clone().try_into::< Vec<(u16,u16)> >() {
Ok(sprite_pos_array) =>
//TODO might want to do additional checking of data.
// No error is thrown for having an extra value regardless if it is an int or not.
// Error branch will happen if a string is in 1st or 2nd location or if a tuple is
// replaced by something else.
spritesheet.add_animation(animation_name.clone(), sprite_pos_array),
Err(err) => {
warn!("[Asset Loading] Animation {} does not follow form {} in config file {}. Following error returned: {}",
animation_name,
"[ [row_1, col_1], ..., [row_n, col_n] ]",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
err,
);
continue;
}
}
}
}
asset_db.add_asset(asset_name, AssetContainer::Spritesheet(spritesheet));
return true;
}
//load sound clips
//TODO, maybe should make this return a task also?
fn load_audio_clip(config: &Config, config_path: &PathBuf, asset_db: &mut AssetDatabase) -> bool {
//pull data we need and validate
let file = config.get_str("file");
let category = config.get_str("category");
if file.is_err() || category.is_err() {
let err_msg_head = format!("{} {} {}. {}",
"[Asset Loading]",
"Could not find required config value for audio clip type in config file",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
"Error follows: ");
if let Err(err) = file { warn!("{} {}", err_msg_head, err); }
if let Err(err) = category { warn!("{} {}", err_msg_head, err); }
return false //config missing required values
}
//TODO make case insensitive
let clip_category = match category.unwrap().as_str() {
"voice" => ClipCategory::Voice,
"music" => ClipCategory::Music,
"effects" => ClipCategory::Effects,
failed_category => {
warn!("[Asset Loading] Provided audio category '{}' is not a valid option. Related to config | {
info!("[Asset Loading] Loaded asset relating to config file {}",
config_path.to_str().unwrap());
} | conditional_block |
campaign_loader.rs | (path: &str, gpu: &mut Gpu, asset_db: &mut AssetDatabase) {
//load all config files under the campaign folder
let mut campaign_config_paths = find_config_files(path);
//for each config file, load it then load the associated asset
while let Some(config_path) = campaign_config_paths.pop() {
let config =
match load_config_task(&config_path).run(gpu) {
Ok(config) => config,
Err(e) => {
warn!("[Asset Loading] Could not load config file. Following error returned: {}", e);
continue //skip this config file. TODO never gets to the error message at end of loop
},
};
//TODO make type case insensitive
let asset_was_loaded = match config.get_str("type").unwrap_or("".to_string()).as_str() {
"sprite sheet" => load_sprite_sheet(&config, &config_path, gpu, asset_db),
"audio clip" => load_audio_clip(&config, &config_path, asset_db),
_ => {
warn!("[Asset Loading] 'Type' key does not exist or value is not supported. Config File Path: {}",
config_path.to_str().unwrap());
false
},
};
//do some extra logging to help bring errors to people's attention.
if asset_was_loaded {
info!("[Asset Loading] Loaded asset relating to config file {}",
config_path.to_str().unwrap());
} else {
error!("[Asset Loading] Failed to load asset relating to config file {}. {}",
config_path.to_str().unwrap(),
"Please review previous warnings."
);
}
}
}
//make sure file is one of the right formats for configuration files
fn is_config_ext(file_path: &Path) -> bool {
match file_path.extension().and_then(OsStr::to_str) {
Some("yml") => true,
_ => false
}
}
//locates all config files under a given path recursively
fn find_config_files(path: &str) -> Vec<PathBuf> {
let mut config_file_paths = vec![];
for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) {
if is_config_ext( entry.path() )
&& entry.path().file_stem().unwrap_or(OsStr::new("")) != "campaign" {
config_file_paths.push(entry.into_path());
}
}
config_file_paths
}
//utility function to create a coffee error since it's a bit of a pain.
fn make_coffee_err_from_str(msg: &str) -> coffee::Error {
coffee::Error::IO(
std::io::Error::new( std::io::ErrorKind::Other, msg )
)
}
//creates a task for loading a config file and its resources
fn load_config_task(file_path: &PathBuf) -> Task<Config> {
//needed so closure below can capture
let path = file_path.clone();
Task::new(move || {
//coerce into string value or return error
let str_path = match path.to_str() {
Some(string) => string,
//Will be logged in the function that runs the task.
None => return Err(
make_coffee_err_from_str("Config path cannot be converted to string.")
),
};
//create the config struct and load in the given file either returning a populated
// config file or a relevant error
let mut config_data = Config::default();
match config_data.merge(File::with_name(&str_path)) {
Ok(_) => Ok(config_data),
//Coerce err to an error type we can return.
//Will be logged in the function that runs the task.
Err(err) => Err( make_coffee_err_from_str( err.to_string().as_str() ) ),
}
})
}
//load sprite sheets
//TODO, maybe should make this return a task also?
fn load_sprite_sheet(config: &Config, config_path: &PathBuf,
gpu: &mut Gpu, asset_db: &mut AssetDatabase) -> bool {
//pull data we need and validate
let file = config.get_str("file");
let rows = config.get_int("rows");
let columns = config.get_int("columns");
let animations = config.get_table("animations");
if file.is_err() || rows.is_err() || columns.is_err() {
let err_msg_head = format!("{} {} {}. {}",
"[Asset Loading]",
"Could not find required config value for sprite sheet type in config file",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
"Error follows: ");
if let Err(err) = file { warn!("{} {}", err_msg_head, err); }
if let Err(err) = rows { warn!("{} {}", err_msg_head, err); }
if let Err(err) = columns { warn!("{} {}", err_msg_head, err); }
return false //config missing required values
}
//process the file path and asset name to the right types
// assume image path is given as relative to config path hence taking the parent as a starting point.
let image_path = match config_path.parent() {
Some(dir_path) => dir_path.join(file.ok().expect("File value is missing while loading.")),
//getting parent from path failed somehow. Shouldn't ever happen naturally.
None => {
warn!("{} {}",
"[Asset Loading] Parent missing from config path when processing",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
);
return false;
},
};
let asset_name = match image_path.clone().into_os_string().into_string() {
Ok(name) => name,
Err(err) => {
warn!("[Asset Loading] {}",
err.into_string().unwrap_or("<Could not convert OsString err into string>".to_string()));
return false //name is not UTF-8 compatible so abort
}
};
//try to load image
let image = match Image::load( image_path.clone() ).run(gpu) {
Ok(image) => image,
Err(err) => {
warn!("[Asset Loading] Could not load Image at {} related to config file {}. Following error returned: {}",
image_path.clone().to_str().unwrap_or("<error could not convert image path to str>"),
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
err,
);
return false //load image failed.
}
};
//create sprite sheet, add animations, then add the new asset to the database
let mut spritesheet = SpriteSheet::new(
image,
rows.ok().expect("row convert error") as u16,
columns.ok().expect("column convert error") as u16,
);
if animations.is_ok() {
for (animation_name, tuple_list) in animations.ok().unwrap().iter() {
match tuple_list.clone().try_into::< Vec<(u16,u16)> >() {
Ok(sprite_pos_array) =>
//TODO might want to do additional checking of data.
// No error is thrown for having an extra value regardless if it is an int or not.
// Error branch will happen if a string is in 1st or 2nd location or if a tuple is
// replaced by something else.
spritesheet.add_animation(animation_name.clone(), sprite_pos_array),
Err(err) => {
warn!("[Asset Loading] Animation {} does not follow form {} in config file {}. Following error returned: {}",
animation_name,
"[ [row_1, col_1], ..., [row_n, col_n] ]",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
err,
);
continue;
}
}
}
}
asset_db.add_asset(asset_name, AssetContainer::Spritesheet(spritesheet));
return true;
}
//load sound clips
//TODO, maybe should make this return a task also?
fn load_audio_clip(config: &Config, config_path: &PathBuf, asset_db: &mut AssetDatabase) -> bool {
//pull data we need and validate
let file = config.get_str("file");
let category = config.get_str("category");
if file.is_err() || category.is_err() {
let err_msg_head = format!("{} {} {}. {}",
"[Asset Loading]",
"Could not find required config value for audio clip type in config file",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
"Error follows: ");
if let Err(err) = file { warn!("{} {}", err_msg_head, err); }
if let Err(err) = category { warn!("{} {}", err_msg_head, err); }
return false //config missing required values
}
//TODO make case insensitive
let clip_category = match category.unwrap().as_str() {
"voice" => ClipCategory::Voice,
"music" => ClipCategory::Music,
"effects" => ClipCategory::Effects,
failed_category => {
warn!("[Asset Loading] Provided audio category '{}' is not a valid option. Related to config file {}.",
failed_category,
config_path.to_str().unwrap_or("<error could not convert config path to str>"), | );
return false;
}
};
| random_line_split |
|
campaign_loader.rs | pub fn load_all_campaign_metadata(asset_db: &mut AssetDatabase) {//-> Config {
//}
//loads all data for a given campaign
pub fn load_campaign_data(path: &str, gpu: &mut Gpu, asset_db: &mut AssetDatabase) {
//load all config files under the campaign folder
let mut campaign_config_paths = find_config_files(path);
//for each config file, load it then load the associated asset
while let Some(config_path) = campaign_config_paths.pop() {
let config =
match load_config_task(&config_path).run(gpu) {
Ok(config) => config,
Err(e) => {
warn!("[Asset Loading] Could not load config file. Following error returned: {}", e);
continue //skip this config file. TODO never gets to the error message at end of loop
},
};
//TODO make type case insensitive
let asset_was_loaded = match config.get_str("type").unwrap_or("".to_string()).as_str() {
"sprite sheet" => load_sprite_sheet(&config, &config_path, gpu, asset_db),
"audio clip" => load_audio_clip(&config, &config_path, asset_db),
_ => {
warn!("[Asset Loading] 'Type' key does not exist or value is not supported. Config File Path: {}",
config_path.to_str().unwrap());
false
},
};
//do some extra logging to help bring errors to people's attention.
if asset_was_loaded {
info!("[Asset Loading] Loaded asset relating to config file {}",
config_path.to_str().unwrap());
} else {
error!("[Asset Loading] Failed to load asset relating to config file {}. {}",
config_path.to_str().unwrap(),
"Please review previous warnings."
);
}
}
}
//make sure file is one of the right formats for configuration files
fn is_config_ext(file_path: &Path) -> bool {
match file_path.extension().and_then(OsStr::to_str) {
Some("yml") => true,
_ => false
}
}
//locates all config files under a given path recursively
fn find_config_files(path: &str) -> Vec<PathBuf> {
let mut config_file_paths = vec![];
for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) {
if is_config_ext( entry.path() )
&& entry.path().file_stem().unwrap_or(OsStr::new("")) != "campaign" {
config_file_paths.push(entry.into_path());
}
}
config_file_paths
}
//utility function to create a coffee error since it's a bit of a pain.
fn make_coffee_err_from_str(msg: &str) -> coffee::Error |
//creates a task for loading a config file and its resources
fn load_config_task(file_path: &PathBuf) -> Task<Config> {
//needed so closure below can capture
let path = file_path.clone();
Task::new(move || {
//coerce into string value or return error
let str_path = match path.to_str() {
Some(string) => string,
//Will be logged in the function that runs the task.
None => return Err(
make_coffee_err_from_str("Config path cannot be converted to string.")
),
};
//create the config struct and load in the given file either returning a populated
// config file or a relevant error
let mut config_data = Config::default();
match config_data.merge(File::with_name(&str_path)) {
Ok(_) => Ok(config_data),
//Coerce err to an error type we can return.
//Will be logged in the function that runs the task.
Err(err) => Err( make_coffee_err_from_str( err.to_string().as_str() ) ),
}
})
}
//load sprite sheets
//TODO, maybe should make this return a task also?
fn load_sprite_sheet(config: &Config, config_path: &PathBuf,
gpu: &mut Gpu, asset_db: &mut AssetDatabase) -> bool {
//pull data we need and validate
let file = config.get_str("file");
let rows = config.get_int("rows");
let columns = config.get_int("columns");
let animations = config.get_table("animations");
if file.is_err() || rows.is_err() || columns.is_err() {
let err_msg_head = format!("{} {} {}. {}",
"[Asset Loading]",
"Could not find required config value for sprite sheet type in config file",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
"Error follows: ");
if let Err(err) = file { warn!("{} {}", err_msg_head, err); }
if let Err(err) = rows { warn!("{} {}", err_msg_head, err); }
if let Err(err) = columns { warn!("{} {}", err_msg_head, err); }
return false //config missing required values
}
//process the file path and asset name to the right types
// assume image path is given as relative to config path hence taking the parent as a starting point.
let image_path = match config_path.parent() {
Some(dir_path) => dir_path.join(file.ok().expect("File value is missing while loading.")),
//getting parent from path failed somehow. Shouldn't ever happen naturally.
None => {
warn!("{} {}",
"[Asset Loading] Parent missing from config path when processing",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
);
return false;
},
};
let asset_name = match image_path.clone().into_os_string().into_string() {
Ok(name) => name,
Err(err) => {
warn!("[Asset Loading] {}",
err.into_string().unwrap_or("<Could not convert OsString err into string>".to_string()));
return false //name is not UTF-8 compatible so abort
}
};
//try to load image
let image = match Image::load( image_path.clone() ).run(gpu) {
Ok(image) => image,
Err(err) => {
warn!("[Asset Loading] Could not load Image at {} related to config file {}. Following error returned: {}",
image_path.clone().to_str().unwrap_or("<error could not convert image path to str>"),
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
err,
);
return false //load image failed.
}
};
//create sprite sheet, add animations, then add the new asset to the database
let mut spritesheet = SpriteSheet::new(
image,
rows.ok().expect("row convert error") as u16,
columns.ok().expect("column convert error") as u16,
);
if animations.is_ok() {
for (animation_name, tuple_list) in animations.ok().unwrap().iter() {
match tuple_list.clone().try_into::< Vec<(u16,u16)> >() {
Ok(sprite_pos_array) =>
//TODO might want to do additional checking of data.
// No error is thrown for having an extra value regardless if it is an int or not.
// Error branch will happen if a string is in 1st or 2nd location or if a tuple is
// replaced by something else.
spritesheet.add_animation(animation_name.clone(), sprite_pos_array),
Err(err) => {
warn!("[Asset Loading] Animation {} does not follow form {} in config file {}. Following error returned: {}",
animation_name,
"[ [row_1, col_1], ..., [row_n, col_n] ]",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
err,
);
continue;
}
}
}
}
asset_db.add_asset(asset_name, AssetContainer::Spritesheet(spritesheet));
return true;
}
//load sound clips
//TODO, maybe should make this return a task also?
fn load_audio_clip(config: &Config, config_path: &PathBuf, asset_db: &mut AssetDatabase) -> bool {
//pull data we need and validate
let file = config.get_str("file");
let category = config.get_str("category");
if file.is_err() || category.is_err() {
let err_msg_head = format!("{} {} {}. {}",
"[Asset Loading]",
"Could not find required config value for audio clip type in config file",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
"Error follows: ");
if let Err(err) = file { warn!("{} {}", err_msg_head, err); }
if let Err(err) = category { warn!("{} {}", err_msg_head, err); }
return false //config missing required values
}
//TODO make case insensitive
let clip_category = match category.unwrap().as_str() {
"voice" => ClipCategory::Voice,
"music" => ClipCategory::Music,
"effects" => ClipCategory::Effects,
failed_category => {
warn!("[Asset Loading] Provided audio category '{}' is not a valid option. Related to config | {
coffee::Error::IO(
std::io::Error::new( std::io::ErrorKind::Other, msg )
)
} | identifier_body |
campaign_loader.rs | //pub fn load_all_campaign_metadata(asset_db: &mut AssetDatabase) {//-> Config {
//}
//loads all data for a given campaign
pub fn | (path: &str, gpu: &mut Gpu, asset_db: &mut AssetDatabase) {
//load all config files under the campaign folder
let mut campaign_config_paths = find_config_files(path);
//for each config file, load it then load the associated asset
while let Some(config_path) = campaign_config_paths.pop() {
let config =
match load_config_task(&config_path).run(gpu) {
Ok(config) => config,
Err(e) => {
warn!("[Asset Loading] Could not load config file. Following error returned: {}", e);
continue //skip this config file. TODO never gets to the error message at end of loop
},
};
//TODO make type case insensitive
let asset_was_loaded = match config.get_str("type").unwrap_or("".to_string()).as_str() {
"sprite sheet" => load_sprite_sheet(&config, &config_path, gpu, asset_db),
"audio clip" => load_audio_clip(&config, &config_path, asset_db),
_ => {
warn!("[Asset Loading] 'Type' key does not exist or value is not supported. Config File Path: {}",
config_path.to_str().unwrap());
false
},
};
//do some extra logging to help bring errors to people's attention.
if asset_was_loaded {
info!("[Asset Loading] Loaded asset relating to config file {}",
config_path.to_str().unwrap());
} else {
error!("[Asset Loading] Failed to load asset relating to config file {}. {}",
config_path.to_str().unwrap(),
"Please review previous warnings."
);
}
}
}
//make sure file is one of the right formats for configuration files
fn is_config_ext(file_path: &Path) -> bool {
match file_path.extension().and_then(OsStr::to_str) {
Some("yml") => true,
_ => false
}
}
//locates all config files under a given path recursively
fn find_config_files(path: &str) -> Vec<PathBuf> {
let mut config_file_paths = vec![];
for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) {
if is_config_ext( entry.path() )
&& entry.path().file_stem().unwrap_or(OsStr::new("")) != "campaign" {
config_file_paths.push(entry.into_path());
}
}
config_file_paths
}
//utility function to create a coffee error since it's a bit of a pain.
fn make_coffee_err_from_str(msg: &str) -> coffee::Error {
coffee::Error::IO(
std::io::Error::new( std::io::ErrorKind::Other, msg )
)
}
//creates a task for loading a config file and its resources
fn load_config_task(file_path: &PathBuf) -> Task<Config> {
//needed so closure below can capture
let path = file_path.clone();
Task::new(move || {
//coerce into string value or return error
let str_path = match path.to_str() {
Some(string) => string,
//Will be logged in the function that runs the task.
None => return Err(
make_coffee_err_from_str("Config path cannot be converted to string.")
),
};
//create the config struct and load in the given file either returning a populated
// config file or a relevant error
let mut config_data = Config::default();
match config_data.merge(File::with_name(&str_path)) {
Ok(_) => Ok(config_data),
//Coerce err to an error type we can return.
//Will be logged in the function that runs the task.
Err(err) => Err( make_coffee_err_from_str( err.to_string().as_str() ) ),
}
})
}
//load sprite sheets
//TODO, maybe should make this return a task also?
fn load_sprite_sheet(config: &Config, config_path: &PathBuf,
gpu: &mut Gpu, asset_db: &mut AssetDatabase) -> bool {
//pull data we need and validate
let file = config.get_str("file");
let rows = config.get_int("rows");
let columns = config.get_int("columns");
let animations = config.get_table("animations");
if file.is_err() || rows.is_err() || columns.is_err() {
let err_msg_head = format!("{} {} {}. {}",
"[Asset Loading]",
"Could not find required config value for sprite sheet type in config file",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
"Error follows: ");
if let Err(err) = file { warn!("{} {}", err_msg_head, err); }
if let Err(err) = rows { warn!("{} {}", err_msg_head, err); }
if let Err(err) = columns { warn!("{} {}", err_msg_head, err); }
return false //config missing required values
}
//process the file path and asset name to the right types
// assume image path is given as relative to config path hence taking the parent as a starting point.
let image_path = match config_path.parent() {
Some(dir_path) => dir_path.join(file.ok().expect("File value is missing while loading.")),
//getting parent from path failed somehow. Shouldn't ever happen naturally.
None => {
warn!("{} {}",
"[Asset Loading] Parent missing from config path when processing",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
);
return false;
},
};
let asset_name = match image_path.clone().into_os_string().into_string() {
Ok(name) => name,
Err(err) => {
warn!("[Asset Loading] {}",
err.into_string().unwrap_or("<Could not convert OsString err into string>".to_string()));
return false //name is not UTF-8 compatible so abort
}
};
//try to load image
let image = match Image::load( image_path.clone() ).run(gpu) {
Ok(image) => image,
Err(err) => {
warn!("[Asset Loading] Could not load Image at {} related to config file {}. Following error returned: {}",
image_path.clone().to_str().unwrap_or("<error could not convert image path to str>"),
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
err,
);
return false //load image failed.
}
};
//create sprite sheet, add animations, then add the new asset to the database
let mut spritesheet = SpriteSheet::new(
image,
rows.ok().expect("row convert error") as u16,
columns.ok().expect("column convert error") as u16,
);
if animations.is_ok() {
for (animation_name, tuple_list) in animations.ok().unwrap().iter() {
match tuple_list.clone().try_into::< Vec<(u16,u16)> >() {
Ok(sprite_pos_array) =>
//TODO might want to do additional checking of data.
// No error is thrown for having an extra value regardless if it is an int or not.
// Error branch will happen if a string is in 1st or 2nd location or if a tuple is
// replaced by something else.
spritesheet.add_animation(animation_name.clone(), sprite_pos_array),
Err(err) => {
warn!("[Asset Loading] Animation {} does not follow form {} in config file {}. Following error returned: {}",
animation_name,
"[ [row_1, col_1], ..., [row_n, col_n] ]",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
err,
);
continue;
}
}
}
}
asset_db.add_asset(asset_name, AssetContainer::Spritesheet(spritesheet));
return true;
}
//load sound clips
//TODO, maybe should make this return a task also?
fn load_audio_clip(config: &Config, config_path: &PathBuf, asset_db: &mut AssetDatabase) -> bool {
//pull data we need and validate
let file = config.get_str("file");
let category = config.get_str("category");
if file.is_err() || category.is_err() {
let err_msg_head = format!("{} {} {}. {}",
"[Asset Loading]",
"Could not find required config value for audio clip type in config file",
config_path.to_str().unwrap_or("<error could not convert config path to str>"),
"Error follows: ");
if let Err(err) = file { warn!("{} {}", err_msg_head, err); }
if let Err(err) = category { warn!("{} {}", err_msg_head, err); }
return false //config missing required values
}
//TODO make case insensitive
let clip_category = match category.unwrap().as_str() {
"voice" => ClipCategory::Voice,
"music" => ClipCategory::Music,
"effects" => ClipCategory::Effects,
failed_category => {
warn!("[Asset Loading] Provided audio category '{}' is not a valid option. Related to config | load_campaign_data | identifier_name |
encryption_properties.go | .
type Algorithm struct {
Algo Cipher
Aad struct {
AadPrefix []byte
AadFileUnique []byte
SupplyAadPrefix bool
}
}
// ToThrift returns an instance to be used for serializing when writing a file.
func (e Algorithm) ToThrift() *format.EncryptionAlgorithm {
if e.Algo == AesGcm {
return &format.EncryptionAlgorithm{
AES_GCM_V1: &format.AesGcmV1{
AadPrefix: e.Aad.AadPrefix,
AadFileUnique: e.Aad.AadFileUnique,
SupplyAadPrefix: &e.Aad.SupplyAadPrefix,
},
}
}
return &format.EncryptionAlgorithm{
AES_GCM_CTR_V1: &format.AesGcmCtrV1{
AadPrefix: e.Aad.AadPrefix,
AadFileUnique: e.Aad.AadFileUnique,
SupplyAadPrefix: &e.Aad.SupplyAadPrefix,
},
}
}
// AlgorithmFromThrift converts the thrift object to the Algorithm struct for easier usage.
func AlgorithmFromThrift(enc *format.EncryptionAlgorithm) (ret Algorithm) {
if enc.IsSetAES_GCM_V1() {
ret.Algo = AesGcm
ret.Aad.AadFileUnique = enc.AES_GCM_V1.AadFileUnique
ret.Aad.AadPrefix = enc.AES_GCM_V1.AadPrefix
ret.Aad.SupplyAadPrefix = *enc.AES_GCM_V1.SupplyAadPrefix
return
}
ret.Algo = AesCtr
ret.Aad.AadFileUnique = enc.AES_GCM_CTR_V1.AadFileUnique
ret.Aad.AadPrefix = enc.AES_GCM_CTR_V1.AadPrefix
ret.Aad.SupplyAadPrefix = *enc.AES_GCM_CTR_V1.SupplyAadPrefix
return
}
// FileEncryptionProperties describe how to encrypt a parquet file when writing data.
type FileEncryptionProperties struct {
alg Algorithm
footerKey string
footerKeyMetadata string
encryptedFooter bool
fileAad string
utilized bool
storeAadPrefixInFile bool
aadPrefix string
encryptedCols ColumnPathToEncryptionPropsMap
}
// EncryptedFooter returns if the footer for this file should be encrypted or left in plaintext.
func (fe *FileEncryptionProperties) EncryptedFooter() bool { return fe.encryptedFooter }
// Algorithm returns the description of how we will perform the encryption, the algorithm, prefixes, and so on.
func (fe *FileEncryptionProperties) Algorithm() Algorithm { return fe.alg }
// FooterKey returns the actual key used to encrypt the footer if it is encrypted, or to encrypt any columns which
// will be encrypted with it rather than their own keys.
func (fe *FileEncryptionProperties) FooterKey() string { return fe.footerKey }
// FooterKeyMetadata is used for retrieving a key from the key retriever in order to set the footer key
func (fe *FileEncryptionProperties) FooterKeyMetadata() string { return fe.footerKeyMetadata }
// FileAad returns the aad identification to be used at the file level which gets concatenated with the row and column
// information for encrypting data.
func (fe *FileEncryptionProperties) FileAad() string { return fe.fileAad }
// IsUtilized returns whether or not this instance has been used to encrypt a file
func (fe *FileEncryptionProperties) IsUtilized() bool { return fe.utilized }
// SetUtilized is called after writing a file. A FileEncryptionProperties object can be used for writing one file only,
// the encryption keys will be wiped out upon completion of writing the file.
func (fe *FileEncryptionProperties) SetUtilized() { fe.utilized = true }
// EncryptedColumns returns the mapping of column paths to column encryption properties
func (fe *FileEncryptionProperties) EncryptedColumns() ColumnPathToEncryptionPropsMap {
return fe.encryptedCols
}
// ColumnEncryptionProperties returns the properties for encrypting a given column.
//
// This may be nil for columns that aren't encrypted or may be default properties.
func (fe *FileEncryptionProperties) ColumnEncryptionProperties(path string) *ColumnEncryptionProperties {
if len(fe.encryptedCols) == 0 {
return NewColumnEncryptionProperties(path)
}
if c, ok := fe.encryptedCols[path]; ok {
return c
}
return nil
}
// Clone allows returning an identical property setup for another file with the option to update the aadPrefix,
// (if given the empty string, the current aad prefix will be used) since a single instance can only be used
// to encrypt one file before wiping out the keys.
func (fe *FileEncryptionProperties) Clone(newAadPrefix string) *FileEncryptionProperties {
footerKeyCopy := fe.footerKey
encryptedColsCopy := make(ColumnPathToEncryptionPropsMap)
for k, v := range fe.encryptedCols {
encryptedColsCopy[k] = v.Clone()
}
if newAadPrefix == "" {
newAadPrefix = fe.aadPrefix
}
opts := []EncryptOption{
WithAlg(fe.alg.Algo), WithFooterKeyMetadata(fe.footerKeyMetadata),
WithAadPrefix(newAadPrefix), WithEncryptedColumns(encryptedColsCopy),
}
if !fe.encryptedFooter {
opts = append(opts, WithPlaintextFooter())
}
if !fe.storeAadPrefixInFile {
opts = append(opts, DisableAadPrefixStorage())
}
return NewFileEncryptionProperties(footerKeyCopy, opts...)
}
// WipeOutEncryptionKeys clears all of the encryption keys for this and the columns
func (fe *FileEncryptionProperties) WipeOutEncryptionKeys() {
fe.footerKey = ""
for _, elem := range fe.encryptedCols {
elem.WipeOutEncryptionKey()
}
}
type configEncrypt struct {
cipher Cipher
encryptFooter bool
keyMetadata string
aadprefix string
storeAadPrefixInFile bool
encryptedCols ColumnPathToEncryptionPropsMap
}
// EncryptOption is used for specifying values when building FileEncryptionProperties
type EncryptOption func(*configEncrypt)
// WithPlaintextFooter sets the writer to write the footer in plain text, otherwise the footer will be encrypted
// too (which is the default behavior).
func WithPlaintextFooter() EncryptOption {
return func(cfg *configEncrypt) {
cfg.encryptFooter = false
}
}
// WithAlg sets the encryption algorithm to utilize. (default is AesGcm)
func WithAlg(cipher Cipher) EncryptOption {
return func(cfg *configEncrypt) {
cfg.cipher = cipher
}
}
// WithFooterKeyID sets a key retrieval metadata to use (converted from string); this must be a UTF-8 string.
//
// use either WithFooterKeyID or WithFooterKeyMetadata, not both.
func WithFooterKeyID(key string) EncryptOption {
if !utf8.ValidString(key) {
panic("parquet: footer key id should be UTF8 encoded")
}
return WithFooterKeyMetadata(key)
}
// WithFooterKeyMetadata sets a key retrieval metadata to use for getting the key.
//
// Use either WithFooterKeyID or WithFooterKeyMetadata, not both.
func WithFooterKeyMetadata(keyMeta string) EncryptOption {
return func(cfg *configEncrypt) {
if keyMeta != "" {
cfg.keyMetadata = keyMeta
}
}
}
// WithAadPrefix sets the AAD prefix to use for encryption and by default will store it in the file
func WithAadPrefix(aadPrefix string) EncryptOption {
return func(cfg *configEncrypt) {
if aadPrefix != "" {
cfg.aadprefix = aadPrefix
cfg.storeAadPrefixInFile = true
}
}
}
// DisableAadPrefixStorage will set the properties to not store the AadPrefix in the file. If this isn't called
// and the AadPrefix is set, then it will be stored. This needs to be in the options *after* WithAadPrefix to have an effect.
func DisableAadPrefixStorage() EncryptOption {
return func(cfg *configEncrypt) {
cfg.storeAadPrefixInFile = false
}
}
// WithEncryptedColumns sets the map of columns and their properties (keys etc.) If not called, then all columns will
// be encrypted with the footer key. If called, then columns not in the map will be left unencrypted.
func WithEncryptedColumns(encrypted ColumnPathToEncryptionPropsMap) EncryptOption {
none := func(*configEncrypt) {}
if len(encrypted) == 0 {
return none
}
return func(cfg *configEncrypt) {
if len(cfg.encryptedCols) != 0 {
panic("column properties already set")
}
for _, v := range encrypted {
if v.IsUtilized() {
panic("column properties utilized in another file")
}
v.SetUtilized()
}
cfg.encryptedCols = encrypted
}
}
// NewFileEncryptionProperties returns a new File Encryption description object using the options provided.
func NewFileEncryptionProperties(footerKey string, opts ...EncryptOption) *FileEncryptionProperties {
var cfg configEncrypt
cfg.cipher = DefaultEncryptionAlgorithm
cfg.encryptFooter = DefaultEncryptedFooter
for _, o := range opts | {
o(&cfg)
} | conditional_block |
|
encryption_properties.go | ryptionProperties{
Verifier: cfg.verifier,
footerKey: cfg.footerKey,
checkPlaintextFooterIntegrity: cfg.checkFooterIntegrity,
KeyRetriever: cfg.retriever,
aadPrefix: cfg.aadPrefix,
columnDecryptProps: cfg.colDecrypt,
plaintextAllowed: cfg.plaintextAllowed,
utilized: false,
}
}
// ColumnKey returns the key to be used for decrypting the provided column.
func (fd *FileDecryptionProperties) ColumnKey(path string) string {
if d, ok := fd.columnDecryptProps[path]; ok {
if d != nil {
return d.Key()
}
}
return ""
}
// FooterKey returns the key utilized for decrypting the Footer if encrypted and any columns that are encrypted with
// the footer key.
func (fd *FileDecryptionProperties) FooterKey() string { return fd.footerKey }
// AadPrefix returns the prefix to be supplied for constructing the identification strings when decrypting
func (fd *FileDecryptionProperties) AadPrefix() string { return fd.aadPrefix }
// PlaintextFooterIntegrity returns whether or not an integrity check will be performed on a plaintext footer for an
// encrypted file.
func (fd *FileDecryptionProperties) PlaintextFooterIntegrity() bool {
return fd.checkPlaintextFooterIntegrity
}
// PlaintextFilesAllowed returns whether or not this instance of decryption properties are allowed on a plaintext file.
func (fd *FileDecryptionProperties) PlaintextFilesAllowed() bool { return fd.plaintextAllowed }
// SetUtilized is called to mark this instance as utilized once it is used to read a file. A single instance
// can be used for reading one file only. Setting this ensures the keys will be wiped out upon completion of file reading.
func (fd *FileDecryptionProperties) SetUtilized() { fd.utilized = true }
// IsUtilized returns whether or not this instance has been used to decrypt a file. If the footer key and prefix are
// empty and there are no column decryption properties, then this is always false.
func (fd *FileDecryptionProperties) IsUtilized() bool {
if fd.footerKey == "" && len(fd.columnDecryptProps) == 0 && fd.aadPrefix == "" {
return false
}
return fd.utilized
}
// WipeOutDecryptionKeys will clear all the keys for this instance including the column level ones, this will be called
// after this instance has been utilized.
func (fd *FileDecryptionProperties) WipeOutDecryptionKeys() {
fd.footerKey = ""
for _, cd := range fd.columnDecryptProps {
cd.WipeOutDecryptionKey()
}
}
// Clone returns a new instance of these properties, changing the prefix if set (keeping the same prefix if left empty)
func (fd *FileDecryptionProperties) Clone(newAadPrefix string) *FileDecryptionProperties {
keyCopy := fd.footerKey
colDecryptMapCopy := make(ColumnPathToDecryptionPropsMap)
for k, v := range fd.columnDecryptProps {
colDecryptMapCopy[k] = v.Clone()
}
if newAadPrefix == "" {
newAadPrefix = fd.aadPrefix
}
return &FileDecryptionProperties{
footerKey: keyCopy,
KeyRetriever: fd.KeyRetriever,
checkPlaintextFooterIntegrity: fd.checkPlaintextFooterIntegrity,
Verifier: fd.Verifier,
columnDecryptProps: colDecryptMapCopy,
aadPrefix: newAadPrefix,
plaintextAllowed: fd.plaintextAllowed,
utilized: false,
}
}
type fileDecryptConfig struct {
footerKey string
aadPrefix string
verifier AADPrefixVerifier
colDecrypt ColumnPathToDecryptionPropsMap
retriever DecryptionKeyRetriever
checkFooterIntegrity bool
plaintextAllowed bool
}
// FileDecryptionOption is how to supply options to constructing a new FileDecryptionProperties instance.
type FileDecryptionOption func(*fileDecryptConfig)
// WithFooterKey sets an explicit footer key. If Applied on a file that contains footer key
// metadata the metadata will be ignored, the footer will be decrypted/verified with this key.
//
// If the explicit key is not set, footer key will be fetched from the key retriever.
// With explicit keys or AAD prefix, a new encryption properties object must be created for each
// encrypted file.
//
// Explicit encryption keys (footer and column) are cloned.
// Upon completion of file reading, the cloned encryption keys in the properties will be wiped out
// Caller is responsible for wiping out the input key array
// footer key length must be either 16, 24, or 32 bytes
func WithFooterKey(key string) FileDecryptionOption {
return func(cfg *fileDecryptConfig) {
if key != "" {
cfg.footerKey = key
}
}
}
// WithPrefixVerifier supplies a verifier object to use for verifying the AAD Prefixes stored in the file.
func WithPrefixVerifier(verifier AADPrefixVerifier) FileDecryptionOption {
return func(cfg *fileDecryptConfig) {
if verifier != nil {
cfg.verifier = verifier
}
}
}
// WithColumnKeys sets explicit column keys.
//
// It's also possible to set a key retriever on this property object.
//
// Upon file decryption, availability of explicit keys is checked before invocation
// of the retriever callback.
//
// If an explicit key is available for a footer or a column, its key metadata will be ignored.
func WithColumnKeys(decrypt ColumnPathToDecryptionPropsMap) FileDecryptionOption {
return func(cfg *fileDecryptConfig) {
if len(decrypt) == 0 {
return
}
if len(cfg.colDecrypt) != 0 {
panic("column properties already set")
}
for _, v := range decrypt {
if v.IsUtilized() {
panic("parquet: column properties utilized in another file")
}
v.SetUtilized()
}
cfg.colDecrypt = decrypt
}
}
// WithKeyRetriever sets a key retriever callback. It's also possible to set explicit footer or column keys.
func WithKeyRetriever(retriever DecryptionKeyRetriever) FileDecryptionOption {
return func(cfg *fileDecryptConfig) {
if retriever != nil {
cfg.retriever = retriever
}
}
}
// DisableFooterSignatureVerification skips integrity verification of plaintext footers.
//
// If not called, integrity of plaintext footers will be checked at runtime, and the reader will panic
// if the footer signing key is not available
// or if the footer content and signature don't match.
func DisableFooterSignatureVerification() FileDecryptionOption {
return func(cfg *fileDecryptConfig) {
cfg.checkFooterIntegrity = false
}
}
// WithPlaintextAllowed sets allowing plaintext files.
//
// By default, reading plaintext (unencrypted) files is not allowed when using
// a decryptor, in order to detect files that were left unencrypted by mistake.
//
// However, the default behavior can be overridden by using this method.
func WithPlaintextAllowed() FileDecryptionOption {
return func(cfg *fileDecryptConfig) {
cfg.plaintextAllowed = true
}
}
// WithDecryptAadPrefix explicitly supplies the file aad prefix.
//
// A must when a prefix is used for file encryption, but not stored in the file.
func WithDecryptAadPrefix(prefix string) FileDecryptionOption {
return func(cfg *fileDecryptConfig) {
if prefix != "" {
cfg.aadPrefix = prefix
}
}
}
// Algorithm describes how something was encrypted, representing the EncryptionAlgorithm object from the
// parquet.thrift file.
type Algorithm struct {
Algo Cipher
Aad struct {
AadPrefix []byte
AadFileUnique []byte
SupplyAadPrefix bool
}
}
// ToThrift returns an instance to be used for serializing when writing a file.
func (e Algorithm) ToThrift() *format.EncryptionAlgorithm {
if e.Algo == AesGcm {
return &format.EncryptionAlgorithm{
AES_GCM_V1: &format.AesGcmV1{
AadPrefix: e.Aad.AadPrefix,
AadFileUnique: e.Aad.AadFileUnique,
SupplyAadPrefix: &e.Aad.SupplyAadPrefix,
},
}
}
return &format.EncryptionAlgorithm{
AES_GCM_CTR_V1: &format.AesGcmCtrV1{
AadPrefix: e.Aad.AadPrefix,
AadFileUnique: e.Aad.AadFileUnique,
SupplyAadPrefix: &e.Aad.SupplyAadPrefix,
},
}
}
// AlgorithmFromThrift converts the thrift object to the Algorithm struct for easier usage.
func AlgorithmFromThrift(enc *format.EncryptionAlgorithm) (ret Algorithm) {
if enc.IsSetAES_GCM_V1() {
ret.Algo = AesGcm
ret.Aad.AadFileUnique = enc.AES_GCM_V1.AadFileUnique
ret.Aad.AadPrefix = enc.AES_GCM_V1.AadPrefix | ret.Aad.SupplyAadPrefix = *enc.AES_GCM_V1.SupplyAadPrefix
return | random_line_split |
|
encryption_properties.go | ) {
if len(decrypt) == 0 {
return
}
if len(cfg.colDecrypt) != 0 {
panic("column properties already set")
}
for _, v := range decrypt {
if v.IsUtilized() {
panic("parquet: column properties utilized in another file")
}
v.SetUtilized()
}
cfg.colDecrypt = decrypt
}
}
// WithKeyRetriever sets a key retriever callback. It's also possible to set explicit footer or column keys.
func WithKeyRetriever(retriever DecryptionKeyRetriever) FileDecryptionOption {
return func(cfg *fileDecryptConfig) {
if retriever != nil {
cfg.retriever = retriever
}
}
}
// DisableFooterSignatureVerification skips integrity verification of plaintext footers.
//
// If not called, integrity of plaintext footers will be checked at runtime, and the reader will panic
// if the footer signing key is not available
// or if the footer content and signature don't match.
func DisableFooterSignatureVerification() FileDecryptionOption {
return func(cfg *fileDecryptConfig) {
cfg.checkFooterIntegrity = false
}
}
// WithPlaintextAllowed sets allowing plaintext files.
//
// By default, reading plaintext (unencrypted) files is not allowed when using
// a decryptor, in order to detect files that were left unencrypted by mistake.
//
// However, the default behavior can be overridden by using this method.
func WithPlaintextAllowed() FileDecryptionOption {
return func(cfg *fileDecryptConfig) {
cfg.plaintextAllowed = true
}
}
// WithDecryptAadPrefix explicitly supplies the file aad prefix.
//
// A must when a prefix is used for file encryption, but not stored in the file.
func WithDecryptAadPrefix(prefix string) FileDecryptionOption {
return func(cfg *fileDecryptConfig) {
if prefix != "" {
cfg.aadPrefix = prefix
}
}
}
// Algorithm describes how something was encrypted, representing the EncryptionAlgorithm object from the
// parquet.thrift file.
type Algorithm struct {
Algo Cipher
Aad struct {
AadPrefix []byte
AadFileUnique []byte
SupplyAadPrefix bool
}
}
// ToThrift returns an instance to be used for serializing when writing a file.
func (e Algorithm) ToThrift() *format.EncryptionAlgorithm {
if e.Algo == AesGcm {
return &format.EncryptionAlgorithm{
AES_GCM_V1: &format.AesGcmV1{
AadPrefix: e.Aad.AadPrefix,
AadFileUnique: e.Aad.AadFileUnique,
SupplyAadPrefix: &e.Aad.SupplyAadPrefix,
},
}
}
return &format.EncryptionAlgorithm{
AES_GCM_CTR_V1: &format.AesGcmCtrV1{
AadPrefix: e.Aad.AadPrefix,
AadFileUnique: e.Aad.AadFileUnique,
SupplyAadPrefix: &e.Aad.SupplyAadPrefix,
},
}
}
// AlgorithmFromThrift converts the thrift object to the Algorithm struct for easier usage.
func AlgorithmFromThrift(enc *format.EncryptionAlgorithm) (ret Algorithm) {
if enc.IsSetAES_GCM_V1() {
ret.Algo = AesGcm
ret.Aad.AadFileUnique = enc.AES_GCM_V1.AadFileUnique
ret.Aad.AadPrefix = enc.AES_GCM_V1.AadPrefix
ret.Aad.SupplyAadPrefix = *enc.AES_GCM_V1.SupplyAadPrefix
return
}
ret.Algo = AesCtr
ret.Aad.AadFileUnique = enc.AES_GCM_CTR_V1.AadFileUnique
ret.Aad.AadPrefix = enc.AES_GCM_CTR_V1.AadPrefix
ret.Aad.SupplyAadPrefix = *enc.AES_GCM_CTR_V1.SupplyAadPrefix
return
}
// FileEncryptionProperties describe how to encrypt a parquet file when writing data.
type FileEncryptionProperties struct {
alg Algorithm
footerKey string
footerKeyMetadata string
encryptedFooter bool
fileAad string
utilized bool
storeAadPrefixInFile bool
aadPrefix string
encryptedCols ColumnPathToEncryptionPropsMap
}
// EncryptedFooter returns if the footer for this file should be encrypted or left in plaintext.
func (fe *FileEncryptionProperties) EncryptedFooter() bool { return fe.encryptedFooter }
// Algorithm returns the description of how we will perform the encryption, the algorithm, prefixes, and so on.
func (fe *FileEncryptionProperties) Algorithm() Algorithm { return fe.alg }
// FooterKey returns the actual key used to encrypt the footer if it is encrypted, or to encrypt any columns which
// will be encrypted with it rather than their own keys.
func (fe *FileEncryptionProperties) FooterKey() string { return fe.footerKey }
// FooterKeyMetadata is used for retrieving a key from the key retriever in order to set the footer key
func (fe *FileEncryptionProperties) FooterKeyMetadata() string { return fe.footerKeyMetadata }
// FileAad returns the aad identification to be used at the file level which gets concatenated with the row and column
// information for encrypting data.
func (fe *FileEncryptionProperties) FileAad() string { return fe.fileAad }
// IsUtilized returns whether or not this instance has been used to encrypt a file
func (fe *FileEncryptionProperties) IsUtilized() bool { return fe.utilized }
// SetUtilized is called after writing a file. A FileEncryptionProperties object can be used for writing one file only;
// the encryption keys will be wiped out upon completion of writing the file.
func (fe *FileEncryptionProperties) SetUtilized() { fe.utilized = true }
// EncryptedColumns returns the mapping of column paths to column encryption properties
func (fe *FileEncryptionProperties) EncryptedColumns() ColumnPathToEncryptionPropsMap {
return fe.encryptedCols
}
// ColumnEncryptionProperties returns the properties for encrypting a given column.
//
// This may be nil for columns that aren't encrypted or may be default properties.
func (fe *FileEncryptionProperties) ColumnEncryptionProperties(path string) *ColumnEncryptionProperties {
if len(fe.encryptedCols) == 0 {
return NewColumnEncryptionProperties(path)
}
if c, ok := fe.encryptedCols[path]; ok {
return c
}
return nil
}
// Clone returns an identical property setup for another file, with the option to update the aadPrefix
// (if given the empty string, the current aad prefix is kept). This is needed because a single instance
// can only be used to encrypt one file before its keys are wiped out.
func (fe *FileEncryptionProperties) Clone(newAadPrefix string) *FileEncryptionProperties {
footerKeyCopy := fe.footerKey
encryptedColsCopy := make(ColumnPathToEncryptionPropsMap)
for k, v := range fe.encryptedCols {
encryptedColsCopy[k] = v.Clone()
}
if newAadPrefix == "" {
newAadPrefix = fe.aadPrefix
}
opts := []EncryptOption{
WithAlg(fe.alg.Algo), WithFooterKeyMetadata(fe.footerKeyMetadata),
WithAadPrefix(newAadPrefix), WithEncryptedColumns(encryptedColsCopy),
}
if !fe.encryptedFooter {
opts = append(opts, WithPlaintextFooter())
}
if !fe.storeAadPrefixInFile {
opts = append(opts, DisableAadPrefixStorage())
}
return NewFileEncryptionProperties(footerKeyCopy, opts...)
}
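// Illustrative sketch, not part of the original file: a single FileEncryptionProperties instance
// can only encrypt one file, so a second file is typically written from a Clone with its own AAD
// prefix. The 16-byte keys and column/file names below are placeholders.
func exampleEncryptTwoFiles() {
	cols := make(ColumnPathToEncryptionPropsMap)
	cols["ssn"] = NewColumnEncryptionProperties("ssn", WithKey("0123456789012345"))

	propsA := NewFileEncryptionProperties("abcdefghijklmnop",
		WithAadPrefix("file_a.parquet"), WithEncryptedColumns(cols))

	// Derive properties for the second file before propsA's keys are wiped by the writer.
	propsB := propsA.Clone("file_b.parquet")
	_, _ = propsA, propsB
}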
// WipeOutEncryptionKeys clears all of the encryption keys for this and the columns
func (fe *FileEncryptionProperties) WipeOutEncryptionKeys() {
fe.footerKey = ""
for _, elem := range fe.encryptedCols {
elem.WipeOutEncryptionKey()
}
}
type configEncrypt struct {
cipher Cipher
encryptFooter bool
keyMetadata string
aadprefix string
storeAadPrefixInFile bool
encryptedCols ColumnPathToEncryptionPropsMap
}
// EncryptOption is used for specifying values when building FileEncryptionProperties
type EncryptOption func(*configEncrypt)
// WithPlaintextFooter sets the writer to write the footer in plain text, otherwise the footer will be encrypted
// too (which is the default behavior).
func WithPlaintextFooter() EncryptOption {
return func(cfg *configEncrypt) {
cfg.encryptFooter = false
}
}
// WithAlg sets the encryption algorithm to utilize. (default is AesGcm)
func WithAlg(cipher Cipher) EncryptOption {
return func(cfg *configEncrypt) {
cfg.cipher = cipher
}
}
// WithFooterKeyID sets the key retrieval metadata from a string ID; the ID must be a valid UTF-8 string.
//
// Use either WithFooterKeyID or WithFooterKeyMetadata, not both.
func WithFooterKeyID(key string) EncryptOption {
if !utf8.ValidString(key) {
panic("parquet: footer key id should be UTF8 encoded")
}
return WithFooterKeyMetadata(key)
}
// WithFooterKeyMetadata sets a key retrieval metadata to use for getting the key.
//
// Use either WithFooterKeyID or WithFooterKeyMetadata, not both.
func | WithFooterKeyMetadata | identifier_name |
|
encryption_properties.go |
type colEncryptConfig struct {
key string
keyMetadata string
encrypted bool
}
// ColumnEncryptOption is the type of the options passed to the NewColumnEncryptionProperties function.
type ColumnEncryptOption func(*colEncryptConfig)
// WithKey sets a column specific key.
// If key is not set on an encrypted column, the column will be encrypted with the footer key.
// The key length must be 16, 24, or 32 bytes.
// The key is cloned and will be wiped out (array values set to 0) upon completion of file writing;
// the caller is responsible for wiping out the input key array.
func WithKey(key string) ColumnEncryptOption {
return func(c *colEncryptConfig) {
if key != "" {
c.key = key
}
}
}
// WithKeyMetadata sets the key retrieval metadata, use either KeyMetadata or KeyID but not both
func WithKeyMetadata(keyMeta string) ColumnEncryptOption {
return func(c *colEncryptConfig) {
c.keyMetadata = keyMeta
}
}
// WithKeyID is a convenience function to set the key retrieval metadata using a string ID.
// Use either KeyMetadata or KeyID, not both.
// The KeyID will be converted to metadata (a UTF-8 byte array).
func WithKeyID(keyID string) ColumnEncryptOption {
if !utf8.ValidString(keyID) {
panic("parquet: key id should be UTF8 encoded")
}
return WithKeyMetadata(keyID)
}
// NewColumnEncryptionProperties constructs properties for the provided column path, modified by the options provided
func NewColumnEncryptionProperties(name string, opts ...ColumnEncryptOption) *ColumnEncryptionProperties {
var cfg colEncryptConfig
cfg.encrypted = true
for _, o := range opts {
o(&cfg)
}
return &ColumnEncryptionProperties{
utilized: false,
encrypted: cfg.encrypted,
encryptedWithFooterKey: cfg.encrypted && cfg.key == "",
keyMetadata: cfg.keyMetadata,
key: cfg.key,
columnPath: name,
}
}
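// Illustrative sketch, not part of the original file: two ways of declaring an encrypted column.
// With an explicit key the column is encrypted with its own key; without one it falls back to the
// footer key. The key value and key ID below are placeholders.
func exampleColumnEncryptionProps() {
	withOwnKey := NewColumnEncryptionProperties("ssn",
		WithKey("0123456789012345"), WithKeyID("ssn-key-1"))
	withFooterKey := NewColumnEncryptionProperties("email") // no key: encrypted with the footer key
	_, _ = withOwnKey, withFooterKey
}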
// ColumnDecryptionProperties are the specifications for how to decrypt a given column.
type ColumnDecryptionProperties struct {
columnPath string
key string
utilized bool
}
// NewColumnDecryptionProperties constructs a new ColumnDecryptionProperties for the given column path, modified by
// the provided options
func NewColumnDecryptionProperties(column string, opts ...ColumnDecryptOption) *ColumnDecryptionProperties {
var cfg columnDecryptConfig
for _, o := range opts {
o(&cfg)
}
return &ColumnDecryptionProperties{
columnPath: column,
utilized: false,
key: cfg.key,
}
}
// ColumnPath returns which column these properties describe how to decrypt
func (cd *ColumnDecryptionProperties) ColumnPath() string { return cd.columnPath }
// Key returns the key specified to decrypt this column, or is empty if the Footer Key should be used.
func (cd *ColumnDecryptionProperties) Key() string { return cd.key }
// IsUtilized returns whether or not these properties have been used for decryption already
func (cd *ColumnDecryptionProperties) IsUtilized() bool { return cd.utilized }
// SetUtilized is used by the reader to specify when we've decrypted the column and have used the key so we know
// to wipe out the keys.
func (cd *ColumnDecryptionProperties) SetUtilized() { cd.utilized = true }
// WipeOutDecryptionKey is called after decryption to ensure the key doesn't stick around and get re-used.
func (cd *ColumnDecryptionProperties) WipeOutDecryptionKey() { cd.key = "" }
// Clone returns a new instance of ColumnDecryptionProperties with the same key and column
func (cd *ColumnDecryptionProperties) Clone() *ColumnDecryptionProperties {
return NewColumnDecryptionProperties(cd.columnPath, WithDecryptKey(cd.key))
}
type columnDecryptConfig struct {
key string
}
// ColumnDecryptOption is the type of the options passed for constructing Decryption Properties
type ColumnDecryptOption func(*columnDecryptConfig)
// WithDecryptKey specifies the key to utilize for decryption
func WithDecryptKey(key string) ColumnDecryptOption {
return func(cfg *columnDecryptConfig) {
if key != "" {
cfg.key = key
}
}
}
// AADPrefixVerifier is an interface for any object that can be used to verify the identity of the file being decrypted.
// It should panic if the provided AAD identity is bad.
//
// In a data set, AAD Prefixes should be collected, and then checked for missing files.
type AADPrefixVerifier interface {
// Verify identity of file. panic if bad
Verify(string)
}
// DecryptionKeyRetriever is an interface for getting the desired key for decryption from metadata. It should take in
// some metadata identifier and return the actual Key to use for decryption.
type DecryptionKeyRetriever interface {
GetKey(keyMetadata []byte) string
}
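// Illustrative sketch, not part of the original file: a minimal in-memory DecryptionKeyRetriever
// that resolves keys by the key-metadata string stored in the file (for example a key ID).
type mapKeyRetriever struct {
	keys map[string]string // key metadata -> key
}

func (m mapKeyRetriever) GetKey(keyMetadata []byte) string {
	return m.keys[string(keyMetadata)]
}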
// FileDecryptionProperties define the File Level configuration for decrypting a parquet file. Once constructed they are
// read only.
type FileDecryptionProperties struct {
footerKey string
aadPrefix string
checkPlaintextFooterIntegrity bool
plaintextAllowed bool
utilized bool
columnDecryptProps ColumnPathToDecryptionPropsMap
Verifier AADPrefixVerifier
KeyRetriever DecryptionKeyRetriever
}
// NewFileDecryptionProperties takes in the options for constructing a new FileDecryptionProperties object. If no
// options are provided, the default configuration is used, which checks the integrity of a plaintext footer for an
// encrypted file. For unencrypted parquet files, the decryption properties should not be set.
func NewFileDecryptionProperties(opts ...FileDecryptionOption) *FileDecryptionProperties {
var cfg fileDecryptConfig
cfg.checkFooterIntegrity = DefaultCheckSignature
cfg.plaintextAllowed = DefaultAllowPlaintextFiles
for _, o := range opts {
o(&cfg)
}
return &FileDecryptionProperties{
Verifier: cfg.verifier,
footerKey: cfg.footerKey,
checkPlaintextFooterIntegrity: cfg.checkFooterIntegrity,
KeyRetriever: cfg.retriever,
aadPrefix: cfg.aadPrefix,
columnDecryptProps: cfg.colDecrypt,
plaintextAllowed: cfg.plaintextAllowed,
utilized: false,
}
}
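// Illustrative sketch, not part of the original file: combining the decryption options defined in
// this file. The retriever is any DecryptionKeyRetriever (such as the map-based sketch above) and
// the AAD prefix value is a placeholder.
func exampleDecryptionProps(retriever DecryptionKeyRetriever) *FileDecryptionProperties {
	return NewFileDecryptionProperties(
		WithKeyRetriever(retriever),
		WithPlaintextAllowed(),
		WithDecryptAadPrefix("file_a.parquet"),
	)
}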
// ColumnKey returns the key to be used for decrypting the provided column.
func (fd *FileDecryptionProperties) ColumnKey(path string) string {
if d, ok := fd.columnDecryptProps[path]; ok {
if d != nil {
return d.Key()
}
}
return ""
}
// FooterKey returns the key utilized for decrypting the Footer if encrypted and any columns that are encrypted with
// the footer key.
func (fd *FileDecryptionProperties) FooterKey() string { return fd.footerKey }
// AadPrefix returns the prefix to be supplied for constructing the identification strings when decrypting
func (fd *FileDecryptionProperties) AadPrefix() string { return fd.aadPrefix }
// PlaintextFooterIntegrity returns whether or not an integrity check will be performed on a plaintext footer for an
// encrypted file.
func (fd *FileDecryptionProperties) PlaintextFooterIntegrity() bool {
return fd.checkPlaintextFooterIntegrity
}
// PlaintextFilesAllowed returns whether or not this instance of decryption properties are allowed on a plaintext file.
func (fd *FileDecryptionProperties) PlaintextFilesAllowed() bool { return fd.plaintextAllowed }
// SetUtilized is called to mark this instance as utilized once it is used to read a file. A single instance
// can be used for reading one file only. Setting this ensures the keys will be wiped out upon completion of file reading.
func (fd *FileDecryptionProperties) SetUtilized() { fd.utilized = true }
// IsUtilized returns whether or not this instance has been used to decrypt a file. If the footer key and prefix are
// empty and there are no column decryption properties, then this is always false.
func (fd *FileDecryptionProperties) IsUtilized() bool {
if fd.footerKey == "" && len(fd.columnDecryptProps) == 0 && fd.aadPrefix == "" {
return false
}
return fd.utilized
}
// WipeOutDecryptionKeys will clear all the keys for this instance including the column level ones, this will be called
// after this instance has been utilized.
func (fd *FileDecryptionProperties) WipeOutDecryptionKeys() {
fd.footerKey = ""
for _, cd := range fd.columnDecryptProps {
cd.WipeOutDecryptionKey()
}
}
// Clone returns a new instance of these properties, changing the prefix if set (keeping the same prefix if left empty)
func (fd *FileDecryptionProperties) Clone(newAadPrefix string) *FileDecryptionProperties {
keyCopy := fd.footerKey
colDecryptMapCopy := make(ColumnPathToDecryptionPropsMap)
for k, v := range fd.columnDecryptProps {
colDecryptMapCopy[k] = v.Clone()
}
if newAadPrefix == "" {
newAadPrefix = fd.aadPrefix
}
return &FileDecryptionProperties{
footerKey: keyCopy,
KeyRetriever: fd.KeyRetriever,
| {
copy := ce.key
return NewColumnEncryptionProperties(ce.columnPath, WithKey(copy), WithKeyMetadata(ce.keyMetadata))
} | identifier_body |
|
bot.js | = 4;
return permlvl;
};
var regToken = /[\w\d]{24}\.[\w\d]{6}\.[\w\d-_]{27}/g;
client.on('debug', e => {
console.log(chalk.bgBlue.green(e.replace(regToken, 'that was redacted')));
});
client.on('warn', e => {
console.log(chalk.bgYellow(e.replace(regToken, 'that was redacted')));
});
client.on('error', e => {
console.log(chalk.bgRed(e.replace(regToken, 'that was redacted')));
});
client.on('message', async message => {
const prefix = "gr!";
if (message.author.bot || !message.content.startsWith(prefix)) return;
const args = message.content.slice(prefix.length).trim().split(/ +/);
const Komut = args.shift().toLowerCase();
if (Komut === "is") {
if (message.guild.channels.find(channel => channel.name === `${message.guild.members.size}`)) return message.channel.send("İstatistik Paneli Zaten Ayarlanmış.")
if (!message.member.hasPermission('ADMINISTRATOR'))
return message.channel.send("Bu Kodu `Yönetici` Yetkisi Olan Kişi Kullanabilir.")
message.channel.send(`İstatistik kanallarının kurulumu başlatılsın mı? Onaylıyorsanız **evet** yazınız.`)
message.channel.awaitMessages(response => response.content === 'evet', {
max: 1,
time: 10000,
errors: ['time'],
})
.then((collected) => {
message.guild.createChannel('İstatistik Kanalı', 'category', [{
id: message.guild.id
}])
message.guild.createChannel(`Sunucu Üye Sayısı: ${message.guild.members.size}`, 'voice', [{
id: message.guild.id,
deny: ['CONNECT']
}])
.then(channel =>
channel.setParent(message.guild.channels.find(channel => channel.name === "İstatistik Kanalı")))
message.channel.send("Gerekli kanal kuruldu!")
})
}
}) | const request = require('node-superfetch');
const db = require('quick.db');
const ms = require('parse-ms')
let timeout = 600000
let dakdest = await db.fetch(`goldzzz_${msg.author.id}`);
let i = db.fetch(`gold_${msg.author.id}`)
if (i == 'gold') {
if (dakdest !== null && timeout - (Date.now() - dakdest) > 0) {
let time = ms(timeout - (Date.now() - dakdest));
} else {
if(msg.author.bot) return;
if (msg.content.length > 1) {
db.set(`goldzzz_${msg.author.id}`, Date.now());
msg.channel.send('**Bir Gold Üye Belirdi!!**')
}
};
}
else if (i == undefined) {
}
if (!i) return;
});
client.on("guildMemberAdd", async member => {
let sayac = JSON.parse(fs.readFileSync("./otorol.json", "utf8"));
let otorole = JSON.parse(fs.readFileSync("./otorol.json", "utf8"));
let arole = otorole[member.guild.id].sayi
let giriscikis = JSON.parse(fs.readFileSync("./otorol.json", "utf8"));
let embed = new Discord.RichEmbed()
.setTitle('Otorol Sistemi')
.setDescription(`<:oke:616540844408832010> | Sunucuya hoşgeldin \`@${member.user.tag}\` otomatik rol verildi. `)
.setColor("GREEN")
.setFooter("Botify", client.user.avatarURL);
if (!giriscikis[member.guild.id].kanal) {
return;
}
try {
let giriscikiskanalID = giriscikis[member.guild.id].kanal;
let giriscikiskanali = client.guilds.get(member.guild.id).channels.get(giriscikiskanalID);
giriscikiskanali.send(`<:oke:616540844408832010> | Sunucuya hoşgeldin \`@${member.user.tag}\` otomatik rol verildi.`);
} catch (e) { // if an error occurs, log it to the console so we can inspect it
return console.log(e)
}
});
client.on("guildMemberAdd", async (member) => {
let autorole = JSON.parse(fs.readFileSync("./otorol.json", "utf8"));
let role = autorole[member.guild.id].sayi
member.addRole(role)
});
client.on('guildMemberAdd',async member => {
let user = client.users.get(member.id);
let chan = client.channels.get(db.fetch(`guvenlik${member.guild.id}`)) //bu satırda db.fethc('güvenlik') orası
const Canvas = require('canvas')
const canvas = Canvas.createCanvas(360,100);
const ctx = canvas.getContext('2d');
const resim1 = await Canvas.loadImage('https://cdn.discordapp.com/attachments/591299755976425493/614151181752860672/yhosgeldirrn.png')
const resim2 = await Canvas.loadImage('https://cdn.discordapp.com/attachments/591299755976425493/614164419768877056/yhosgeldirrn.png')
const kurulus = new Date().getTime() - user.createdAt.getTime();
const gün = moment(kurulus).format('dddd');
var kontrol;
if (kurulus > 2629800000) kontrol = resim2
if (kurulus < 2629800000) kontrol = resim1
const background = await Canvas.loadImage('https://cdn.discordapp.com/attachments/591299755976425493/614164413318168606/Adsz.png');
ctx.drawImage(background, 0, 0, canvas.width, canvas.height);
const avatar = await Canvas.loadImage(member.user.displayAvatarURL);
ctx.drawImage(kontrol,0,0,canvas.width, canvas.height)
ctx.beginPath();
ctx.lineWidth = 4;
ctx.fill()
ctx.lineWidth = 4;
ctx.arc(180, 46, 36, 0, 2 * Math.PI);
ctx.clip();
ctx.drawImage(avatar, 143,10, 73, 72 );
const attachment = new Discord.Attachment(canvas.toBuffer(), 'STARKs-güvenlik.png');
chan.send(attachment)
});
client.on('guildMemberAdd', async member => {
let tag = await db.fetch(`tag_${member.guild.id}`);
let tagyazi;
if (tag == null) tagyazi = member.setNickname(`${member.user.username}`)
else tagyazi = member.setNickname(`${tag} | ${member.user.username}`)
});
client.on('guildMemberAdd', (member) => {
const db = require('quick.db');
const channelss = db.fetch(`kkanal_${member.guild.id}`).replace("<#", "").replace(">", "")
const kayıts = db.fetch(`ksistem_${member.guild.id}`)
if (kayıts == undefined) {
}
if (kayıts == 'acik') {
member.guild.channels.forEach(async (channel, id) => {
await channel.overwritePermissions(member, {
VIEW_CHANNEL: false
});
});
member.guild.channels.get(channelss).overwritePermissions(member, {
SEND_MESSAGES: true,
VIEW_CHANNEL: true
});
}
});
client.on("message", async msg => {
if(msg.author.bot) return;
if(msg.channel.type === "dm") return;
let i = await db.fetch(`reklamFiltre_${msg.guild.id}`)
if (i == 'acik') {
const reklam = ["discord.app", "discord.gg", "invite","discordapp","discordgg", ".com", ".net", ".xyz", ".tk", ".pw", ".io", ".me", ".gg", "www.", "https", "http", ".gl", ".org", ".com.tr", ".biz", ".party", ".rf.gd", ".az",];
if (reklam.some(word => msg.content.toLowerCase().includes(word))) {
try {
if (!msg.member.hasPermission("MANAGE_GUILD")) {
msg.delete();
let embed = new Discord.RichEmbed()
.setColor(0xffa300)
.setFooter('Greif Blocker s Reklam engellendi.', client.user.avatarURL)
.setAuthor(msg.guild.owner.user.username, msg.guild.owner.user.avatarURL)
.setDescription("Greif Reklam sistemi, " + `***${msg.guild.name}***` + " adlı sunucunuzda reklam yakaladım.")
.addField('Reklamı yapan |
client.on("message", async msg => { | random_line_split |
SI507_Final_Project.py |
except:
pass
try:
Uni.female_tot = soup.find_all('strong',{'class':'f-12'})[2].text
except:
pass
try:
Uni.female_intl = soup.find_all('strong',{'class':'f-12'})[5].text
except:
pass
try:
fullAddress = soup.find('ul',{'class':'fa-ul'}).li.text.strip()
if fullAddress[-5] == '-': # if the zipcode is '5digits - 4 digits' format
fullAddress = fullAddress[:-5] # ignore the last 4 digits
except:
pass
try:
Uni.address = fullAddress[:-6]
except:
pass
try:
Uni.zipcode = fullAddress[-5:]
except:
pass
try:
Uni.phone = soup.find('ul',{'class':'fa-ul'}).a.text
except:
pass
return Uni
def insertIgnoreIntoUniversities(Uni):
'''execute INSERT or IGNORE to store the university info into the 'Universities' table
Parameters
----------
Uni: Instance of the University class
Returns
-------
None
'''
# Connect to dbName
connection = sqlite3.connect(dbName)
cursor = connection.cursor()
insertUni = """
INSERT or IGNORE INTO Universities (NAME,STATE,ADDRESS,ZIPCODE,PHONE,URL,MALE,FEMALE,MALEINTL,FEMALEINTL)
VALUES ("{}","{}","{}","{}","{}","{}","{}","{}","{}","{}");""".format(Uni.name,Uni.states,Uni.address,
Uni.zipcode,Uni.phone,Uni.url,Uni.male_tot,Uni.female_tot,Uni.male_intl,Uni.female_intl)
# Connect or create tables if not exists
cursor.execute(insertUni)
connection.commit()
query = '''
SELECT ID
FROM Universities
WHERE NAME = '{}'
'''.format(Uni.name)
UniID = cursor.execute(query).fetchall()[0][0]
connection.close()
return UniID
def insertIgnoreIntoRestaurants(restaurantNameInfo,UniID):
'''execute INSERT or IGNORE to store the Restaurant info into the 'Restaurants' table
Parameters
----------
restaurantNameInfo: Dictionary containing restaurant's info
Returns
-------
None
'''
# Connect to dbName
connection = sqlite3.connect(dbName)
cursor = connection.cursor()
i = 1
for key in restaurantNameInfo.keys():
if i > 10:
break
insertRest = """
INSERT or IGNORE INTO Restaurants (NAME,ADDRESS,PHONE,UniID)
VALUES ("{}","{}","{}",{});""".format(key,
restaurantNameInfo[key]['address'],restaurantNameInfo[key]['phone'],UniID)
# Connect or create tables if not exists
cursor.execute(insertRest)
connection.commit()
i +=1
connection.close()
def uniInfoString(Uni):
'''string of the University info
Parameters
----------
Uni: instance
Returns
-------
uniInfo: string
'''
uniInfo = """
Name: {}
Address: {}
Phone Number: {}
# of male students: {}
# of female students: {}
# of male international students: {}
# of female international students: {}
""".format(Uni.name, Uni.address+', '+Uni.zipcode, Uni.phone, Uni.male_tot, Uni.female_tot, Uni.male_intl, Uni.female_intl)
return uniInfo
def extractUnis(url, lists):
'''Extract university urls and name recursively
Parameters
----------
url: string
The URL of the state
lists: empty list
Returns
-------
lists
appended lists
'''
response_text = requestResponseText(url)
soup = BeautifulSoup(response_text, 'html.parser') # get the text
# Universities are listed on the several pages
# we have to click 'next' in the website
isnext = soup.find('li',{'class':'btn btn-sm btn-link next'})
if not(isnext == None): # if this is the last page
url_new = 'https://www.internationalstudent.com' + isnext.a['href']
extractUnis(url_new, lists)
return lists.append(soup.find_all('li',{'class':'list-group-item d-flex justify-content-between font-bitter h6'}))
return lists.append(soup.find_all('li',{'class':'list-group-item d-flex justify-content-between font-bitter h6'}))
def getUniList(url):
'''Make a dictionary of university instances from a state url
Parameters
----------
url: string
A URL for a state
Returns
-------
uniNameUrl: Dict
keys: uniName, value: uni url
'''
li_list = []
#dictUniInsatnce = {}
uniNameUrl = {}
extractUnis(url,li_list)
for i in range(len(li_list)):
h = len(li_list) - 1 - i # li_list has a reverse order
for j in range(len(li_list[h])):
uniName = li_list[h][j].a.text.strip()
uniURL = 'https://www.internationalstudent.com' + li_list[h][j].a['href']
#dictUniInsatnce[uniName] = extractUniInfo(uniURL,stateName)
uniNameUrl[uniName] = uniURL
return uniNameUrl
def extractStates():
'''Extract state urls and
make a dict containing the state name and corresponding url
Parameters
----------
None
Returns
-------
dict
state name : state url
'''
stateNameURL = {}
response_text = requestResponseText(baseurlSoup)
soup = BeautifulSoup(response_text, 'html.parser') # get the text
for i in range(3):
ultag = soup.find_all('ul',{'class':'list-unstyled col-md mb-0 d-flex flex-column justify-content-between'})[i+3]
for litag in ultag.find_all('li'):
stateName = litag.a.text.strip()
stateURL = 'https://www.internationalstudent.com' + litag.a['href']
stateNameURL[stateName] = stateURL
return stateNameURL
def requestAPI(url, params):
''' request the API and return the output in JSON format
Parameters
----------
url: Strings
params: dictionary
Returns
-------
json
'''
response = requests.get(url, params=params) # oauth is defined globally
return response.json()
def getNearbyPlaces(uni):
'''Obtain API data from MapQuest API.
Parameters
----------
uni: Instance
an instance of an university
Returns
-------
dict
a value of the 'searchResults' value of the converted API return from MapQuest API
'''
params = {"key":secrets.API_KEY, "origin":uni.zipcode, "radius":10, "maxMatches":500, "ambiguities":"ignore"}
unique_key = constructUniqueKey(baseurl=baseurlAPI, params= params)
if unique_key in CACHE_DICT.keys(): # if the unique key is in cache
print("Using Cache")
else: # if the unique key is not in cache
print("Fetching")
CACHE_DICT[unique_key] = requestAPI(url=baseurlAPI, params=params) #request new one
saveCache(CACHE_DICT) # save the current state
results = CACHE_DICT[unique_key]
return results['searchResults']
def extractRestaurantInfoOnly(searchResults):
'''Extract restaurant info from dictionary that was a return of the API request
Parameters
----------
searchResults: Dict
Return of the API request
Returns
-------
restaurantNameInfo : Dict
'Name of the restaurant' : {'phone' : value, 'address' : value}
'''
restaurantNameInfo = {}
for i in range(len(searchResults)):
singleRestaurantInfo = {}
name = searchResults[i]['name']
fields = searchResults[i]['fields']
# restaurants only
if fields['group_sic_code_name'] == '(All) Restaurants':
if fields['phone']: # non-empty phone
singleRestaurantInfo['phone'] = fields['phone']
else:
singleRestaurantInfo['phone'] = 'None'
if fields['address'] and fields['city']: # non-empty address & city
singleRestaurantInfo['address'] = fields['address'] + ', ' + fields['city']
else:
singleRestaurantInfo['address'] = 'None'
restaurantNameInfo[name] = singleRestaurantInfo
return restaurantNameInfo
def restaurantInfoString(restaurantNameInfo):
| i = 1
restaurantInfo = ""
for key in restaurantNameInfo.keys():
if i > 10:
break
name = key
address = restaurantNameInfo[key]['address']
phone = restaurantNameInfo[key]['phone']
restaurantInfo = restaurantInfo + "[{}] {} : {}, {} \n".format(i, name, address, phone)
i += 1
return restaurantInfo | identifier_body |
|
SI507_Final_Project.py | dbName
connection = sqlite3.connect(dbName)
cursor = connection.cursor()
insertUni = """
INSERT or IGNORE INTO Universities (NAME,STATE,ADDRESS,ZIPCODE,PHONE,URL,MALE,FEMALE,MALEINTL,FEMALEINTL)
VALUES ("{}","{}","{}","{}","{}","{}","{}","{}","{}","{}");""".format(Uni.name,Uni.states,Uni.address,
Uni.zipcode,Uni.phone,Uni.url,Uni.male_tot,Uni.female_tot,Uni.male_intl,Uni.female_intl)
# Connect or create tables if not exists
cursor.execute(insertUni)
connection.commit()
query = '''
SELECT ID
FROM Universities
WHERE NAME = '{}'
'''.format(Uni.name)
UniID = cursor.execute(query).fetchall()[0][0]
connection.close()
return UniID
def insertIgnoreIntoRestaurants(restaurantNameInfo,UniID):
'''execute INSERT or IGNORE to store the Restaurant info into the 'Restaurants' table
Parameters
----------
restaurantNameInfo: Dictionary containing restaurant's info
Returns
-------
None
'''
# Connect to dbName
connection = sqlite3.connect(dbName)
cursor = connection.cursor()
i = 1
for key in restaurantNameInfo.keys():
if i > 10:
break
insertRest = """
INSERT or IGNORE INTO Restaurants (NAME,ADDRESS,PHONE,UniID)
VALUES ("{}","{}","{}",{});""".format(key,
restaurantNameInfo[key]['address'],restaurantNameInfo[key]['phone'],UniID)
# Connect or create tables if not exists
cursor.execute(insertRest)
connection.commit()
i +=1
connection.close()
def uniInfoString(Uni):
'''string of the University info
Parameters
----------
Uni: instance
Returns
-------
uniInfo: string
'''
uniInfo = """
Name: {}
Address: {}
Phone Number: {}
# of male students: {}
# of female students: {}
# of male international students: {}
# of female international students: {}
""".format(Uni.name, Uni.address+', '+Uni.zipcode, Uni.phone, Uni.male_tot, Uni.female_tot, Uni.male_intl, Uni.female_intl)
return uniInfo
def extractUnis(url, lists):
'''Extract university urls and name recursively
Parameters
----------
url: string
The URL of the state
lists: empty list
Returns
-------
lists
appended lists
'''
response_text = requestResponseText(url)
soup = BeautifulSoup(response_text, 'html.parser') # get the text
# Universities are listed on the several pages
# we have to click 'next' in the website
isnext = soup.find('li',{'class':'btn btn-sm btn-link next'})
if not(isnext == None): # if this is the last page
url_new = 'https://www.internationalstudent.com' + isnext.a['href']
extractUnis(url_new, lists)
return lists.append(soup.find_all('li',{'class':'list-group-item d-flex justify-content-between font-bitter h6'}))
return lists.append(soup.find_all('li',{'class':'list-group-item d-flex justify-content-between font-bitter h6'}))
def getUniList(url):
'''Make a dictionary of university instances from a state url
Parameters
----------
url: string
A URL for a state
Returns
-------
uniNameUrl: Dict
keys: uniName, value: uni url
'''
li_list = []
#dictUniInsatnce = {}
uniNameUrl = {}
extractUnis(url,li_list)
for i in range(len(li_list)):
h = len(li_list) - 1 - i # li_list has a reverse order
for j in range(len(li_list[h])):
uniName = li_list[h][j].a.text.strip()
uniURL = 'https://www.internationalstudent.com' + li_list[h][j].a['href']
#dictUniInsatnce[uniName] = extractUniInfo(uniURL,stateName)
uniNameUrl[uniName] = uniURL
return uniNameUrl
def extractStates():
'''Extract state urls and
make a dict containing the state name and corresponding url
Parameters
----------
None
Returns
-------
dict
state name : state url
'''
stateNameURL = {}
response_text = requestResponseText(baseurlSoup)
soup = BeautifulSoup(response_text, 'html.parser') # get the text
for i in range(3):
ultag = soup.find_all('ul',{'class':'list-unstyled col-md mb-0 d-flex flex-column justify-content-between'})[i+3]
for litag in ultag.find_all('li'):
stateName = litag.a.text.strip()
stateURL = 'https://www.internationalstudent.com' + litag.a['href']
stateNameURL[stateName] = stateURL
return stateNameURL
def requestAPI(url, params):
''' request the API and return the output in JSON format
Parameters
----------
url: Strings
params: dictionary
Returns
-------
json
'''
response = requests.get(url, params=params) # oauth is defined globally
return response.json()
def getNearbyPlaces(uni):
'''Obtain API data from MapQuest API.
Parameters
----------
uni: Instance
an instance of an university
Returns
-------
dict
a value of the 'searchResults' value of the converted API return from MapQuest API
'''
params = {"key":secrets.API_KEY, "origin":uni.zipcode, "radius":10, "maxMatches":500, "ambiguities":"ignore"}
unique_key = constructUniqueKey(baseurl=baseurlAPI, params= params)
if unique_key in CACHE_DICT.keys(): # if the unique key is in cache
print("Using Cache")
else: # if the unique key is not in cache
print("Fetching")
CACHE_DICT[unique_key] = requestAPI(url=baseurlAPI, params=params) #request new one
saveCache(CACHE_DICT) # save the current state
results = CACHE_DICT[unique_key]
return results['searchResults']
def extractRestaurantInfoOnly(searchResults):
'''Extract restaurant info from dictionary that was a return of the API request
Parameters
----------
searchResults: Dict
Return of the API request
Returns
-------
restaurantNameInfo : Dict
'Name of the restaurant' : {'phone' : value, 'address' : value}
'''
restaurantNameInfo = {}
for i in range(len(searchResults)):
singleRestaurantInfo = {}
name = searchResults[i]['name']
fields = searchResults[i]['fields']
# restaurants only
if fields['group_sic_code_name'] == '(All) Restaurants':
if fields['phone']: # non-empty phone
singleRestaurantInfo['phone'] = fields['phone']
else:
singleRestaurantInfo['phone'] = 'None'
if fields['address'] and fields['city']: # non-empty address & city
singleRestaurantInfo['address'] = fields['address'] + ', ' + fields['city']
else:
singleRestaurantInfo['address'] = 'None'
restaurantNameInfo[name] = singleRestaurantInfo
return restaurantNameInfo
def restaurantInfoString(restaurantNameInfo):
i = 1
restaurantInfo = ""
for key in restaurantNameInfo.keys():
if i > 10:
break
name = key
address = restaurantNameInfo[key]['address']
phone = restaurantNameInfo[key]['phone']
restaurantInfo = restaurantInfo + "[{}] {} : {}, {} \n".format(i, name, address, phone)
i += 1
return restaurantInfo
def printDictKeys(dictionary):
i = 1
for key in dictionary.keys():
print('[{}] {}'.format(i, key))
i += 1
pass
if __name__=="__main__":
# upper lower case
createDB()
exitFlag = False
while not(exitFlag):
print("=============================================")
stateNameURL = extractStates()
printDictKeys(stateNameURL)
while 1:
print("=============================================")
stateName = input("Type a state name (case sensitive) you want to explore or 'exit': ")
if stateName in STATES:
break
elif stateName == 'exit':
print("Bye~")
exitFlag = True
break
else:
print("Please type a correct state name !")
if exitFlag:
break
print("=============================================")
print("University lists in the chosen state")
uniNameUrl = getUniList(stateNameURL[stateName])
printDictKeys(uniNameUrl)
while 1:
print("=============================================")
uniName = input("Type one of the universities you are interested in or 'exit': ")
if uniName in uniNameUrl.keys():
| break | conditional_block |
|
SI507_Final_Project.py | )
return unique_key
def requestResponseText(url):
''' request response text of the url
Parameters
----------
url: string
Returns
-------
response.text
'''
if (url in CACHE_DICT.keys()):
print("Using cache")
else:
print("Fetching")
response = requests.get(url)
CACHE_DICT[url] = response.text
saveCache(CACHE_DICT)
return CACHE_DICT[url]
def extractUniInfo(url, stateName):
'''Extract an university info and
make an instance that contains the info
Parameters
----------
url: string
The URL for an university
Returns
-------
instance
an university instance
'''
Uni = university() # call the class
response_text = requestResponseText(url)
soup = BeautifulSoup(response_text, 'html.parser') # get the text
Uni.url = url
Uni.states = stateName
## some universities have no information
try:
Uni.name = soup.find('div',{'class':'card card-body mb-3 p-3'}).h1.text # name
except:
pass
try:
Uni.state = soup.find_all('li',{'class':'breadcrumb-item'})[2].a.text
except:
pass
try:
Uni.male_tot = soup.find_all('strong',{'class':'f-12'})[1].text
except:
pass
try:
Uni.male_intl = soup.find_all('strong',{'class':'f-12'})[4].text
except:
pass
try:
Uni.female_tot = soup.find_all('strong',{'class':'f-12'})[2].text
except:
pass
try:
Uni.female_intl = soup.find_all('strong',{'class':'f-12'})[5].text
except:
pass
try:
fullAddress = soup.find('ul',{'class':'fa-ul'}).li.text.strip()
if fullAddress[-5] == '-': # if the zipcode is '5digits - 4 digits' format
fullAddress = fullAddress[:-5] # ignore the last 4 digits
except:
pass
try:
Uni.address = fullAddress[:-6]
except:
pass
try:
Uni.zipcode = fullAddress[-5:]
except:
pass
try:
Uni.phone = soup.find('ul',{'class':'fa-ul'}).a.text
except:
pass
return Uni
def insertIgnoreIntoUniversities(Uni):
'''execute INSERT or IGNORE to store the university info into the 'Universities' table
Parameters
----------
Uni: Instance of the University class
Returns
-------
None
'''
# Connect to dbName
connection = sqlite3.connect(dbName)
cursor = connection.cursor()
insertUni = """
INSERT or IGNORE INTO Universities (NAME,STATE,ADDRESS,ZIPCODE,PHONE,URL,MALE,FEMALE,MALEINTL,FEMALEINTL)
VALUES ("{}","{}","{}","{}","{}","{}","{}","{}","{}","{}");""".format(Uni.name,Uni.states,Uni.address,
Uni.zipcode,Uni.phone,Uni.url,Uni.male_tot,Uni.female_tot,Uni.male_intl,Uni.female_intl)
# Connect or create tables if not exists
cursor.execute(insertUni)
connection.commit()
query = '''
SELECT ID
FROM Universities
WHERE NAME = '{}'
'''.format(Uni.name)
UniID = cursor.execute(query).fetchall()[0][0]
connection.close()
return UniID
def insertIgnoreIntoRestaurants(restaurantNameInfo,UniID):
'''execute INSERT or IGNORE to store the Restaurant info into the 'Restaurants' table
Parameters
----------
restaurantNameInfo: Dictionary containing restaurant's info
Returns
-------
None
'''
# Connect to dbName
connection = sqlite3.connect(dbName)
cursor = connection.cursor()
i = 1
for key in restaurantNameInfo.keys():
if i > 10:
break
insertRest = """
INSERT or IGNORE INTO Restaurants (NAME,ADDRESS,PHONE,UniID)
VALUES ("{}","{}","{}",{});""".format(key,
restaurantNameInfo[key]['address'],restaurantNameInfo[key]['phone'],UniID)
# Connect or create tables if not exists
cursor.execute(insertRest)
connection.commit()
i +=1
connection.close()
def uniInfoString(Uni):
'''string of the University info
Parameters
----------
Uni: instance
Returns
-------
uniInfo: string
'''
uniInfo = """
Name: {}
Address: {}
Phone Number: {}
# of male students: {}
# of female students: {}
# of male international students: {}
# of female international students: {}
""".format(Uni.name, Uni.address+', '+Uni.zipcode, Uni.phone, Uni.male_tot, Uni.female_tot, Uni.male_intl, Uni.female_intl)
return uniInfo
def extractUnis(url, lists):
'''Extract university urls and name recursively
Parameters
----------
url: string
The URL of the state
lists: empty list
Returns
-------
lists
appended lists
'''
response_text = requestResponseText(url)
soup = BeautifulSoup(response_text, 'html.parser') # get the text
# Universities are listed on the several pages
# we have to click 'next' in the website
isnext = soup.find('li',{'class':'btn btn-sm btn-link next'})
if not(isnext == None): # if this is the last page
url_new = 'https://www.internationalstudent.com' + isnext.a['href']
extractUnis(url_new, lists)
return lists.append(soup.find_all('li',{'class':'list-group-item d-flex justify-content-between font-bitter h6'}))
return lists.append(soup.find_all('li',{'class':'list-group-item d-flex justify-content-between font-bitter h6'}))
def getUniList(url):
'''Make a dictionary of university instances from a state url
Parameters
----------
url: string
A URL for a state
Returns
-------
uniNameUrl: Dict
keys: uniName, value: uni url
'''
li_list = []
#dictUniInsatnce = {}
uniNameUrl = {}
extractUnis(url,li_list)
for i in range(len(li_list)):
h = len(li_list) - 1 - i # li_list has a reverse order
for j in range(len(li_list[h])):
uniName = li_list[h][j].a.text.strip()
uniURL = 'https://www.internationalstudent.com' + li_list[h][j].a['href']
#dictUniInsatnce[uniName] = extractUniInfo(uniURL,stateName)
uniNameUrl[uniName] = uniURL
return uniNameUrl
def extractStates():
'''Extract state urls and
make a dict containing the state name and corresponding url
Parameters
----------
None
Returns
-------
dict
state name : state url
'''
stateNameURL = {}
response_text = requestResponseText(baseurlSoup)
soup = BeautifulSoup(response_text, 'html.parser') # get the text
for i in range(3):
ultag = soup.find_all('ul',{'class':'list-unstyled col-md mb-0 d-flex flex-column justify-content-between'})[i+3]
for litag in ultag.find_all('li'):
stateName = litag.a.text.strip()
stateURL = 'https://www.internationalstudent.com' + litag.a['href']
stateNameURL[stateName] = stateURL
return stateNameURL
def requestAPI(url, params):
''' request the API and return the output in JSON format
Parameters
----------
url: Strings
params: dictionary
Returns
-------
json
'''
response = requests.get(url, params=params) # oauth is defined globally
return response.json()
def getNearbyPlaces(uni):
'''Obtain API data from MapQuest API.
Parameters
----------
uni: Insatnce
an instance of an university
Returns
-------
dict
a value of the 'searchResults' value of the converted API return from MapQuest API
'''
params = {"key":secrets.API_KEY, "origin":uni.zipcode, "radius":10, "maxMatches":500, "ambiguities":"ignore"}
unique_key = constructUniqueKey(baseurl=baseurlAPI, params= params)
if unique_key in CACHE_DICT.keys(): # if the unique key is in cache
print("Using Cache")
else: # if the unique key is not in cache
print("Fetching")
CACHE_DICT[unique_key] = requestAPI(url=baseurlAPI, params=params) #request new one
saveCache(CACHE_DICT) # save the current state
results = CACHE_DICT[unique_key]
return results['searchResults']
def | extractRestaurantInfoOnly | identifier_name |
|
SI507_Final_Project.py | string
address of the restaurant
zipcode: string
zip-code of the restaurant
'''
def __init__(self): #initialize empty attributes
self.name = [] # name of the restaurant
self.address = [] # address of the restaurant
self.zipcode = [] # zip code of the restaurant
def info(self):
return self.name + ' : ' + self.address + ' [' + self.zipcode + ']'
pass
def loadCache():
''' Load cache if exists
Parameters
----------
None
Returns
-------
cache
jason format, if the cache exists
empty, if the cache does not exist
'''
try: # Try to get cache
cache_file = open(CACHE_FILE_NAME, 'r')
cache_file_contents = cache_file.read()
cache = json.loads(cache_file_contents)
cache_file.close()
except: # if no cache
cache = {}
return cache
# Load the cache, save in a global variable
CACHE_DICT = loadCache()
def createDB():
''' create DB if not exists
'''
# Connect to dbName
connection = sqlite3.connect(dbName)
cursor = connection.cursor()
# Connect or create tables if not exists
cursor.execute(createTableUniversities)
cursor.execute(createTableRestaurants)
# Close connection
connection.close()
def saveCache(cache):
''' save cache
Parameters
----------
cache : dict
Returns
-------
None
'''
cache_file = open(CACHE_FILE_NAME, 'w')
contents_to_write = json.dumps(cache)
cache_file.write(contents_to_write)
cache_file.close()
def constructUniqueKey(baseurl, params):
''' constructs a key that is guaranteed to uniquely and
repeatably identify an API request by its baseurl and params
Parameters
----------
baseurl: string
The URL for the API endpoint
params: dict
A dictionary of param:value pairs
Returns
-------
string
the unique key as a string
'''
key_value_temp = [] # empty string
connector = "_"
for i in params.keys(): # get all of values in params
key_value_temp.append(f'{i}_{params[i]}')
key_value_temp.sort() # sort string in alphabat order
unique_key = baseurl + connector + connector.join(key_value_temp)
return unique_key
def requestResponseText(url):
''' request response text of the url
Parameters
----------
url: string
Returns
-------
response.text
'''
if (url in CACHE_DICT.keys()):
print("Using cache")
else:
print("Fetching")
response = requests.get(url)
CACHE_DICT[url] = response.text
saveCache(CACHE_DICT)
return CACHE_DICT[url]
def extractUniInfo(url, stateName):
'''Extract an university info and
make an instance that contains the info
Parameters
----------
url: string
The URL for an university
Returns
-------
instance
an university instance
'''
Uni = university() # call the class
response_text = requestResponseText(url)
soup = BeautifulSoup(response_text, 'html.parser') # get the text
Uni.url = url
Uni.states = stateName
## some universities have no information
try:
Uni.name = soup.find('div',{'class':'card card-body mb-3 p-3'}).h1.text # name
except:
pass
| pass
try:
Uni.male_tot = soup.find_all('strong',{'class':'f-12'})[1].text
except:
pass
try:
Uni.male_intl = soup.find_all('strong',{'class':'f-12'})[4].text
except:
pass
try:
Uni.female_tot = soup.find_all('strong',{'class':'f-12'})[2].text
except:
pass
try:
Uni.female_intl = soup.find_all('strong',{'class':'f-12'})[5].text
except:
pass
try:
fullAddress = soup.find('ul',{'class':'fa-ul'}).li.text.strip()
if fullAddress[-5] == '-': # if the zipcode is '5digits - 4 digits' format
fullAddress = fullAddress[:-5] # ignore the last 4 digits
except:
pass
try:
Uni.address = fullAddress[:-6]
except:
pass
try:
Uni.zipcode = fullAddress[-5:]
except:
pass
try:
Uni.phone = soup.find('ul',{'class':'fa-ul'}).a.text
except:
pass
return Uni
def insertIgnoreIntoUniversities(Uni):
'''execute INSERT or IGNORE to store the university info into the 'Universities' table
Parameters
----------
Uni: Instance of the University class
Returns
-------
None
'''
# Connect to dbName
connection = sqlite3.connect(dbName)
cursor = connection.cursor()
insertUni = """
INSERT or IGNORE INTO Universities (NAME,STATE,ADDRESS,ZIPCODE,PHONE,URL,MALE,FEMALE,MALEINTL,FEMALEINTL)
VALUES ("{}","{}","{}","{}","{}","{}","{}","{}","{}","{}");""".format(Uni.name,Uni.states,Uni.address,
Uni.zipcode,Uni.phone,Uni.url,Uni.male_tot,Uni.female_tot,Uni.male_intl,Uni.female_intl)
# Connect or create tables if not exists
cursor.execute(insertUni)
connection.commit()
query = '''
SELECT ID
FROM Universities
WHERE NAME = '{}'
'''.format(Uni.name)
UniID = cursor.execute(query).fetchall()[0][0]
connection.close()
return UniID
def insertIgnoreIntoRestaurants(restaurantNameInfo,UniID):
'''execute INSERT or IGNORE to store the Restaurant info into the 'Restaurants' table
Parameters
----------
restaurantNameInfo: Dictionary containing restaurant's info
Returns
-------
None
'''
# Connect to dbName
connection = sqlite3.connect(dbName)
cursor = connection.cursor()
i = 1
for key in restaurantNameInfo.keys():
if i > 10:
break
insertRest = """
INSERT or IGNORE INTO Restaurants (NAME,ADDRESS,PHONE,UniID)
VALUES ("{}","{}","{}",{});""".format(key,
restaurantNameInfo[key]['address'],restaurantNameInfo[key]['phone'],UniID)
# Connect or create tables if not exists
cursor.execute(insertRest)
connection.commit()
i +=1
connection.close()
def uniInfoString(Uni):
'''string of the University info
Parameters
----------
Uni: instance
Returns
-------
uniInfo: string
'''
uniInfo = """
Name: {}
Address: {}
Phone Number: {}
# of male students: {}
# of female students: {}
# of male international students: {}
# of female international students: {}
""".format(Uni.name, Uni.address+', '+Uni.zipcode, Uni.phone, Uni.male_tot, Uni.female_tot, Uni.male_intl, Uni.female_intl)
return uniInfo
def extractUnis(url, lists):
'''Extract university urls and name recursively
Parameters
----------
url: string
The URL of the state
lists: empty list
Returns
-------
lists
appended lists
'''
response_text = requestResponseText(url)
soup = BeautifulSoup(response_text, 'html.parser') # get the text
# Universities are listed on the several pages
# we have to click 'next' in the website
isnext = soup.find('li',{'class':'btn btn-sm btn-link next'})
if not(isnext == None): # if this is the last page
url_new = 'https://www.internationalstudent.com' + isnext.a['href']
extractUnis(url_new, lists)
return lists.append(soup.find_all('li',{'class':'list-group-item d-flex justify-content-between font-bitter h6'}))
return lists.append(soup.find_all('li',{'class':'list-group-item d-flex justify-content-between font-bitter h6'}))
def getUniList(url):
'''Make a dictionary of university instances from a state url
Parameters
----------
url: string
A URL for a state
Returns
-------
uniNameUrl: Dict
keys: uniName, value: uni url
'''
li_list = []
#dictUniInsatnce = {}
uniNameUrl = {}
extractUnis(url,li_list)
for i in range(len(li_list)):
h = len(li_list) - 1 - i # li_list has a reverse order
for j in range(len(li_list[h])):
uniName = li_list[h][j].a.text.strip()
uniURL = 'https://www.internationalstudent.com' + li | try:
Uni.state = soup.find_all('li',{'class':'breadcrumb-item'})[2].a.text
except:
| random_line_split |
main.go | WHISPER_ENDPOINT", "WHISPER_URL"},
Value: "https://api.whisper.rotational.dev",
},
}
app.Commands = []*cli.Command{
{
Name: "serve",
Usage: "run the whisper server",
Category: "server",
Action: serve,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "addr",
Aliases: []string{"a"},
Usage: "address to bind the whisper server on",
EnvVars: []string{"WHISPER_BIND_ADDR"},
},
},
},
{
Name: "create",
Usage: "create a whisper secret",
Category: "client",
Before: initClient,
Action: create,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "secret",
Aliases: []string{"s"},
Usage: "input the secret as a string on the command line", | Aliases: []string{"G", "gs"},
Usage: "generate a random secret of the specified length",
},
&cli.StringFlag{
Name: "in",
Aliases: []string{"i", "u", "upload"},
Usage: "upload a file as the secret contents",
},
&cli.StringFlag{
Name: "password",
Aliases: []string{"p"},
Usage: "specify a password to access the secret",
},
&cli.IntFlag{
Name: "generate-password",
Aliases: []string{"g", "gp"},
Usage: "generate a random password of the specified length",
},
&cli.IntFlag{
Name: "accesses",
Aliases: []string{"a"},
Usage: "set number of allowed accesses; default 1, -1 for unlimited until expiration",
},
&cli.DurationFlag{
Name: "lifetime",
Aliases: []string{"l", "e", "expires", "expires-after"},
Usage: "specify the lifetime of the secret before it is deleted",
},
&cli.BoolFlag{
Name: "b64encoded",
Aliases: []string{"b", "b64"},
Usage: "specify if the secret is base64 encoded (true if uploading a file, false if generated)",
},
},
},
{
Name: "fetch",
Usage: "fetch a whisper secret by its token",
ArgsUsage: "token",
Category: "client",
Before: initClient,
Action: fetch,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "password",
Aliases: []string{"p"},
Usage: "specify a password to access the secret",
},
&cli.StringFlag{
Name: "out",
Aliases: []string{"o", "d", "download"},
Usage: "download the secret to a file or to a directory",
},
},
},
{
Name: "destroy",
Usage: "destroy a whisper secret by its token",
ArgsUsage: "token",
Category: "client",
Before: initClient,
Action: destroy,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "password",
Aliases: []string{"p"},
Usage: "specify a password to access the secret",
},
},
},
{
Name: "status",
Usage: "get the whisper server status",
Category: "client",
Before: initClient,
Action: status,
},
}
app.Run(os.Args)
}
//===========================================================================
// Server Actions
//===========================================================================
func serve(c *cli.Context) (err error) {
// Create server configuration
var conf config.Config
if conf, err = config.New(); err != nil {
return cli.Exit(err, 1)
}
// Update from CLI flags
if addr := c.String("addr"); addr != "" {
conf.BindAddr = addr
}
// Create and run the whisper server
var server *whisper.Server
if server, err = whisper.New(conf); err != nil {
return cli.Exit(err, 1)
}
if err = server.Serve(); err != nil {
return cli.Exit(err, 1)
}
return nil
}
//===========================================================================
// Client Actions
//===========================================================================
var client v1.Service
func create(c *cli.Context) (err error) {
// Create the request
req := &v1.CreateSecretRequest{
Password: c.String("password"),
Accesses: c.Int("accesses"),
Lifetime: v1.Duration(c.Duration("lifetime")),
}
// Add the secret to the request via one of the command line options
switch {
case c.String("secret") != "":
if c.Int("generate-secret") != 0 || c.String("in") != "" {
return cli.Exit("specify only one of secret, generate-secret, or in path", 1)
}
// Basic secret provided via the CLI
req.Secret = c.String("secret")
req.IsBase64 = c.Bool("b64encoded")
case c.String("in") != "":
if c.Int("generate-secret") != 0 {
// The check for secret has already been done
return cli.Exit("specify only one of secret, generate-secret, or in path", 1)
}
// Load the secret as base64 encoded data from a file
var data []byte
if data, err = ioutil.ReadFile(c.String("in")); err != nil {
return cli.Exit(err, 1)
}
req.Filename = filepath.Base(c.String("in"))
req.Secret = base64.StdEncoding.EncodeToString(data)
req.IsBase64 = true
case c.Int("generate-secret") != 0:
// Generate a random secret of the specified length
if req.Secret, err = generateRandomSecret(c.Int("generate-secret")); err != nil {
return cli.Exit(err, 1)
}
req.IsBase64 = false
default:
// No secret was specified at all?
return cli.Exit("specify at least one of secret, generate-secret, or in path", 1)
}
// Handle password generation if requested
if gp := c.Int("generate-password"); gp > 0 {
if req.Password != "" {
return cli.Exit("specify either password or generate password, not both", 1)
}
if req.Password, err = generateRandomSecret(gp); err != nil {
return cli.Exit(err, 1)
}
// Print the password so that it can be used to retrieve the secret later
fmt.Printf("Password for retrieval: %s\n", req.Password)
}
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
var rep *v1.CreateSecretReply
if rep, err = client.CreateSecret(ctx, req); err != nil {
return cli.Exit(err, 1)
}
return printJSON(rep)
}
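// Illustrative sketch, not part of the original file: printJSON and isDirectory are called above
// but defined outside this excerpt. Plausible minimal versions, with signatures inferred from the
// call sites, might look like the following (an encoding/json import is assumed).
func printJSONSketch(v interface{}) error {
	data, err := json.MarshalIndent(v, "", "  ")
	if err != nil {
		return cli.Exit(err, 1)
	}
	fmt.Println(string(data))
	return nil
}

func isDirectorySketch(path string) (bool, error) {
	info, err := os.Stat(path)
	if err != nil {
		return false, err
	}
	return info.IsDir(), nil
}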
func fetch(c *cli.Context) (err error) {
if c.NArg() != 1 {
return cli.Exit("specify one token to fetch the secret for", 1)
}
token := c.Args().First()
password := c.String("password")
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
var rep *v1.FetchSecretReply
if rep, err = client.FetchSecret(ctx, token, password); err != nil {
return cli.Exit(err, 1)
}
// Figure out where to write the file to; if out is a directory, write the secret into it using its filename.
var path string
// If the user has specified an output location, handle it.
if out := c.String("out"); out != "" {
var isDir bool
if isDir, err = isDirectory(out); err == nil && isDir {
if rep.Filename != "" {
path = filepath.Join(out, rep.Filename)
} else {
path = filepath.Join(out, "secret.dat")
}
} else {
path = out
}
} else {
// If the user didn't specify an out location and the response has a filename,
// write the file with the specified filename in the current working directory.
path = rep.Filename
}
// If we've discovered a path to write the file to, write it there, decoding the
// data as necessary from base64. Otherwise print the json to stdout and exit.
if path != "" {
var data []byte
if rep.IsBase64 {
if data, err = base64.StdEncoding.DecodeString(rep.Secret); err != nil {
return cli.Exit(err, 1)
}
} else {
data = []byte(rep.Secret)
}
if err = ioutil.WriteFile(path | },
&cli.IntFlag{
Name: "generate-secret", | random_line_split |
main.go | ISPER_ENDPOINT", "WHISPER_URL"},
Value: "https://api.whisper.rotational.dev",
},
}
app.Commands = []*cli.Command{
{
Name: "serve",
Usage: "run the whisper server",
Category: "server",
Action: serve,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "addr",
Aliases: []string{"a"},
Usage: "address to bind the whisper server on",
EnvVars: []string{"WHISPER_BIND_ADDR"},
},
},
},
{
Name: "create",
Usage: "create a whisper secret",
Category: "client",
Before: initClient,
Action: create,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "secret",
Aliases: []string{"s"},
Usage: "input the secret as a string on the command line",
},
&cli.IntFlag{
Name: "generate-secret",
Aliases: []string{"G", "gs"},
Usage: "generate a random secret of the specified length",
},
&cli.StringFlag{
Name: "in",
Aliases: []string{"i", "u", "upload"},
Usage: "upload a file as the secret contents",
},
&cli.StringFlag{
Name: "password",
Aliases: []string{"p"},
Usage: "specify a password to access the secret",
},
&cli.IntFlag{
Name: "generate-password",
Aliases: []string{"g", "gp"},
Usage: "generate a random password of the specified length",
},
&cli.IntFlag{
Name: "accesses",
Aliases: []string{"a"},
Usage: "set number of allowed accesses; default 1, -1 for unlimited until expiration",
},
&cli.DurationFlag{
Name: "lifetime",
Aliases: []string{"l", "e", "expires", "expires-after"},
Usage: "specify the lifetime of the secret before it is deleted",
},
&cli.BoolFlag{
Name: "b64encoded",
Aliases: []string{"b", "b64"},
Usage: "specify if the secret is base64 encoded (true if uploading a file, false if generated)",
},
},
},
{
Name: "fetch",
Usage: "fetch a whisper secret by its token",
ArgsUsage: "token",
Category: "client",
Before: initClient,
Action: fetch,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "password",
Aliases: []string{"p"},
Usage: "specify a password to access the secret",
},
&cli.StringFlag{
Name: "out",
Aliases: []string{"o", "d", "download"},
Usage: "download the secret to a file or to a directory",
},
},
},
{
Name: "destroy",
Usage: "destroy a whisper secret by its token",
ArgsUsage: "token",
Category: "client",
Before: initClient,
Action: destroy,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "password",
Aliases: []string{"p"},
Usage: "specify a password to access the secret",
},
},
},
{
Name: "status",
Usage: "get the whisper server status",
Category: "client",
Before: initClient,
Action: status,
},
}
app.Run(os.Args)
}
//===========================================================================
// Server Actions
//===========================================================================
func serve(c *cli.Context) (err error) {
// Create server configuration
var conf config.Config
if conf, err = config.New(); err != nil {
return cli.Exit(err, 1)
}
// Update from CLI flags
if addr := c.String("addr"); addr != "" {
conf.BindAddr = addr
}
// Create and run the whisper server
var server *whisper.Server
if server, err = whisper.New(conf); err != nil {
return cli.Exit(err, 1)
}
if err = server.Serve(); err != nil {
return cli.Exit(err, 1)
}
return nil
}
//===========================================================================
// Client Actions
//===========================================================================
var client v1.Service
func create(c *cli.Context) (err error) {
// Create the request
req := &v1.CreateSecretRequest{
Password: c.String("password"),
Accesses: c.Int("accesses"),
Lifetime: v1.Duration(c.Duration("lifetime")),
}
// Add the secret to the request via one of the command line options
switch {
case c.String("secret") != "":
if c.Int("generate-secret") != 0 || c.String("in") != "" {
return cli.Exit("specify only one of secret, generate-secret, or in path", 1)
}
// Basic secret provided via the CLI
req.Secret = c.String("secret")
req.IsBase64 = c.Bool("b64encoded")
case c.String("in") != "":
if c.Int("generate-secret") != 0 {
// The check for secret has already been done
return cli.Exit("specify only one of secret, generate-secret, or in path", 1)
}
// Load the secret as base64 encoded data from a file
var data []byte
if data, err = ioutil.ReadFile(c.String("in")); err != nil {
return cli.Exit(err, 1)
}
req.Filename = filepath.Base(c.String("in"))
req.Secret = base64.StdEncoding.EncodeToString(data)
req.IsBase64 = true
case c.Int("generate-secret") != 0:
// Generate a random secret of the specified length
if req.Secret, err = generateRandomSecret(c.Int("generate-secret")); err != nil {
return cli.Exit(err, 1)
}
req.IsBase64 = false
default:
// No secret was specified at all?
return cli.Exit("specify at least one of secret, generate-secret, or in path", 1)
}
// Handle password generation if requested
if gp := c.Int("generate-password"); gp > 0 {
if req.Password != "" {
return cli.Exit("specify either password or generate password, not both", 1)
}
if req.Password, err = generateRandomSecret(gp); err != nil |
// Print the password so that it can be used to retrieve the secret later
fmt.Printf("Password for retrieval: %s\n", req.Password)
}
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
var rep *v1.CreateSecretReply
if rep, err = client.CreateSecret(ctx, req); err != nil {
return cli.Exit(err, 1)
}
return printJSON(rep)
}
func fetch(c *cli.Context) (err error) {
if c.NArg() != 1 {
return cli.Exit("specify one token to fetch the secret for", 1)
}
token := c.Args().First()
password := c.String("password")
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
var rep *v1.FetchSecretReply
if rep, err = client.FetchSecret(ctx, token, password); err != nil {
return cli.Exit(err, 1)
}
// Figure out where to write the file to; if out is a directory, write the
// file into it using the secret's filename (falling back to secret.dat).
var path string
// If the user has specified an output location, handle it.
if out := c.String("out"); out != "" {
var isDir bool
if isDir, err = isDirectory(out); err == nil && isDir {
if rep.Filename != "" {
path = filepath.Join(out, rep.Filename)
} else {
path = filepath.Join(out, "secret.dat")
}
} else {
path = out
}
} else {
// If the user didn't specify an out location and the response has a filename,
// write the file with the specified filename in the current working directory.
path = rep.Filename
}
// If we've discovered a path to write the file to, write it there, decoding the
// data as necessary from base64. Otherwise print the json to stdout and exit.
if path != "" {
var data []byte
if rep.IsBase64 {
if data, err = base64.StdEncoding.DecodeString(rep.Secret); err != nil {
return cli.Exit(err, 1)
}
} else {
data = []byte(rep.Secret)
}
if err = ioutil.WriteFile | {
return cli.Exit(err, 1)
} | conditional_block |
main.go | ", "expires", "expires-after"},
Usage: "specify the lifetime of the secret before it is deleted",
},
&cli.BoolFlag{
Name: "b64encoded",
Aliases: []string{"b", "b64"},
Usage: "specify if the secret is base64 encoded (true if uploading a file, false if generated)",
},
},
},
{
Name: "fetch",
Usage: "fetch a whisper secret by its token",
ArgsUsage: "token",
Category: "client",
Before: initClient,
Action: fetch,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "password",
Aliases: []string{"p"},
Usage: "specify a password to access the secret",
},
&cli.StringFlag{
Name: "out",
Aliases: []string{"o", "d", "download"},
Usage: "download the secret to a file or to a directory",
},
},
},
{
Name: "destroy",
Usage: "destroy a whisper secret by its token",
ArgsUsage: "token",
Category: "client",
Before: initClient,
Action: destroy,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "password",
Aliases: []string{"p"},
Usage: "specify a password to access the secret",
},
},
},
{
Name: "status",
Usage: "get the whisper server status",
Category: "client",
Before: initClient,
Action: status,
},
}
app.Run(os.Args)
}
//===========================================================================
// Server Actions
//===========================================================================
func serve(c *cli.Context) (err error) {
// Create server configuration
var conf config.Config
if conf, err = config.New(); err != nil {
return cli.Exit(err, 1)
}
// Update from CLI flags
if addr := c.String("addr"); addr != "" {
conf.BindAddr = addr
}
// Create and run the whisper server
var server *whisper.Server
if server, err = whisper.New(conf); err != nil {
return cli.Exit(err, 1)
}
if err = server.Serve(); err != nil {
return cli.Exit(err, 1)
}
return nil
}
//===========================================================================
// Client Actions
//===========================================================================
var client v1.Service
func create(c *cli.Context) (err error) {
// Create the request
req := &v1.CreateSecretRequest{
Password: c.String("password"),
Accesses: c.Int("accesses"),
Lifetime: v1.Duration(c.Duration("lifetime")),
}
// Add the secret to the request via one of the command line options
switch {
case c.String("secret") != "":
if c.Int("generate-secret") != 0 || c.String("in") != "" {
return cli.Exit("specify only one of secret, generate-secret, or in path", 1)
}
// Basic secret provided via the CLI
req.Secret = c.String("secret")
req.IsBase64 = c.Bool("b64encoded")
case c.String("in") != "":
if c.Int("generate-secret") != 0 {
// The check for secret has already been done
return cli.Exit("specify only one of secret, generate-secret, or in path", 1)
}
// Load the secret as base64 encoded data from a file
var data []byte
if data, err = ioutil.ReadFile(c.String("in")); err != nil {
return cli.Exit(err, 1)
}
req.Filename = filepath.Base(c.String("in"))
req.Secret = base64.StdEncoding.EncodeToString(data)
req.IsBase64 = true
case c.Int("generate-secret") != 0:
// Generate a random secret of the specified length
if req.Secret, err = generateRandomSecret(c.Int("generate-secret")); err != nil {
return cli.Exit(err, 1)
}
req.IsBase64 = false
default:
// No secret was specified at all?
return cli.Exit("specify at least one of secret, generate-secret, or in path", 1)
}
// Handle password generation if requested
if gp := c.Int("generate-password"); gp > 0 {
if req.Password != "" {
return cli.Exit("specify either password or generate password, not both", 1)
}
if req.Password, err = generateRandomSecret(gp); err != nil {
return cli.Exit(err, 1)
}
// Print the password so that it can be used to retrieve the secret later
fmt.Printf("Password for retrieval: %s\n", req.Password)
}
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
var rep *v1.CreateSecretReply
if rep, err = client.CreateSecret(ctx, req); err != nil {
return cli.Exit(err, 1)
}
return printJSON(rep)
}
func fetch(c *cli.Context) (err error) {
if c.NArg() != 1 {
return cli.Exit("specify one token to fetch the secret for", 1)
}
token := c.Args().First()
password := c.String("password")
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
var rep *v1.FetchSecretReply
if rep, err = client.FetchSecret(ctx, token, password); err != nil {
return cli.Exit(err, 1)
}
// Figure out where to write the file to; if out is a directory, write the
// file into it using the secret's filename (falling back to secret.dat).
var path string
// If the user has specified an output location, handle it.
if out := c.String("out"); out != "" {
var isDir bool
if isDir, err = isDirectory(out); err == nil && isDir {
if rep.Filename != "" {
path = filepath.Join(out, rep.Filename)
} else {
path = filepath.Join(out, "secret.dat")
}
} else {
path = out
}
} else {
// If the user didn't specify an out location and the response has a filename,
// write the file with the specified filename in the current working directory.
path = rep.Filename
}
// If we've discovered a path to write the file to, write it there, decoding the
// data as necessary from base64. Otherwise print the json to stdout and exit.
if path != "" {
var data []byte
if rep.IsBase64 {
if data, err = base64.StdEncoding.DecodeString(rep.Secret); err != nil {
return cli.Exit(err, 1)
}
} else {
data = []byte(rep.Secret)
}
if err = ioutil.WriteFile(path, data, 0644); err != nil {
return cli.Exit(err, 1)
}
fmt.Printf("secret written to %s\n", path)
return nil
}
// Simply print the JSON response as the last case.
// TODO: should we provide a flag to just print the secret for copy and paste?
return printJSON(rep)
}
func destroy(c *cli.Context) (err error) {
if c.NArg() != 1 {
return cli.Exit("specify one token to fetch the secret for", 1)
}
token := c.Args().First()
password := c.String("password")
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
var rep *v1.DestroySecretReply
if rep, err = client.DestroySecret(ctx, token, password); err != nil {
return cli.Exit(err, 1)
}
return printJSON(rep)
}
func status(c *cli.Context) (err error) {
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
var rep *v1.StatusReply
if rep, err = client.Status(ctx); err != nil {
return cli.Exit(err, 1)
}
return printJSON(rep)
}
//===========================================================================
// Helper Functions
//===========================================================================
func initClient(c *cli.Context) (err error) {
if client, err = v1.New(c.String("endpoint")); err != nil {
return cli.Exit(err, 1)
}
return nil
}
func generateRandomSecret(n int) (s string, err error) {
const letters = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_%=+"
ret := make([]byte, n)
for i := 0; i < n; i++ {
num, err := rand.Int(rand.Reader, big.NewInt(int64(len(letters))))
if err != nil {
return "", err
}
ret[i] = letters[num.Int64()]
}
return string(ret), nil
}
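// printJSON is used by the client actions above but its definition is not
// included in this excerpt; a minimal sketch of such a helper (assuming
// encoding/json is imported) could look like:
//
//	func printJSON(v interface{}) error {
//		data, err := json.MarshalIndent(v, "", "  ")
//		if err != nil {
//			return cli.Exit(err, 1)
//		}
//		fmt.Println(string(data))
//		return nil
//	}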
func isDirectory(path string) (isDir bool, err error) | {
var fi fs.FileInfo
if fi, err = os.Stat(path); err != nil {
return false, err
}
return fi.IsDir(), nil
} | identifier_body |
|
main.go | ISPER_ENDPOINT", "WHISPER_URL"},
Value: "https://api.whisper.rotational.dev",
},
}
app.Commands = []*cli.Command{
{
Name: "serve",
Usage: "run the whisper server",
Category: "server",
Action: serve,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "addr",
Aliases: []string{"a"},
Usage: "address to bind the whisper server on",
EnvVars: []string{"WHISPER_BIND_ADDR"},
},
},
},
{
Name: "create",
Usage: "create a whisper secret",
Category: "client",
Before: initClient,
Action: create,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "secret",
Aliases: []string{"s"},
Usage: "input the secret as a string on the command line",
},
&cli.IntFlag{
Name: "generate-secret",
Aliases: []string{"G", "gs"},
Usage: "generate a random secret of the specified length",
},
&cli.StringFlag{
Name: "in",
Aliases: []string{"i", "u", "upload"},
Usage: "upload a file as the secret contents",
},
&cli.StringFlag{
Name: "password",
Aliases: []string{"p"},
Usage: "specify a password to access the secret",
},
&cli.IntFlag{
Name: "generate-password",
Aliases: []string{"g", "gp"},
Usage: "generate a random password of the specified length",
},
&cli.IntFlag{
Name: "accesses",
Aliases: []string{"a"},
Usage: "set number of allowed accesses; default 1, -1 for unlimited until expiration",
},
&cli.DurationFlag{
Name: "lifetime",
Aliases: []string{"l", "e", "expires", "expires-after"},
Usage: "specify the lifetime of the secret before it is deleted",
},
&cli.BoolFlag{
Name: "b64encoded",
Aliases: []string{"b", "b64"},
Usage: "specify if the secret is base64 encoded (true if uploading a file, false if generated)",
},
},
},
{
Name: "fetch",
Usage: "fetch a whisper secret by its token",
ArgsUsage: "token",
Category: "client",
Before: initClient,
Action: fetch,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "password",
Aliases: []string{"p"},
Usage: "specify a password to access the secret",
},
&cli.StringFlag{
Name: "out",
Aliases: []string{"o", "d", "download"},
Usage: "download the secret to a file or to a directory",
},
},
},
{
Name: "destroy",
Usage: "destroy a whisper secret by its token",
ArgsUsage: "token",
Category: "client",
Before: initClient,
Action: destroy,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "password",
Aliases: []string{"p"},
Usage: "specify a password to access the secret",
},
},
},
{
Name: "status",
Usage: "get the whisper server status",
Category: "client",
Before: initClient,
Action: status,
},
}
app.Run(os.Args)
}
//===========================================================================
// Server Actions
//===========================================================================
func serve(c *cli.Context) (err error) {
// Create server configuration
var conf config.Config
if conf, err = config.New(); err != nil {
return cli.Exit(err, 1)
}
// Update from CLI flags
if addr := c.String("addr"); addr != "" {
conf.BindAddr = addr
}
// Create and run the whisper server
var server *whisper.Server
if server, err = whisper.New(conf); err != nil {
return cli.Exit(err, 1)
}
if err = server.Serve(); err != nil {
return cli.Exit(err, 1)
}
return nil
}
//===========================================================================
// Client Actions
//===========================================================================
var client v1.Service
func | (c *cli.Context) (err error) {
// Create the request
req := &v1.CreateSecretRequest{
Password: c.String("password"),
Accesses: c.Int("accesses"),
Lifetime: v1.Duration(c.Duration("lifetime")),
}
// Add the secret to the request via one of the command line options
switch {
case c.String("secret") != "":
if c.Int("generate-secret") != 0 || c.String("in") != "" {
return cli.Exit("specify only one of secret, generate-secret, or in path", 1)
}
// Basic secret provided via the CLI
req.Secret = c.String("secret")
req.IsBase64 = c.Bool("b64encoded")
case c.String("in") != "":
if c.Int("generate-secret") != 0 {
// The check for secret has already been done
return cli.Exit("specify only one of secret, generate-secret, or in path", 1)
}
// Load the secret as base64 encoded data from a file
var data []byte
if data, err = ioutil.ReadFile(c.String("in")); err != nil {
return cli.Exit(err, 1)
}
req.Filename = filepath.Base(c.String("in"))
req.Secret = base64.StdEncoding.EncodeToString(data)
req.IsBase64 = true
case c.Int("generate-secret") != 0:
// Generate a random secret of the specified length
if req.Secret, err = generateRandomSecret(c.Int("generate-secret")); err != nil {
return cli.Exit(err, 1)
}
req.IsBase64 = false
default:
// No secret was specified at all?
return cli.Exit("specify at least one of secret, generate-secret, or in path", 1)
}
// Handle password generation if requested
if gp := c.Int("generate-password"); gp > 0 {
if req.Password != "" {
return cli.Exit("specify either password or generate password, not both", 1)
}
if req.Password, err = generateRandomSecret(gp); err != nil {
return cli.Exit(err, 1)
}
// Print the password so that it can be used to retrieve the secret later
fmt.Printf("Password for retrieval: %s\n", req.Password)
}
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
var rep *v1.CreateSecretReply
if rep, err = client.CreateSecret(ctx, req); err != nil {
return cli.Exit(err, 1)
}
return printJSON(rep)
}
func fetch(c *cli.Context) (err error) {
if c.NArg() != 1 {
return cli.Exit("specify one token to fetch the secret for", 1)
}
token := c.Args().First()
password := c.String("password")
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
var rep *v1.FetchSecretReply
if rep, err = client.FetchSecret(ctx, token, password); err != nil {
return cli.Exit(err, 1)
}
// Figure out where to write the file to; if out is a directory, write the
// file into it using the secret's filename (falling back to secret.dat).
var path string
// If the user has specified an output location, handle it.
if out := c.String("out"); out != "" {
var isDir bool
if isDir, err = isDirectory(out); err == nil && isDir {
if rep.Filename != "" {
path = filepath.Join(out, rep.Filename)
} else {
path = filepath.Join(out, "secret.dat")
}
} else {
path = out
}
} else {
// If the user didn't specify an out location and the response has a filename,
// write the file with the specified filename in the current working directory.
path = rep.Filename
}
// If we've discovered a path to write the file to, write it there, decoding the
// data as necessary from base64. Otherwise print the json to stdout and exit.
if path != "" {
var data []byte
if rep.IsBase64 {
if data, err = base64.StdEncoding.DecodeString(rep.Secret); err != nil {
return cli.Exit(err, 1)
}
} else {
data = []byte(rep.Secret)
}
if err = ioutil.WriteFile(path | create | identifier_name |
sound.js | };
//
// luv.update = function() {
// // This will throw an error;
// // cry might need some time to load.
// // Continue reading for a working version.
// if(something) { cry.play(); }
// };
//
// A simple way to fix this is to check that all media has been loaded before
// attempting to play any sounds. The `luv.media` object has an `isLoaded` method
// that we can use for that. One simple approach is to just end the `luv.update` call
// if media is still being loaded. Like this:
//
// luv.update = function() {
// if(!luv.media.isLoaded()) { return; }
// // All sounds (and images) are loaded now, we can play them
// if(something) { cry.play(); }
// };
//
// Note: play returns a *sound instance*. The same sound can have several sound
// instances playing simultaneously; each of those is one instance. See `audio/sound_instance.js` for
// details.
//
// Possible options:
//
// * `volume`: float number, from 0 (muted) to 1 (max volume). Default: 1.
// * `loop`: boolean, true if the instance must loop, false otherwise. Default: false.
// * `speed`: float number, 1 is regular velocity, 2 is 2x, 0.5 is half, etc. Default: 1.
// * `time`: float number, in seconds. The time offset to be used. Default: 0
// * `status`: string, it can be either "paused" or "ready". Defaults to "ready".
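// For example (values are illustrative; `cry` is the sound loaded in the
// snippet at the top of this file):
//
// var instance = cry.play({volume: 0.5, loop: true, speed: 1.5});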
play: function(options) {
if(!this.isLoaded()) {
throw new Error("Attepted to play a non loaded sound: " + this);
}
var instance = this.getReadyInstance(options);
instance.play();
return instance;
},
// Pauses all the instances of the sound. If you want to pause an individual instance,
// call `instance.pause()` instead of `sound.pause()`.
pause: function() {
this.instances.forEach(function(instance){ instance.pause(); });
},
// Stops all the instances of the sound. The difference between `pause` and `stop` is that
// stop "rewinds" each instance, and marks it as "ready to be reused";
stop: function() {
this.instances.forEach(function(instance){ instance.stop(); });
},
// `countInstances` returns how many instances the sound has.
// Includes both playing and finished instances.
countInstances: function() {
return this.instances.length;
},
// `countPlayingInstances` counts how many instances of the sound are currently playing.
// Non-playing instances are destroyed after 3 seconds of inactivity by default.
countPlayingInstances: function() {
var count = 0;
this.instances.forEach(function(inst){ count += inst.isPlaying() ? 1 : 0; });
return count;
},
// `getReadyInstance` returns the first instance which is available for playing.
// The method tries to find one available instance in the list of instances; if no
// available instances are found, it creates a new one.
//
// accepts the same options as `play`. The only difference is that getReadyInstance returns
// an instance in the `"ready"` status, while the one returned by `play` is in the `"playing"` status.
getReadyInstance: function(options) {
var instance = getExistingReadyInstance(this.instances);
if(!instance) {
instance = createInstance(this);
this.instances.push(instance);
}
instance.reset(this.el, options);
return instance;
},
// `getExpirationTime` returns how much time instances are preserved before they
// expire. By default it's 3 seconds.
getExpirationTime: function() {
return this.expirationTime;
},
// `setExpirationTime` sets the time it takes to expire an instance after it has stopped playing.
// In some browsers, it takes time to create each sound instance, so increasing this value can
// improve performance by keeping stopped instances around longer so they can be reused.
// By default it is 3 seconds.
setExpirationTime: function(seconds) {
this.expirationTime = seconds;
}
});
// This class variable controls the default expiration time of sound instances
Luv.Audio.Sound.DEFAULT_EXPIRATION_TIME = 3; // 3 seconds
// Sound is an asset. The `Luv.Media.Asset` mixin adds methods like `isLoaded` and `isPending` to the class.
Luv.Audio.Sound.include(Luv.Media.Asset);
// `Luv.Audio.SoundMethods` is a mixin shared by both `Luv.Audio.Sound` and `Luv.Audio.SoundInstance`.
// In `Sound`, they modify the "defaults" used for creating new instances. In `SoundInstance` they modify
// the instances themselves.
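// For example (illustrative):
//
// cry.setVolume(0.2); // instances created by later cry.play() calls default to volume 0.2
// var inst = cry.play();
// inst.setVolume(0.8); // only this particular instance is changed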
Luv.Audio.SoundMethods = {
// `setVolume` expects a float between 0.0 (no sound) and 1.0 (full sound). Defaults to 1.
//
// * When invoked in a `Sound`, it alters how any subsequent calls to sound.play() sound. Alternatively, you can invoke
// `sound.play({volume: 0.5})` to alter the volume of only one sound instance.
// * When invoked in a `SoundInstance`, it alters the volume of that particular instance.
setVolume: function(volume) {
volume = clampNumber(volume, 0, 1);
this.el.volume = volume;
},
// `getVolume` returns the volume of a particular sound/sound instance. See `setVolume` for more details.
getVolume: function() {
return this.el.volume;
},
// `setLoop` expects a `true` or `false` value.
//
// * When invoked in a `Sound`, it will make the sound "play in loop" in all successive calls to `sound.play()`. It is
// usually a better idea to call `sound.play({loop: true})` instead. That way, only one instance of the sound will loop.
// * When invoked in a `SoundInstance`, it will make the instance loop (or deactivate the looping).
setLoop: function(loop) {
this.loop = !!loop;
if(loop) {
this.el.loop = "loop";
} else {
this.el.removeAttribute("loop");
}
},
// `getLoop` returns the state of the internal `loop` variable (true if the sound/sound instance starts over after finishing, false
// if the sound/sound instance just halts after the first play).
getLoop: function() {
return this.loop;
},
// `setSpeed` expects a numeric float with the speed at which the sound/sound instance will play. 1.0 is regular. 2.0 is 2x. 0.5 is half
// speed. And so on.
// If nothing is specified, the default speed of any sound is 1.0.
//
// * When invoked in a `Sound`, it alters the speed of all the sound instances produced by calls to `sound.play()`. You can also invoke
// `sound.play({speed: 2.0})` to alter the speed of a particular sound instance without modifying the others.
// * When invoked in a `SoundInstance`, it alters the speed of that instance only.
setSpeed: function(speed) {
this.el.playbackRate = speed;
},
// `getSpeed` returns the sound/sound instance speed. See `setSpeed` for details.
getSpeed: function() {
return this.el.playbackRate;
},
// `setTime` expects a float number specifying the "time offset" of a particular sound/sound instance. Defaults to 0.
//
// * When invoked in a `Sound`, it will make all the sound instances created by `sound.play()` have a default time when they start playing.
// You can alternatively do `sound.play({time: 4})` to only modify one particular instance.
// * When invoked in a `SoundInstance`:
// * If the instance is playing, it will "jump" to that time.
// * If the instance is not playing, it will "start" on that time when it is played.
setTime: function(time) {
try {
this.el.currentTime = time;
} catch(err) {
// some browsers throw an error when setting currentTime right after loading
// a node. See https://bugzilla.mozilla.org/show_bug.cgi?id=465498
}
},
// `getTime` returns the internal `time` attribute of a sound/sound instance. See `setTime` for details.
getTime: function() {
return this.el.currentTime;
},
// `getDuration` returns the total duration of a sound instance.
getDuration: function() {
return this.el.duration;
}
};
Luv.Audio.Sound.include(Luv.Audio.SoundMethods);
// Internal function used by Luv.Sound.getReadyInstance
var getExistingReadyInstance = function(instances) {
var instance;
for(var i=0; i< instances.length; i++) | {
instance = instances[i];
if(instance.isReady()) {
return instance;
}
} | conditional_block |
|
sound.js | sfx/cry.mp3');
// };
//
// luv.update = function() {
// // This will throw an error;
// // cry might need some time to load.
// // Continue reading for a working version.
// if(something) { cry.play(); }
// };
//
// A simple way to fix this is to check that all media has been loaded before
// attempting to play any sounds. The `luv.media` object has an `isLoaded` method
// that we can use for that. One simple approach is to just end the `luv.update` call
// if media is still being loaded. Like this:
//
// luv.update = function() {
// if(!luv.media.isLoaded()) { return; }
// // All sounds (and images) are loaded now, we can play them
// if(something) { cry.play(); }
// };
//
// Note: play returns a *sound instance*. The same sound can have several sound
// instances playing simultaneously; each of those is one instance. See `audio/sound_instance.js` for
// details.
//
// Possible options:
//
// * `volume`: float number, from 0 (muted) to 1 (max volume). Default: 1.
// * `loop`: boolean, true if the instance must loop, false otherwise. Default: false.
// * `speed`: float number, 1 is regular velocity, 2 is 2x, 0.5 is half, etc. Default: 1.
// * `time`: float number, in seconds. The time offset to be used. Default: 0
// * `status`: string, it can be either "paused" or "ready". Defaults to "ready".
play: function(options) {
if(!this.isLoaded()) {
throw new Error("Attepted to play a non loaded sound: " + this);
}
var instance = this.getReadyInstance(options);
instance.play();
return instance;
},
// Pauses all the instances of the sound. If you want to pause an individual instance,
// call `instance.pause()` instead of `sound.pause()`.
pause: function() {
this.instances.forEach(function(instance){ instance.pause(); });
},
// Stops all the instances of the sound. The difference between `pause` and `stop` is that
// stop "rewinds" each instance, and marks it as "ready to be reused";
stop: function() {
this.instances.forEach(function(instance){ instance.stop(); });
},
// `countInstances` returns how many instances the sound has.
// Includes both playing and finished instances.
countInstances: function() {
return this.instances.length;
},
// `countPlayingInstances` counts how many instances of the sound are currently playing.
// Non-playing instances are destroyed after 3 seconds of inactivity by default.
countPlayingInstances: function() {
var count = 0;
this.instances.forEach(function(inst){ count += inst.isPlaying() ? 1 : 0; });
return count;
},
// `getReadyInstance` returns the first instance which is available for playing.
// The method tries to find one available instance in the list of instances; if no
// available instances are found, it creates a new one.
//
// accepts the same options as `play`. The only difference is that getReadyInstance returns
// an instance in the `"ready"` status, while the one returned by `play` is in the `"playing"` status.
getReadyInstance: function(options) {
var instance = getExistingReadyInstance(this.instances);
if(!instance) {
instance = createInstance(this);
this.instances.push(instance);
}
instance.reset(this.el, options);
return instance;
},
// `getExpirationTime` returns how much time instances are preserved before they
// expire. By default it's 3 seconds.
getExpirationTime: function() {
return this.expirationTime;
},
// `setExpirationTime` sets the time it takes to expire an instance after it has stopped playing.
// In some browsers, it takes time to create each sound instance, so increasing this value can
// improve performance by keeping stopped instances around longer so they can be reused.
// By default it is 3 seconds.
setExpirationTime: function(seconds) {
this.expirationTime = seconds;
}
});
// This class variable controls the default expiration time of sound instances
Luv.Audio.Sound.DEFAULT_EXPIRATION_TIME = 3; // 3 seconds
// Sound is an asset. The `Luv.Media.Asset` mixin adds methods like `isLoaded` and `isPending` to the class.
Luv.Audio.Sound.include(Luv.Media.Asset);
// `Luv.Audio.SoundMethods` is a mixin shared by both `Luv.Audio.Sound` and `Luv.Audio.SoundInstance`.
// In `Sound`, they modify the "defaults" used for creating new instances. In `SoundInstance` they modify
// the instances themselves.
Luv.Audio.SoundMethods = {
// `setVolume` expects a float between 0.0 (no sound) and 1.0 (full sound). Defaults to 1.
//
// * When invoked in a `Sound`, it alters how any subsequent calls to sound.play() sound. Alternatively, you can invoke
// `sound.play({volume: 0.5})` to alter the volume of only one sound instance.
// * When invoked in a `SoundInstance`, it alters the volume of that particular instance.
setVolume: function(volume) {
volume = clampNumber(volume, 0, 1);
this.el.volume = volume;
},
// `getVolume` returns the volume of a particular sound/sound instance. See `setVolume` for more details.
getVolume: function() {
return this.el.volume;
},
// `setLoop` expects a `true` or `false` value.
//
// * When invoked in a `Sound`, it will make the sound "play in loop" in all successive calls to `sound.play()`. It is
// usually a better idea to call `sound.play({loop: true})` instead. That way, only one instance of the sound will loop.
// * When invoked in a `SoundInstance`, it will make the instance loop (or deactivate the looping).
setLoop: function(loop) {
this.loop = !!loop;
if(loop) {
this.el.loop = "loop";
} else {
this.el.removeAttribute("loop");
}
},
// `getLoop` returns the state of the internal `loop` variable (true if the sound/sound instance starts over after finishing, false
// if the sound/sound instance just halts after the first play).
getLoop: function() {
return this.loop;
},
// `setSpeed` expects a numeric float with the speed at which the sound/sound instance will play. 1.0 is regular. 2.0 is 2x. 0.5 is half
// speed. And so on.
// If nothing is specified, the default speed of any sound is 1.0.
//
// * When invoked in a `Sound`, it alters the speed of all the sound instances produced by calls to `sound.play()`. You can also invoke
// `sound.play({speed: 2.0})` to alter the speed of a particular sound instance without modifying the others.
// * When invoked in a `SoundInstance`, it alters the speed of that instance only.
setSpeed: function(speed) {
this.el.playbackRate = speed;
},
// `getSpeed` returns the sound/sound instance speed. See `setSpeed` for details.
getSpeed: function() {
return this.el.playbackRate;
},
// `setTime` expects a float number specifying the "time offset" of a particular sound/sound instance. Defaults to 0.
//
// * When invoked in a `Sound`, it will make all the sound instances created by `sound.play()` have a default time when they start playing.
// You can alternatively do `sound.play({time: 4})` to only modify one particular instance.
// * When invoked in a `SoundInstance`:
// * If the instance is playing, it will "jump" to that time.
// * If the instance is not playing, it will "start" on that time when it is played.
setTime: function(time) {
try {
this.el.currentTime = time;
} catch(err) {
// some browsers throw an error when setting currentTime right after loading
// a node. See https://bugzilla.mozilla.org/show_bug.cgi?id=465498
}
},
// `getTime` returns the internal `time` attribute of a sound/sound instance. See `setTime` for details.
getTime: function() {
return this.el.currentTime;
},
// `getDuration` returns the total duration of a sound instance.
getDuration: function() {
return this.el.duration;
}
};
Luv.Audio.Sound.include(Luv.Audio.SoundMethods);
// Internal function used by Luv.Sound.getReadyInstance | var getExistingReadyInstance = function(instances) {
var instance;
for(var i=0; i< instances.length; i++) {
instance = instances[i];
if(instance.isReady()) { | random_line_split |
|
LonghurstProvince.py | (target='Iodide'):
"""
Driver to add Longhurst Provinces fields to spatial NetCDF files
"""
Filenames = [
's2s_predicted_{}_0.125x0.125_No_Skagerrak',
's2s_feature_variables_0.125x0.125',
's2s_predicted_{}_0.125x0.125',
]
folder = '/work/home/ts551/data/iodide/'
for name in Filenames:
print(name)
# Open dataset
ds = xr.open_dataset(folder + name.format(target)+'.nc')
# Longhurst Province to dataset
ds = add_Longhurst_Province_raster_to_array(ds)
# Add LWI index too
ds = add_LWI2array(ds, res='0.125x0.125')
# Save dataset
ds.to_netcdf(name+'_with_Provinces_II.nc')
del ds
def add_Longhurst_Province_raster_to_array(ds):
"""
Add Longhurst Province to xr.dataset as a raster
"""
import geopandas
from rasterio import features
from affine import Affine
# Get the shape files
provinces = geopandas.read_file('/work/home/ts551/data/longhurst_v4_2010')
shapes = [(shape, n) for n, shape in enumerate(provinces.geometry)]
# Now add the existing array
ds_tmp = ds[list(ds.data_vars)[0]].copy().mean(dim='time')
# Add raster the provinces onto this
ds_tmp['LonghurstProvince'] = rasterize(shapes, ds_tmp.coords)
# Then update the variable
ds['LonghurstProvince'] = ds_tmp['LonghurstProvince']
# Add Some attributes
attrs = {
'Long name': 'Longhurst Provinces',
'data downloaded from': 'http://www.marineregions.org/downloads.php#longhurst',
'version': 'Version 4 - March 2010',
'Citations': "Longhurst, A.R et al. (1995). An estimate of global primary production in the ocean from satellite radiometer data. J. Plankton Res. 17, 1245-1271 ; Longhurst, A.R. (1995). Seasonal cycles of pelagic production and consumption. Prog. Oceanogr. 36, 77-167 ; Longhurst, A.R. (1998). Ecological Geography of the Sea. Academic Press, San Diego. 397p. (IMIS) ; Longhurst, A.R. (2006). Ecological Geography of the Sea. 2nd Edition. Academic Press, San Diego, 560p.",
}
ds['LonghurstProvince'].attrs = attrs
return ds
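# NOTE: the rasterize() helper used above is not shown in this excerpt. A
# minimal sketch of such a helper, following the common rasterio/affine recipe
# (names and details assumed rather than taken from this module), would be:
#
# def rasterize(shapes, coords, fill=np.nan):
#     lat, lon = np.asarray(coords['lat']), np.asarray(coords['lon'])
#     transform = (Affine.translation(lon[0], lat[0]) *
#                  Affine.scale(lon[1] - lon[0], lat[1] - lat[0]))
#     raster = features.rasterize(shapes, out_shape=(len(lat), len(lon)),
#                                 fill=fill, transform=transform, dtype=float)
#     return xr.DataArray(raster, coords={'lat': lat, 'lon': lon},
#                         dims=('lat', 'lon'))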
def add_LonghurstProvince2NetCDF(ds=None, res='4x5', LatVar='lat', LonVar='lon',
CoordVar='Province', ExStr=''):
"""
Add numbers for Longhurst Provinces to NetCDF
Parameters
-------
ds (xr.Dataset): xarray dataset to add the Longhurst Province variable to
res (str): horizontal resolution of dataset (e.g. 4x5)
CoordVar (str): name to give to the newly created province variable
LatVar (str): variable name in DataFrame for latitude
LonVar (str): variable name in DataFrame for longitude
ExStr (str): extra string to add as a suffix to outputted files
Returns
-------
(None)
"""
# Get xml data for provinces
provinces, tree = ParseLonghurstProvinceFile()
# Just use 4x5 as an example
if isinstance(ds, type(None)):
ds = utils.get_feature_variables_as_ds(res=res)
# get dictionary of province numbers
Rnum2prov = RosieLonghurstProvinceFileNum2Province(
None, invert=True, rtn_dict=True)
# Get latitudes
DSlats = ds[LatVar].values
# Get longitudes
DSlons = ds[LonVar].values
# Get all lats and make a long form of the coords.
lats = []
lons = []
coords = []
for lat in DSlats:
for lon in DSlons:
lats += [lat]
lons += [lon]
# Make into a DataFrame
df = pd.DataFrame()
df[LatVar] = lats
df[LonVar] = lons
# Add a single variable for the coordinate
def f(x):
return (x[LonVar], x[LatVar])
df[CoordVar] = df.apply(f, axis=1)
# map the calculation of provinces
def GetProv(x):
return Get_LonghurstProvince4coord(x[CoordVar], provinces=provinces,
num2prov=Rnum2prov, tree=tree, verbose=False)
df[CoordVar] = df.apply(GetProv, axis=1)
# Construct DataFrame by unstacking
lat = df[LatVar].values
lon = df[LonVar].values
vals = df[CoordVar].values
df = pd.DataFrame(vals, index=[lat, lon]).unstack()
df.to_csv('Intial_test_{}_processed_{}.csv'.format(res, ExStr))
# Convert to Dataset
ds = xr.Dataset(data_vars={CoordVar: (['lat', 'lon', ], df.values)},
coords={'lat': DSlats, 'lon': DSlons, })
# Save as NetCDF file
ds.to_netcdf('Intial_test_{}_netCDF_{}.nc'.format(res, ExStr))
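# A minimal call of the function above (argument values are illustrative):
#
# add_LonghurstProvince2NetCDF(res='4x5', ExStr='LonghurstTest')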
def add_LonghurstProvince2table(df, LatVar='Latitude', LonVar='Longitude'):
"""
Add numbers for Longhurst provinces to DataFrame
"""
# Get xml data for provinces
provinces, tree = ParseLonghurstProvinceFile()
# Get the observational data
if isinstance(df, type(None)):
df = get_processed_df_obs_mod() # NOTE this df contains values >400nM
Rnum2prov = RosieLonghurstProvinceFileNum2Province(
None, invert=True, rtn_dict=True)
# - Work with the provinces
# Add a single variable for the coordinate
CoordVar = 'Coord'
def f(x):
return (x[LonVar], x[LatVar])
df[CoordVar] = df.apply(f, axis=1)
# map the calculation of provinces
def GetProv(x):
return Get_LonghurstProvince4coord(x[CoordVar], provinces=provinces,
num2prov=Rnum2prov, tree=tree, verbose=False)
df['MIT Province'] = df.apply(GetProv, axis=1)
# Province name
df['PName (R)'] = df['Province'].map(
RosieLonghurstProvinceFileNum2Province)
df['PName (MIT)'] = df['MIT Province'].map(
RosieLonghurstProvinceFileNum2Province)
# - Check the assignment
# How many are just the same?
bool = df['MIT Province'] == df['Province']
PrtStr = '#={} ({:.2f}%) are calculated to be the same'
Ns = float(df.loc[bool, :].shape[0])
N = float(df.shape[0])
print(PrtStr.format(Ns, Ns / N * 100))
# Which of these are just missing assignments in the input files?
Nnan = float(df['Province'].dropna().shape[0])
PrtStr = 'The % non matching, observations without provinces #={} ({:.2f}%)'
print(PrtStr.format(N-Nnan, (N-Nnan)/N*100))
# The locations where both assignments have been made?
dfT = df.loc[np.isfinite(df['Province']), :]
# For certain points the new approach failed.
tmp = dfT.loc[~np.isfinite(dfT['MIT Province']), :]
print('The following provinces were not assigned (# of times) by MIT method:')
PrtStr = 'This is {} observations ({:.2f}%)'
print(PrtStr.format(tmp.shape[0], tmp.shape[0]/N * 100))
print(tmp['PName (R)'].value_counts())
# What are the locations of these points?
PrtStr = 'Full name of {} is {}'
for prov in tmp['PName (R)'].value_counts().index:
print(PrtStr.format(prov, Get_LonghurstProvinceName4Num(prov)))
# What data sets contribute to this
PrtStr = 'Datasets contributing to these numbers: {}'
print(PrtStr.format(', '.join(set(tmp['Data_Key']))))
# For others, the assigned provinces differed
bool = dfT['MIT Province'] != dfT['Province']
vars2use = [u'Data_Key', 'MIT Province',
'Province', 'PName (MIT)', 'PName (R)']
tmp = dfT.loc[bool, :][vars2use].dropna()
# Print the differences | add_longhurst_raster_array_and_LWI_core_NetCDFs | identifier_name |
|
LonghurstProvince.py | += [lat]
lons += [lon]
# Make into a DataFrame
df = pd.DataFrame()
df[LatVar] = lats
df[LonVar] = lons
# Add a single variable for the coordinate
def f(x):
return (x[LonVar], x[LatVar])
df[CoordVar] = df.apply(f, axis=1)
# map the calculation of provinces
def GetProv(x):
return Get_LonghurstProvince4coord(x[CoordVar], provinces=provinces,
num2prov=Rnum2prov, tree=tree, verbose=False)
df[CoordVar] = df.apply(GetProv, axis=1)
# Construct DataFrame by unstacking
lat = df[LatVar].values
lon = df[LonVar].values
vals = df[CoordVar].values
df = pd.DataFrame(vals, index=[lat, lon]).unstack()
df.to_csv('Intial_test_{}_processed_{}.csv'.format(res, ExStr))
# Convert to Dataset
ds = xr.Dataset(data_vars={CoordVar: (['lat', 'lon', ], df.values)},
coords={'lat': DSlats, 'lon': DSlons, })
# Save as NetCDF file
ds.to_netcdf('Intial_test_{}_netCDF_{}.nc'.format(res, ExStr))
def add_LonghurstProvince2table(df, LatVar='Latitude', LonVar='Longitude'):
"""
Add numbers for Longhurst provinces to DataFrame
"""
# Get xml data for provinces
provinces, tree = ParseLonghurstProvinceFile()
# Get the observational data
if isinstance(df, type(None)):
df = get_processed_df_obs_mod() # NOTE this df contains values >400nM
Rnum2prov = RosieLonghurstProvinceFileNum2Province(
None, invert=True, rtn_dict=True)
# - Work with the provinces
# Add a single variable for the coordinate
CoordVar = 'Coord'
def f(x):
return (x[LonVar], x[LatVar])
df[CoordVar] = df.apply(f, axis=1)
# map the calculation of provinces
def GetProv(x):
return Get_LonghurstProvince4coord(x[CoordVar], provinces=provinces,
num2prov=Rnum2prov, tree=tree, verbose=False)
df['MIT Province'] = df.apply(GetProv, axis=1)
# Province name
df['PName (R)'] = df['Province'].map(
RosieLonghurstProvinceFileNum2Province)
df['PName (MIT)'] = df['MIT Province'].map(
RosieLonghurstProvinceFileNum2Province)
# - Check the assignment
# How many are just the same?
bool = df['MIT Province'] == df['Province']
PrtStr = '#={} ({:.2f}%) are calculated to be the same'
Ns = float(df.loc[bool, :].shape[0])
N = float(df.shape[0])
print(PrtStr.format(Ns, Ns / N * 100))
# Which of these are just missing assignments in the input files?
Nnan = float(df['Province'].dropna().shape[0])
PrtStr = 'The % non matching, observations without provinces #={} ({:.2f}%)'
print(PrtStr.format(N-Nnan, (N-Nnan)/N*100))
# The locations where both assignments have been made?
dfT = df.loc[np.isfinite(df['Province']), :]
# For certain points the new approach failed.
tmp = dfT.loc[~np.isfinite(dfT['MIT Province']), :]
print('The following provinces were not assigned (# of times) by MIT method:')
PrtStr = 'This is {} observations ({:.2f}%)'
print(PrtStr.format(tmp.shape[0], tmp.shape[0]/N * 100))
print(tmp['PName (R)'].value_counts())
# What are the locations of these points?
PrtStr = 'Full name of {} is {}'
for prov in tmp['PName (R)'].value_counts().index:
print(PrtStr.format(prov, Get_LonghurstProvinceName4Num(prov)))
# What data sets contribute to this
PrtStr = 'Datasets contributing to these numbers: {}'
print(PrtStr.format(', '.join(set(tmp['Data_Key']))))
# For others, the assigned provinces differed
bool = dfT['MIT Province'] != dfT['Province']
vars2use = [u'Data_Key', 'MIT Province',
'Province', 'PName (MIT)', 'PName (R)']
tmp = dfT.loc[bool, :][vars2use].dropna()
# Print the differences to screen
print("When assignment differs - The MIT method gives:")
PrtStr = "MIT:'{}' ({}), but R gives '{}' ({})"
for prov in list(set(tmp['PName (R)'])):
tmp_ = tmp.loc[tmp['PName (R)'] == prov, :]
for idx in tmp_.index:
|
# What data sets contribute to this
PrtStr = 'Datasets contributing to these numbers: {}'
print(PrtStr.format(', '.join(set(tmp['Data_Key']))))
def ParseLonghurstProvinceFile():
"""
Parse the Longhurst *.xml file into a dictionary object
Notes
-----
- This code is copied from elsewhere and not used...
Original code: https://github.com/thechisholmlab/Longhurst-Province-Finder
"""
from xml.dom.minidom import parse, parseString
provinces = {}
tree = parse('longhurst.xml')
for node in tree.getElementsByTagName('MarineRegions:longhurst'):
# 1. Get province code, name and bounding box from file
provCode = node.getElementsByTagName('MarineRegions:provcode')[
0].firstChild.data
provName = node.getElementsByTagName('MarineRegions:provdescr')[
0].firstChild.data
fid = node.getAttribute("fid")
b = node.getElementsByTagName('gml:coordinates')[0].firstChild.data
# 2. Parse bounding box coordinates
b = b.split(' ')
x1, y1 = b[0].split(',')
x2, y2 = b[1].split(',')
x1 = float(x1)
y1 = float(y1)
x2 = float(x2)
y2 = float(y2)
# Add province to dictionary
provinces[fid] = {'provName': provName, 'provCode': provCode,
'x1': x1, 'y1': y1, 'x2': x2, 'y2': y2}
return provinces, tree
def LonghurstProvinceFileNum2Province(input, invert=False, rtn_dict=False):
"""
Get the Longhurst province - REDUNDANT
Parameters
-------
input (str): input string to use as key to return dictionary value
invert (float): reverse the key/pair of the dictionary
rtn_dict (bool): return the entire dictionary.
Returns
-------
(str)
Notes
-----
- these are not the longhurst numbers (just number within MIT list)
"""
num2prov = {
1: u'FKLD', 2: u'CHIL', 3: u'TASM', 4: u'BRAZ', 5: u'SATL', 6: u'EAFR',
7: u'AUSW',
8: u'AUSE', 9: u'ISSG', 10: u'BENG', 11: u'ARCH', 12: u'SUND', 13: u'GUIN',
14: u'PEQD', 15: u'MONS', 16: u'ETRA', 17: u'CNRY', 18: u'GUIA', 19: u'ARAB',
20: u'WTRA', 21: u'KURO', 22: u'NECS', 23: u'NASE', 24: u'PSAE', 25: u'CHIN',
26: u'INDE', 27: u'CAMR', 28: u'PNEC', 29: u'REDS', 30: u'INDW', 31: u'CARB',
32: u'NPTG', 33: u'NATR', 34: u'MEDI', 35: u'CCAL', 36: u'NWCS', 37: u'NASW',
38: u'GFST', 39: u'NADR', 40: u'ALSK', 41: u'AR | MITp_ = tmp_.loc[tmp_.index == idx, :]['PName (MIT)'].values[0]
print(PrtStr.format(MITp_, Get_LonghurstProvinceName4Num(MITp_),
prov, Get_LonghurstProvinceName4Num(prov))) | conditional_block |
LonghurstProvince.py | Print the differences to screen
print("When assignment differs - The MIT method gives:")
PrtStr = "MIT:'{}' ({}), but R gives '{}' ({})"
for prov in list(set(tmp['PName (R)'])):
tmp_ = tmp.loc[tmp['PName (R)'] == prov, :]
for idx in tmp_.index:
MITp_ = tmp_.loc[tmp_.index == idx, :]['PName (MIT)'].values[0]
print(PrtStr.format(MITp_, Get_LonghurstProvinceName4Num(MITp_),
prov, Get_LonghurstProvinceName4Num(prov)))
# What data sets contribute to this
PrtStr = 'Datasets contributing to these numbers: {}'
print(PrtStr.format(', '.join(set(tmp['Data_Key']))))
def ParseLonghurstProvinceFile():
"""
Parse the Longhurst *.xml file into a dictionary object
Notes
-----
- This code is copied from elsewhere and not used...
Original code: https://github.com/thechisholmlab/Longhurst-Province-Finder
"""
from xml.dom.minidom import parse, parseString
provinces = {}
tree = parse('longhurst.xml')
for node in tree.getElementsByTagName('MarineRegions:longhurst'):
# 1. Get province code, name and bounding box from file
provCode = node.getElementsByTagName('MarineRegions:provcode')[
0].firstChild.data
provName = node.getElementsByTagName('MarineRegions:provdescr')[
0].firstChild.data
fid = node.getAttribute("fid")
b = node.getElementsByTagName('gml:coordinates')[0].firstChild.data
# 2. Parse bounding box coordinates
b = b.split(' ')
x1, y1 = b[0].split(',')
x2, y2 = b[1].split(',')
x1 = float(x1)
y1 = float(y1)
x2 = float(x2)
y2 = float(y2)
# Add province to dictionary
provinces[fid] = {'provName': provName, 'provCode': provCode,
'x1': x1, 'y1': y1, 'x2': x2, 'y2': y2}
return provinces, tree
def LonghurstProvinceFileNum2Province(input, invert=False, rtn_dict=False):
"""
Get the Longhurst province - REDUNDANT
Parameters
-------
input (str): input string to use as key to return dictionary value
invert (float): reverse the key/pair of the dictionary
rtn_dict (bool): return the entire dictionary.
Returns
-------
(str)
Notes
-----
- these are not the longhurst numbers (just number within MIT list)
"""
num2prov = {
1: u'FKLD', 2: u'CHIL', 3: u'TASM', 4: u'BRAZ', 5: u'SATL', 6: u'EAFR',
7: u'AUSW',
8: u'AUSE', 9: u'ISSG', 10: u'BENG', 11: u'ARCH', 12: u'SUND', 13: u'GUIN',
14: u'PEQD', 15: u'MONS', 16: u'ETRA', 17: u'CNRY', 18: u'GUIA', 19: u'ARAB',
20: u'WTRA', 21: u'KURO', 22: u'NECS', 23: u'NASE', 24: u'PSAE', 25: u'CHIN',
26: u'INDE', 27: u'CAMR', 28: u'PNEC', 29: u'REDS', 30: u'INDW', 31: u'CARB',
32: u'NPTG', 33: u'NATR', 34: u'MEDI', 35: u'CCAL', 36: u'NWCS', 37: u'NASW',
38: u'GFST', 39: u'NADR', 40: u'ALSK', 41: u'ARCT', 42: u'SARC', 43: u'NEWZ',
44: u'SSTC', 45: u'SPSG', 46: u'PSAW', 47: u'BERS', 48: u'NPPF', 49: u'NPSW',
50: u'ANTA', 51: u'SANT', 52: u'WARM', 53: u'APLR', 54: u'BPLR'
}
# Invert?
if invert:
num2prov = {v: k for k, v in list(num2prov.items())}
# Return the dictionary
if rtn_dict:
return num2prov
else:
return num2prov[input]
def MarineRegionsOrg_LonghurstProvinceFileNum2Province(input, invert=False,
rtn_dict=False):
"""
Get the Longhurst province
Parameters
-------
input (str): input string to use as key to return dictionary value
invert (float): reverse the key/pair of the dictionary
rtn_dict (bool): return the entire dictionary.
Returns
-------
(str)
Notes
-----
- This is listing order of the shape file from
http://www.marineregions.org/sources.php#longhurst
"""
num2prov = {
0: u'BPLR', 1: u'ARCT', 2: u'SARC', 3: u'NADR', 4: u'GFST', 5: u'NASW',
6: u'NATR',
7: u'WTRA', 8: u'ETRA', 9: u'SATL', 10: u'NECS', 11: u'CNRY', 12: u'GUIN',
13: u'GUIA', 14: u'NWCS', 15: u'MEDI', 16: u'CARB', 17: u'NASE', 18: u'BRAZ',
19: u'FKLD', 20: u'BENG', 21: u'MONS', 22: u'ISSG', 23: u'EAFR', 24: u'REDS',
25: u'ARAB', 26: u'INDE', 27: u'INDW', 28: u'AUSW', 29: u'BERS', 30: u'PSAE',
31: u'PSAW', 32: u'KURO', 33: u'NPPF', 34: u'NPSW', 35: u'TASM', 36: u'SPSG',
37: u'NPTG', 38: u'PNEC', 39: u'PEQD', 40: u'WARM', 41: u'ARCH', 42: u'ALSK',
43: u'CCAL', 44: u'CAMR', 45: u'CHIL', 46: u'CHIN', 47: u'SUND', 48: u'AUSE',
49: u'NEWZ', 50: u'SSTC', 51: u'SANT', 52: u'ANTA', 53: u'APLR'
}
# Invert?
if invert:
num2prov = {v: k for k, v in list(num2prov.items())}
# Return the dictionary
if rtn_dict:
return num2prov
else:
return num2prov[input]
def RosieLonghurstProvinceFileNum2Province(input, invert=False, rtn_dict=False):
| """
Get the longhurst province
Parameters
-------
input (str): input string to use as key to return dictionary value
invert (float): reverse the key/pair of the dictionary
rtn_dict (bool): return the entire dictionary.
Returns
-------
(str)
Notes
-----
- these **are** the longhurst numbers (other funcs. give numbers used elsewhere)
"""
Rnum2prov = {
1: 'BPLR', 2: 'ARCT', 3: 'SARC', 4: 'NADR', 5: 'GFST', 6: 'NASW', 7: 'NATR',
8: 'WTRA', 9: 'ETRA', 10: 'SATL', 11: 'NECS', 12: 'CNRY', 13: 'GUIN', 14: 'GUIA', | identifier_body |
|
LonghurstProvince.py | pd.DataFrame(vals, index=[lat, lon]).unstack()
df.to_csv('Intial_test_{}_processed_{}.csv'.format(res, ExStr))
# Convert to Dataset
ds = xr.Dataset(data_vars={CoordVar: (['lat', 'lon', ], df.values)},
coords={'lat': DSlats, 'lon': DSlons, })
# Save as NetCDF file
ds.to_netcdf('Intial_test_{}_netCDF_{}.nc'.format(res, ExStr))
def add_LonghurstProvince2table(df, LatVar='Latitude', LonVar='Longitude'):
"""
Add numbers for Longhurst provinces to DataFrame
"""
# Get xml data for provinces
provinces, tree = ParseLonghurstProvinceFile()
# Get the observational data
if isinstance(df, type(None)):
df = get_processed_df_obs_mod() # NOTE this df contains values >400nM
Rnum2prov = RosieLonghurstProvinceFileNum2Province(
None, invert=True, rtn_dict=True)
# - Work with the provinces
# Add a single variable for the coordinate
CoordVar = 'Coord'
def f(x):
return (x[LonVar], x[LatVar])
df[CoordVar] = df.apply(f, axis=1)
# map the calculation of provinces
def GetProv(x):
return Get_LonghurstProvince4coord(x[CoordVar], provinces=provinces,
num2prov=Rnum2prov, tree=tree, verbose=False)
df['MIT Province'] = df.apply(GetProv, axis=1)
# Province name
df['PName (R)'] = df['Province'].map(
RosieLonghurstProvinceFileNum2Province)
df['PName (MIT)'] = df['MIT Province'].map(
RosieLonghurstProvinceFileNum2Province)
# - Check the assignment
# How many are just the same?
bool = df['MIT Province'] == df['Province']
PrtStr = '#={} ({:.2f}%) are calculated to be the same'
Ns = float(df.loc[bool, :].shape[0])
N = float(df.shape[0])
print(PrtStr.format(Ns, Ns / N * 100))
# Which of these are just missing assignments in the input files?
Nnan = float(df['Province'].dropna().shape[0])
PrtStr = 'The % non matching, observations without provinces #={} ({:.2f}%)'
print(PrtStr.format(N-Nnan, (N-Nnan)/N*100))
# The locations where both assignments have been made?
dfT = df.loc[np.isfinite(df['Province']), :]
# For certain points the new approach failed.
tmp = dfT.loc[~np.isfinite(dfT['MIT Province']), :]
print('The following provinces were not assigned (# of times) by MIT method:')
PrtStr = 'This is {} observations ({:.2f}%)'
print(PrtStr.format(tmp.shape[0], tmp.shape[0]/N * 100))
print(tmp['PName (R)'].value_counts())
# What are the locations of these points?
PrtStr = 'Full name of {} is {}'
for prov in tmp['PName (R)'].value_counts().index:
print(PrtStr.format(prov, Get_LonghurstProvinceName4Num(prov)))
# What data sets contribute to this
PrtStr = 'Datasets contributing to these numbers: {}'
print(PrtStr.format(', '.join(set(tmp['Data_Key']))))
# For others, the assigned provinces differed
bool = dfT['MIT Province'] != dfT['Province']
vars2use = [u'Data_Key', 'MIT Province',
'Province', 'PName (MIT)', 'PName (R)']
tmp = dfT.loc[bool, :][vars2use].dropna()
# Print the differences to screen
print("When assignment differs - The MIT method gives:")
PrtStr = "MIT:'{}' ({}), but R gives '{}' ({})"
for prov in list(set(tmp['PName (R)'])):
tmp_ = tmp.loc[tmp['PName (R)'] == prov, :]
for idx in tmp_.index:
MITp_ = tmp_.loc[tmp_.index == idx, :]['PName (MIT)'].values[0]
print(PrtStr.format(MITp_, Get_LonghurstProvinceName4Num(MITp_),
prov, Get_LonghurstProvinceName4Num(prov)))
# What data sets contribute to this
PrtStr = 'Datasets contributing to these numbers: {}'
print(PrtStr.format(', '.join(set(tmp['Data_Key']))))
def ParseLonghurstProvinceFile():
"""
Parse the Longhurst *.xml file into a dictionary object
Notes
-----
- This code is copied from elsewhere and not used...
Original code: https://github.com/thechisholmlab/Longhurst-Province-Finder
"""
from xml.dom.minidom import parse, parseString
provinces = {}
tree = parse('longhurst.xml')
for node in tree.getElementsByTagName('MarineRegions:longhurst'):
# 1. Get province code, name and bounding box from file
provCode = node.getElementsByTagName('MarineRegions:provcode')[
0].firstChild.data
provName = node.getElementsByTagName('MarineRegions:provdescr')[
0].firstChild.data
fid = node.getAttribute("fid")
b = node.getElementsByTagName('gml:coordinates')[0].firstChild.data
# 2. Parse bounding box coordinates
b = b.split(' ')
x1, y1 = b[0].split(',')
x2, y2 = b[1].split(',')
x1 = float(x1)
y1 = float(y1)
x2 = float(x2)
y2 = float(y2)
# Add province to dictionary
provinces[fid] = {'provName': provName, 'provCode': provCode,
'x1': x1, 'y1': y1, 'x2': x2, 'y2': y2}
return provinces, tree
def LonghurstProvinceFileNum2Province(input, invert=False, rtn_dict=False):
"""
Get the Longhurst province - REDUNDANT
Parameters
-------
input (str): input string to use as key to return dictionary value
invert (float): reverse the key/pair of the dictionary
rtn_dict (bool): return the entire dictionary.
Returns
-------
(str)
Notes
-----
- these are not the longhurst numbers (just number within MIT list)
"""
num2prov = {
1: u'FKLD', 2: u'CHIL', 3: u'TASM', 4: u'BRAZ', 5: u'SATL', 6: u'EAFR',
7: u'AUSW',
8: u'AUSE', 9: u'ISSG', 10: u'BENG', 11: u'ARCH', 12: u'SUND', 13: u'GUIN',
14: u'PEQD', 15: u'MONS', 16: u'ETRA', 17: u'CNRY', 18: u'GUIA', 19: u'ARAB',
20: u'WTRA', 21: u'KURO', 22: u'NECS', 23: u'NASE', 24: u'PSAE', 25: u'CHIN',
26: u'INDE', 27: u'CAMR', 28: u'PNEC', 29: u'REDS', 30: u'INDW', 31: u'CARB',
32: u'NPTG', 33: u'NATR', 34: u'MEDI', 35: u'CCAL', 36: u'NWCS', 37: u'NASW',
38: u'GFST', 39: u'NADR', 40: u'ALSK', 41: u'ARCT', 42: u'SARC', 43: u'NEWZ',
44: u'SSTC', 45: u'SPSG', 46: u'PSAW', 47: u'BERS', 48: u'NPPF', 49: u'NPSW',
50: u'ANTA', 51: u'SANT', 52: u'WARM', 53: u'APLR', 54: u'BPLR'
}
# Invert?
if invert:
num2prov = {v: k for k, v in list(num2prov.items())}
# Return the dictionary
if rtn_dict:
return num2prov
else:
return num2prov[input]
def MarineRegionsOrg_LonghurstProvinceFileNum2Province(input, invert=False, | random_line_split |
||
helpers.js |
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ExtensionHelpers = void 0;
var fs = require("fs");
var https = require("https");
var extractZip = require("extract-zip");
var path = require("path");
/// Extension helpers
var ExtensionHelpers = /** @class */ (function () {
function ExtensionHelpers() {
}
/// Check value contains content befor returning string value
ExtensionHelpers.prototype.NamePairCheck = function (name, value, addQuotes) {
if (value != undefined && value != '') {
var argument = ' /' + name;
if (addQuotes == true) {
argument += '="' + value + '"';
}
else {
argument += '=' + value;
}
console.log(argument);
return argument;
}
else {
return '';
}
};
/// Download Dot Cover or get some local repo
ExtensionHelpers.prototype.DownloadDotCover = function (customDotCoverPath) {
return __awaiter(this, void 0, void 0, function () {
var DotCoverPath, outputLocation, downloadFileName, url, zipLocation;
var _this = this;
return __generator(this, function (_a) {
DotCoverPath = "CommandLineTools-2020-1-3";
outputLocation = __dirname + "\\download\\" + DotCoverPath;
// Check if folder exists
if (customDotCoverPath != undefined && customDotCoverPath != '') {
console.debug("Command Line Tools Custom Path: ", customDotCoverPath);
outputLocation = customDotCoverPath;
}
else if (fs.existsSync(outputLocation)) {
console.debug("Command Line Tools Using Existing Download");
}
else {
downloadFileName = 'JetBrains.dotCover.CommandLineTools.2020.1.3';
console.log("Downloading Command Line Tools ", downloadFileName);
url = "https://download.jetbrains.com/resharper/ReSharperUltimate.2020.1.3/" + downloadFileName + ".zip";
zipLocation = outputLocation + '.zip';
this.download(url, zipLocation).then(function () { return __awaiter(_this, void 0, void 0, function () {
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
// Unzip
console.debug('extract to ' + outputLocation);
return [4 /*yield*/, extractZip(outputLocation + '.zip', { dir: outputLocation })
// Remove Zip
];
case 1:
_a.sent();
// Remove Zip
fs.unlink(outputLocation + '.zip', function (err) {
if (err)
console.debug('Original File not deleted!');
console.debug('Original File deleted!');
});
return [2 /*return*/, outputLocation];
}
});
}); });
}
return [2 /*return*/, outputLocation];
});
});
};
/**
* Download a resource from `url` to `dest`.
* @param {string} url - Valid URL to attempt download of resource
* @param {string} dest - Valid path to save the file.
* @returns {Promise<void>} - Returns asynchronously when successfully completed download
*/
ExtensionHelpers.prototype.download = function (url, dest) {
var _this = this;
return new Promise(function (resolve, reject) {
var request = https.get(url, function (response) {
console.debug('download response code ', response.statusCode);
if (response.statusCode === 200) {
var file_1 = fs.createWriteStream(dest, { flags: 'wx' });
file_1.on('finish', function () { return resolve(); });
file_1.on('error', function (err) {
console.debug('error ', err);
file_1.close();
if (err.message === 'EEXIST')
reject('File already exists');
else
fs.unlink(dest, function () { return reject(err.message); }); // Delete temp file
});
if (fs.existsSync(dest)) {
fs.rmdir(dest, { recursive: true }, function (err) {
if (err) {
console.error(err);
}
});
}
response.pipe(file_1);
}
else if (response.statusCode === 302 || response.statusCode === 301) {
//Recursively follow redirects, only a 200 will resolve.
var redirectlocation = '';
if (response.headers.location != undefined)
redirectlocation = response.headers.location;
_this.download(redirectlocation, dest).then(function () { return resolve(); });
}
else {
reject("Server responded with " + response.statusCode + ": " + response.statusMessage);
}
});
request.on('error', function (err) {
reject(err.message);
});
});
};
/// Workout output location
ExtensionHelpers.prototype.GetOutputLocation = function (outputLocation, targetWorkingDir, reportType, outputFilename) {
if (outputFilename == undefined || outputFilename == '')
outputFilename = "CodeAnalyseResults";
if (outputLocation == undefined || outputLocation == '') {
outputLocation = targetWorkingDir;
}
else if (outputLocation.includes('.xml') == false && outputLocation.includes('.html') == false) {
if (outputLocation.substring(outputLocation.length - 1).includes("\\") || outputLocation.substring(outputLocation.length - 1).includes("/")) {
outputLocation += outputFilename + "." + reportType;
}
else {
outputLocation += "\\" + outputFilename + "." + reportType;
}
}
console.debug('Output Location: ', outputLocation);
return outputLocation;
};
/// run Dot Cover command
ExtensionHelpers.prototype.RunDotCoverTask = function (cmdline, callBack) {
console.log("**** - Run Command Line Script.. Starting - **** ");
var exec = require("child_process").exec;
exec(cmdline, function (error, stdout, stderr) {
if (error) {
callBack("error: " + error.message, false);
return;
| { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | identifier_body |
|
helpers.js | step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) |
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ExtensionHelpers = void 0;
var fs = require("fs");
var https = require("https");
var extractZip = require("extract-zip");
var path = require("path");
/// Extension helpers
var ExtensionHelpers = /** @class */ (function () {
function ExtensionHelpers() {
}
/// Check value contains content befor returning string value
ExtensionHelpers.prototype.NamePairCheck = function (name, value, addQuotes) {
if (value != undefined && value != '') {
var argument = ' /' + name;
if (addQuotes == true) {
argument += '="' + value + '"';
}
else {
argument += '=' + value;
}
console.log(argument);
return argument;
}
else {
return '';
}
};
/// Download Dot Cover or get some local repo
ExtensionHelpers.prototype.DownloadDotCover = function (customDotCoverPath) {
return __awaiter(this, void 0, void 0, function () {
var DotCoverPath, outputLocation, downloadFileName, url, zipLocation;
var _this = this;
return __generator(this, function (_a) {
DotCoverPath = "CommandLineTools-2020-1-3";
outputLocation = __dirname + "\\download\\" + DotCoverPath;
// Check if folder exists
if (customDotCoverPath != undefined && customDotCoverPath != '') {
console.debug("Command Line Tools Custom Path: ", customDotCoverPath);
outputLocation = customDotCoverPath;
}
else if (fs.existsSync(outputLocation)) {
console.debug("Command Line Tools Using Existing Download");
}
else {
downloadFileName = 'JetBrains.dotCover.CommandLineTools.2020.1.3';
console.log("Downloading Command Line Tools ", downloadFileName);
url = "https://download.jetbrains.com/resharper/ReSharperUltimate.2020.1.3/" + downloadFileName + ".zip";
zipLocation = outputLocation + '.zip';
this.download(url, zipLocation).then(function () { return __awaiter(_this, void 0, void 0, function () {
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
// Unzip
console.debug('extract to ' + outputLocation);
return [4 /*yield*/, extractZip(outputLocation + '.zip', { dir: outputLocation })
// Remove Zip
];
case 1:
_a.sent();
// Remove Zip
fs.unlink(outputLocation + '.zip', function (err) {
if (err)
console.debug('Original File not deleted!');
console.debug('Original File deleted!');
});
return [2 /*return*/, outputLocation];
}
});
}); });
}
return [2 /*return*/, outputLocation];
});
});
};
/**
* Download a resource from `url` to `dest`.
* @param {string} url - Valid URL to attempt download of resource
* @param {string} dest - Valid path to save the file.
* @returns {Promise<void>} - Returns asynchronously when successfully completed download
*/
ExtensionHelpers.prototype.download = function (url, dest) {
var _this = this;
return new Promise(function (resolve, reject) {
var request = https.get(url, function (response) {
console.debug('download response code ', response.statusCode);
if (response.statusCode === 200) {
var file_1 = fs.createWriteStream(dest, { flags: 'wx' });
file_1.on('finish', function () { return resolve(); });
file_1.on('error', function (err) {
console.debug('error ', err);
file_1.close();
if (err.message === 'EEXIST')
reject('File already exists');
else
fs.unlink(dest, function () { return reject(err.message); }); // Delete temp file
});
if (fs.existsSync(dest)) {
fs.rmdir(dest, { recursive: true }, function (err) {
if (err) {
console.error(err);
}
});
}
response.pipe(file_1);
}
else if (response.statusCode === 302 || response.statusCode === 301) {
//Recursively follow redirects, only a 200 will resolve.
var redirectlocation = '';
if (response.headers.location != undefined)
redirectlocation = response.headers.location;
_this.download(redirectlocation, dest).then(function () { return resolve(); });
}
else {
reject("Server responded with " + response.statusCode + ": " + response.statusMessage);
}
});
request.on('error', function (err) {
reject(err.message);
});
});
};
/// Workout output location
ExtensionHelpers.prototype.GetOutputLocation = function (outputLocation, targetWorkingDir, reportType, outputFilename) {
if (outputFilename == undefined || outputFilename == '')
outputFilename = "CodeAnalyseResults";
if (outputLocation == undefined || outputLocation == '') {
outputLocation = targetWorkingDir;
}
else if (outputLocation.includes('.xml') == false && outputLocation.includes('.html') == false) {
if (outputLocation.substring(outputLocation.length - 1).includes("\\") || outputLocation.substring(outputLocation.length - 1).includes("/")) {
outputLocation += outputFilename + "." + reportType;
}
else {
outputLocation += "\\" + outputFilename + "." + reportType;
}
}
console.debug('Output Location: ', outputLocation);
return outputLocation;
};
/// run Dot Cover command
ExtensionHelpers.prototype.RunDotCoverTask = function (cmdline, callBack) {
console.log("**** - Run Command Line Script.. Starting - **** ");
var exec = require("child_process").exec;
exec(cmdline, function (error, stdout, stderr) {
if (error) {
callBack("error: " + error.message, false);
return;
}
if (stderr) {
callBack("stderr: " + stderr, true);
return;
}
callBack("stdout: " + stdout, true);
});
console.log("**** - Run Command Line Script.. Ending - **** ");
};
/// Get the dynamic Test Assemblies
ExtensionHelpers.prototype.GetTestAssemblies = function (projectPattern, targetWorkingDir, targetArguments) {
if (projectPattern != undefined && projectPattern != '') {
var searchPath = targetWorkingDir + '';
var childItems = | { _.label = t[1]; t = op; break; } | conditional_block |
helpers.js | step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ExtensionHelpers = void 0;
var fs = require("fs");
var https = require("https");
var extractZip = require("extract-zip");
var path = require("path");
/// Extension helpers
var ExtensionHelpers = /** @class */ (function () {
function ExtensionHelpers() {
}
/// Check value contains content befor returning string value
ExtensionHelpers.prototype.NamePairCheck = function (name, value, addQuotes) {
if (value != undefined && value != '') {
var argument = ' /' + name;
if (addQuotes == true) {
argument += '="' + value + '"';
}
else {
argument += '=' + value;
}
console.log(argument);
return argument;
}
else {
return '';
}
};
/// Download Dot Cover or get some local repo
ExtensionHelpers.prototype.DownloadDotCover = function (customDotCoverPath) { | return __awaiter(this, void 0, void 0, function () {
var DotCoverPath, outputLocation, downloadFileName, url, zipLocation;
var _this = this;
return __generator(this, function (_a) {
DotCoverPath = "CommandLineTools-2020-1-3";
outputLocation = __dirname + "\\download\\" + DotCoverPath;
// Check if folder exists
if (customDotCoverPath != undefined && customDotCoverPath != '') {
console.debug("Command Line Tools Custom Path: ", customDotCoverPath);
outputLocation = customDotCoverPath;
}
else if (fs.existsSync(outputLocation)) {
console.debug("Command Line Tools Using Existing Download");
}
else {
downloadFileName = 'JetBrains.dotCover.CommandLineTools.2020.1.3';
console.log("Downloading Command Line Tools ", downloadFileName);
url = "https://download.jetbrains.com/resharper/ReSharperUltimate.2020.1.3/" + downloadFileName + ".zip";
zipLocation = outputLocation + '.zip';
this.download(url, zipLocation).then(function () { return __awaiter(_this, void 0, void 0, function () {
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
// Unzip
console.debug('extract to ' + outputLocation);
return [4 /*yield*/, extractZip(outputLocation + '.zip', { dir: outputLocation })
// Remove Zip
];
case 1:
_a.sent();
// Remove Zip
fs.unlink(outputLocation + '.zip', function (err) {
if (err)
console.debug('Original File not deleted!');
console.debug('Original File deleted!');
});
return [2 /*return*/, outputLocation];
}
});
}); });
}
return [2 /*return*/, outputLocation];
});
});
};
/**
* Download a resource from `url` to `dest`.
* @param {string} url - Valid URL to attempt download of resource
* @param {string} dest - Valid path to save the file.
* @returns {Promise<void>} - Returns asynchronously when successfully completed download
*/
ExtensionHelpers.prototype.download = function (url, dest) {
var _this = this;
return new Promise(function (resolve, reject) {
var request = https.get(url, function (response) {
console.debug('download response code ', response.statusCode);
if (response.statusCode === 200) {
var file_1 = fs.createWriteStream(dest, { flags: 'wx' });
file_1.on('finish', function () { return resolve(); });
file_1.on('error', function (err) {
console.debug('error ', err);
file_1.close();
if (err.message === 'EEXIST')
reject('File already exists');
else
fs.unlink(dest, function () { return reject(err.message); }); // Delete temp file
});
if (fs.existsSync(dest)) {
fs.rmdir(dest, { recursive: true }, function (err) {
if (err) {
console.error(err);
}
});
}
response.pipe(file_1);
}
else if (response.statusCode === 302 || response.statusCode === 301) {
//Recursively follow redirects, only a 200 will resolve.
var redirectlocation = '';
if (response.headers.location != undefined)
redirectlocation = response.headers.location;
_this.download(redirectlocation, dest).then(function () { return resolve(); });
}
else {
reject("Server responded with " + response.statusCode + ": " + response.statusMessage);
}
});
request.on('error', function (err) {
reject(err.message);
});
});
};
/// Workout output location
ExtensionHelpers.prototype.GetOutputLocation = function (outputLocation, targetWorkingDir, reportType, outputFilename) {
if (outputFilename == undefined || outputFilename == '')
outputFilename = "CodeAnalyseResults";
if (outputLocation == undefined || outputLocation == '') {
outputLocation = targetWorkingDir;
}
else if (outputLocation.includes('.xml') == false && outputLocation.includes('.html') == false) {
if (outputLocation.substring(outputLocation.length - 1).includes("\\") || outputLocation.substring(outputLocation.length - 1).includes("/")) {
outputLocation += outputFilename + "." + reportType;
}
else {
outputLocation += "\\" + outputFilename + "." + reportType;
}
}
console.debug('Output Location: ', outputLocation);
return outputLocation;
};
/// run Dot Cover command
ExtensionHelpers.prototype.RunDotCoverTask = function (cmdline, callBack) {
console.log("**** - Run Command Line Script.. Starting - **** ");
var exec = require("child_process").exec;
exec(cmdline, function (error, stdout, stderr) {
if (error) {
callBack("error: " + error.message, false);
return;
}
if (stderr) {
callBack("stderr: " + stderr, true);
return;
}
callBack("stdout: " + stdout, true);
});
console.log("**** - Run Command Line Script.. Ending - **** ");
};
/// Get the dynamic Test Assemblies
ExtensionHelpers.prototype.GetTestAssemblies = function (projectPattern, targetWorkingDir, targetArguments) {
if (projectPattern != undefined && projectPattern != '') {
var searchPath = targetWorkingDir + '';
var childItems = this.GetAll | random_line_split |
|
helpers.js | step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ExtensionHelpers = void 0;
var fs = require("fs");
var https = require("https");
var extractZip = require("extract-zip");
var path = require("path");
/// Extension helpers
var ExtensionHelpers = /** @class */ (function () {
function | () {
}
/// Check value contains content befor returning string value
ExtensionHelpers.prototype.NamePairCheck = function (name, value, addQuotes) {
if (value != undefined && value != '') {
var argument = ' /' + name;
if (addQuotes == true) {
argument += '="' + value + '"';
}
else {
argument += '=' + value;
}
console.log(argument);
return argument;
}
else {
return '';
}
};
/// Download Dot Cover or get some local repo
ExtensionHelpers.prototype.DownloadDotCover = function (customDotCoverPath) {
return __awaiter(this, void 0, void 0, function () {
var DotCoverPath, outputLocation, downloadFileName, url, zipLocation;
var _this = this;
return __generator(this, function (_a) {
DotCoverPath = "CommandLineTools-2020-1-3";
outputLocation = __dirname + "\\download\\" + DotCoverPath;
// Check if folder exists
if (customDotCoverPath != undefined && customDotCoverPath != '') {
console.debug("Command Line Tools Custom Path: ", customDotCoverPath);
outputLocation = customDotCoverPath;
}
else if (fs.existsSync(outputLocation)) {
console.debug("Command Line Tools Using Existing Download");
}
else {
downloadFileName = 'JetBrains.dotCover.CommandLineTools.2020.1.3';
console.log("Downloading Command Line Tools ", downloadFileName);
url = "https://download.jetbrains.com/resharper/ReSharperUltimate.2020.1.3/" + downloadFileName + ".zip";
zipLocation = outputLocation + '.zip';
this.download(url, zipLocation).then(function () { return __awaiter(_this, void 0, void 0, function () {
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
// Unzip
console.debug('extract to ' + outputLocation);
return [4 /*yield*/, extractZip(outputLocation + '.zip', { dir: outputLocation })
// Remove Zip
];
case 1:
_a.sent();
// Remove Zip
fs.unlink(outputLocation + '.zip', function (err) {
if (err)
console.debug('Original File not deleted!');
console.debug('Original File deleted!');
});
return [2 /*return*/, outputLocation];
}
});
}); });
}
return [2 /*return*/, outputLocation];
});
});
};
/**
* Download a resource from `url` to `dest`.
* @param {string} url - Valid URL to attempt download of resource
* @param {string} dest - Valid path to save the file.
* @returns {Promise<void>} - Returns asynchronously when successfully completed download
*/
ExtensionHelpers.prototype.download = function (url, dest) {
var _this = this;
return new Promise(function (resolve, reject) {
var request = https.get(url, function (response) {
console.debug('download response code ', response.statusCode);
if (response.statusCode === 200) {
var file_1 = fs.createWriteStream(dest, { flags: 'wx' });
file_1.on('finish', function () { return resolve(); });
file_1.on('error', function (err) {
console.debug('error ', err);
file_1.close();
if (err.message === 'EEXIST')
reject('File already exists');
else
fs.unlink(dest, function () { return reject(err.message); }); // Delete temp file
});
if (fs.existsSync(dest)) {
fs.rmdir(dest, { recursive: true }, function (err) {
if (err) {
console.error(err);
}
});
}
response.pipe(file_1);
}
else if (response.statusCode === 302 || response.statusCode === 301) {
//Recursively follow redirects, only a 200 will resolve.
var redirectlocation = '';
if (response.headers.location != undefined)
redirectlocation = response.headers.location;
_this.download(redirectlocation, dest).then(function () { return resolve(); });
}
else {
reject("Server responded with " + response.statusCode + ": " + response.statusMessage);
}
});
request.on('error', function (err) {
reject(err.message);
});
});
};
/// Workout output location
ExtensionHelpers.prototype.GetOutputLocation = function (outputLocation, targetWorkingDir, reportType, outputFilename) {
if (outputFilename == undefined || outputFilename == '')
outputFilename = "CodeAnalyseResults";
if (outputLocation == undefined || outputLocation == '') {
outputLocation = targetWorkingDir;
}
else if (outputLocation.includes('.xml') == false && outputLocation.includes('.html') == false) {
if (outputLocation.substring(outputLocation.length - 1).includes("\\") || outputLocation.substring(outputLocation.length - 1).includes("/")) {
outputLocation += outputFilename + "." + reportType;
}
else {
outputLocation += "\\" + outputFilename + "." + reportType;
}
}
console.debug('Output Location: ', outputLocation);
return outputLocation;
};
/// run Dot Cover command
ExtensionHelpers.prototype.RunDotCoverTask = function (cmdline, callBack) {
console.log("**** - Run Command Line Script.. Starting - **** ");
var exec = require("child_process").exec;
exec(cmdline, function (error, stdout, stderr) {
if (error) {
callBack("error: " + error.message, false);
return;
}
if (stderr) {
callBack("stderr: " + stderr, true);
return;
}
callBack("stdout: " + stdout, true);
});
console.log("**** - Run Command Line Script.. Ending - **** ");
};
/// Get the dynamic Test Assemblies
ExtensionHelpers.prototype.GetTestAssemblies = function (projectPattern, targetWorkingDir, targetArguments) {
if (projectPattern != undefined && projectPattern != '') {
var searchPath = targetWorkingDir + '';
var childItems = this | ExtensionHelpers | identifier_name |
utils.py | , ManyToManyRel) and not related.symmetrical:
yield (name, related)
def _resolve_model(obj):
"""
Resolve supplied `obj` to a Django model class.
`obj` must be a Django model class itself, or a string
representation of one. Useful in situations like GH #1225 where
Django may not have resolved a string-based reference to a model in
another model's foreign key definition.
String representations should have the format:
'appname.ModelName'
"""
if isinstance(obj, six.string_types) and len(obj.split(".")) == 2:
app_name, model_name = obj.split(".")
resolved_model = apps.get_model(app_name, model_name)
if resolved_model is None:
msg = "Django did not return a model for {0}.{1}"
raise ImproperlyConfigured(msg.format(app_name, model_name))
return resolved_model
elif inspect.isclass(obj) and issubclass(obj, Model):
return obj
raise ValueError("{0} is not a Django model".format(obj))
def to_kebab_case(name):
s1 = re.sub("(.)([A-Z][a-z]+)", r"\1-\2", name.title().replace(" ", ""))
return re.sub("([a-z0-9])([A-Z])", r"\1-\2", s1).lower()
def get_related_model(field):
# Backward compatibility patch for Django versions lower than 1.9.x
if DJANGO_VERSION < (1, 9):
return _resolve_model(field.rel.to)
return field.remote_field.model
def get_model_fields(model):
# Backward compatibility patch for Django versions lower than 1.11.x
if DJANGO_VERSION >= (1, 11):
private_fields = model._meta.private_fields
else:
private_fields = model._meta.virtual_fields
all_fields_list = (
list(model._meta.fields)
+ list(model._meta.local_many_to_many)
+ list(private_fields)
+ list(model._meta.fields_map.values())
)
# Make sure we don't duplicate local fields with "reverse" version
# and get the real reverse django related_name
reverse_fields = list(get_reverse_fields(model))
exclude_fields = [field[1] for field in reverse_fields]
local_fields = [(field.name, field) for field in all_fields_list if field not in exclude_fields]
all_fields = local_fields + reverse_fields
return all_fields
def get_obj(app_label, model_name, object_id):
"""
Function used to get a object
:param app_label: A valid Django Model or a string with format: <app_label>.<model_name>
:param model_name: Key into kwargs that contains de data: new_person
:param object_id:
:return: instance
"""
try:
model = apps.get_model("{}.{}".format(app_label, model_name))
assert is_valid_django_model(model), ("Model {}.{} do not exist.").format(
app_label, model_name
)
obj = get_Object_or_None(model, pk=object_id)
return obj
except model.DoesNotExist:
return None
except LookupError:
pass
except ValidationError as e:
raise ValidationError(e.__str__())
except TypeError as e:
raise TypeError(e.__str__())
except Exception as e:
raise Exception(e.__str__())
def create_obj(django_model, new_obj_key=None, *args, **kwargs):
"""
Function used by my on traditional Mutations to create objs
:param django_model: A valid Django Model or a string with format:
<app_label>.<model_name>
:param new_obj_key: Key into kwargs that contains de data: new_person
:param args:
:param kwargs: Dict with model attributes values
:return: instance of model after saved it
"""
try:
if isinstance(django_model, six.string_types):
django_model = apps.get_model(django_model)
assert is_valid_django_model(django_model), (
"You need to pass a valid Django Model or a string with format: "
'<app_label>.<model_name> to "create_obj"'
' function, received "{}".'
).format(django_model)
data = kwargs.get(new_obj_key, None) if new_obj_key else kwargs
new_obj = django_model(**data)
new_obj.full_clean()
new_obj.save()
return new_obj
except LookupError:
pass
except ValidationError as e:
raise ValidationError(e.__str__())
except TypeError as e:
raise TypeError(e.__str__())
except Exception as e:
return e.__str__()
def clean_dict(d):
"""
Remove all empty fields in a nested dict
"""
if not isinstance(d, (dict, list)):
return d
if isinstance(d, list):
return [v for v in (clean_dict(v) for v in d) if v]
return OrderedDict([(k, v) for k, v in ((k, clean_dict(v)) for k, v in list(d.items())) if v])
def get_type(_type):
if isinstance(_type, (GraphQLList, GraphQLNonNull)):
return get_type(_type.of_type)
return _type
def get_fields(info):
fragments = info.fragments
field_nodes = info.field_nodes[0].selection_set.selections
for field_ast in field_nodes:
field_name = field_ast.name.value
if isinstance(field_ast, FragmentSpreadNode):
for field in fragments[field_name].selection_set.selections:
yield field.name.value
continue
yield field_name
def is_required(field):
try:
blank = getattr(field, "blank", getattr(field, "field", None))
default = getattr(field, "default", getattr(field, "field", None))
# null = getattr(field, "null", getattr(field, "field", None))
if blank is None:
blank = True
elif not isinstance(blank, bool):
blank = getattr(blank, "blank", True)
if default is None:
default = NOT_PROVIDED
elif default != NOT_PROVIDED:
default = getattr(default, "default", default)
except AttributeError:
return False
return not blank and default == NOT_PROVIDED
def _get_queryset(klass):
"""
Returns a QuerySet from a Model, Manager, or QuerySet. Created to make
get_object_or_404 and get_list_or_404 more DRY.
Raises a ValueError if klass is not a Model, Manager, or QuerySet.
"""
if isinstance(klass, QuerySet):
return klass
elif isinstance(klass, Manager):
manager = klass
elif isinstance(klass, ModelBase):
manager = klass._default_manager
else:
if isinstance(klass, type):
klass__name = klass.__name__ | klass__name = klass.__class__.__name__
raise ValueError(
"Object is of type '{}', but must be a Django Model, "
"Manager, or QuerySet".format(klass__name)
)
return manager.all()
def _get_custom_resolver(info):
"""
Get custom user defined resolver for query.
This resolver must return QuerySet instance to be successfully resolved.
"""
parent = info.parent_type
custom_resolver_name = f"resolve_{to_snake_case(info.field_name)}"
if hasattr(parent.graphene_type, custom_resolver_name):
return getattr(parent.graphene_type, custom_resolver_name)
return None
def get_Object_or_None(klass, *args, **kwargs):
"""
Uses get() to return an object, or None if the object does not exist.
klass may be a Model, Manager, or QuerySet object. All other passed
arguments and keyword arguments are used in the get() query.
Note: Like with get(), an MultipleObjectsReturned will be raised
if more than one object is found.
Ex: get_Object_or_None(User, db, id=1)
"""
queryset = _get_queryset(klass)
try:
if args:
return queryset.using(args[0]).get(**kwargs)
else:
return queryset.get(*args, **kwargs)
except queryset.model.DoesNotExist:
return None
# except queryset.model.MultipleObjectsReturned:
# return get_Objects_or_None(klass, *args, **kwargs)
def get_extra_filters(root, model):
extra_filters = {}
for field in model._meta.get_fields():
if field.is_relation and field.related_model == root._meta.model:
extra_filters.update({field.name: root})
return extra_filters
def get_related_fields(model):
return {
field.name: field
for field in model._meta.get_fields()
if field.is_relation and not isinstance(field, (GenericForeignKey, GenericRel))
}
def find_field(field, fields_dict):
temp = fields_dict.get(field.name.value, fields_dict.get(to_snake_case(field.name.value), None))
return temp
def recursive_params(
selection_set, fragments, available_related_fields, select_related, prefetch_related
):
for field in selection_set.selections:
if isinstance(field, FragmentSpreadNode) and fragments:
a, b = recursive_params(
fragments[field.name.value].selection_set,
fragments,
available_related_fields,
select_related,
prefetch_related,
)
[select_related.append(x) for x in a if x not in select_related]
[prefetch_related.append | else: | random_line_split |
utils.py | , ManyToManyRel) and not related.symmetrical:
yield (name, related)
def _resolve_model(obj):
"""
Resolve supplied `obj` to a Django model class.
`obj` must be a Django model class itself, or a string
representation of one. Useful in situations like GH #1225 where
Django may not have resolved a string-based reference to a model in
another model's foreign key definition.
String representations should have the format:
'appname.ModelName'
"""
if isinstance(obj, six.string_types) and len(obj.split(".")) == 2:
app_name, model_name = obj.split(".")
resolved_model = apps.get_model(app_name, model_name)
if resolved_model is None:
msg = "Django did not return a model for {0}.{1}"
raise ImproperlyConfigured(msg.format(app_name, model_name))
return resolved_model
elif inspect.isclass(obj) and issubclass(obj, Model):
return obj
raise ValueError("{0} is not a Django model".format(obj))
def to_kebab_case(name):
s1 = re.sub("(.)([A-Z][a-z]+)", r"\1-\2", name.title().replace(" ", ""))
return re.sub("([a-z0-9])([A-Z])", r"\1-\2", s1).lower()
def get_related_model(field):
# Backward compatibility patch for Django versions lower than 1.9.x
if DJANGO_VERSION < (1, 9):
return _resolve_model(field.rel.to)
return field.remote_field.model
def get_model_fields(model):
# Backward compatibility patch for Django versions lower than 1.11.x
if DJANGO_VERSION >= (1, 11):
private_fields = model._meta.private_fields
else:
private_fields = model._meta.virtual_fields
all_fields_list = (
list(model._meta.fields)
+ list(model._meta.local_many_to_many)
+ list(private_fields)
+ list(model._meta.fields_map.values())
)
# Make sure we don't duplicate local fields with "reverse" version
# and get the real reverse django related_name
reverse_fields = list(get_reverse_fields(model))
exclude_fields = [field[1] for field in reverse_fields]
local_fields = [(field.name, field) for field in all_fields_list if field not in exclude_fields]
all_fields = local_fields + reverse_fields
return all_fields
def get_obj(app_label, model_name, object_id):
"""
Function used to get a object
:param app_label: A valid Django Model or a string with format: <app_label>.<model_name>
:param model_name: Key into kwargs that contains de data: new_person
:param object_id:
:return: instance
"""
try:
model = apps.get_model("{}.{}".format(app_label, model_name))
assert is_valid_django_model(model), ("Model {}.{} do not exist.").format(
app_label, model_name
)
obj = get_Object_or_None(model, pk=object_id)
return obj
except model.DoesNotExist:
return None
except LookupError:
pass
except ValidationError as e:
raise ValidationError(e.__str__())
except TypeError as e:
raise TypeError(e.__str__())
except Exception as e:
raise Exception(e.__str__())
def create_obj(django_model, new_obj_key=None, *args, **kwargs):
"""
Function used by my on traditional Mutations to create objs
:param django_model: A valid Django Model or a string with format:
<app_label>.<model_name>
:param new_obj_key: Key into kwargs that contains de data: new_person
:param args:
:param kwargs: Dict with model attributes values
:return: instance of model after saved it
"""
try:
if isinstance(django_model, six.string_types):
django_model = apps.get_model(django_model)
assert is_valid_django_model(django_model), (
"You need to pass a valid Django Model or a string with format: "
'<app_label>.<model_name> to "create_obj"'
' function, received "{}".'
).format(django_model)
data = kwargs.get(new_obj_key, None) if new_obj_key else kwargs
new_obj = django_model(**data)
new_obj.full_clean()
new_obj.save()
return new_obj
except LookupError:
pass
except ValidationError as e:
raise ValidationError(e.__str__())
except TypeError as e:
raise TypeError(e.__str__())
except Exception as e:
return e.__str__()
def clean_dict(d):
"""
Remove all empty fields in a nested dict
"""
if not isinstance(d, (dict, list)):
return d
if isinstance(d, list):
return [v for v in (clean_dict(v) for v in d) if v]
return OrderedDict([(k, v) for k, v in ((k, clean_dict(v)) for k, v in list(d.items())) if v])
def get_type(_type):
if isinstance(_type, (GraphQLList, GraphQLNonNull)):
return get_type(_type.of_type)
return _type
def get_fields(info):
fragments = info.fragments
field_nodes = info.field_nodes[0].selection_set.selections
for field_ast in field_nodes:
field_name = field_ast.name.value
if isinstance(field_ast, FragmentSpreadNode):
for field in fragments[field_name].selection_set.selections:
yield field.name.value
continue
yield field_name
def is_required(field):
try:
blank = getattr(field, "blank", getattr(field, "field", None))
default = getattr(field, "default", getattr(field, "field", None))
# null = getattr(field, "null", getattr(field, "field", None))
if blank is None:
blank = True
elif not isinstance(blank, bool):
blank = getattr(blank, "blank", True)
if default is None:
default = NOT_PROVIDED
elif default != NOT_PROVIDED:
default = getattr(default, "default", default)
except AttributeError:
return False
return not blank and default == NOT_PROVIDED
def _get_queryset(klass):
"""
Returns a QuerySet from a Model, Manager, or QuerySet. Created to make
get_object_or_404 and get_list_or_404 more DRY.
Raises a ValueError if klass is not a Model, Manager, or QuerySet.
"""
if isinstance(klass, QuerySet):
return klass
elif isinstance(klass, Manager):
manager = klass
elif isinstance(klass, ModelBase):
manager = klass._default_manager
else:
|
return manager.all()
def _get_custom_resolver(info):
"""
Get custom user defined resolver for query.
This resolver must return QuerySet instance to be successfully resolved.
"""
parent = info.parent_type
custom_resolver_name = f"resolve_{to_snake_case(info.field_name)}"
if hasattr(parent.graphene_type, custom_resolver_name):
return getattr(parent.graphene_type, custom_resolver_name)
return None
def get_Object_or_None(klass, *args, **kwargs):
"""
Uses get() to return an object, or None if the object does not exist.
klass may be a Model, Manager, or QuerySet object. All other passed
arguments and keyword arguments are used in the get() query.
Note: Like with get(), an MultipleObjectsReturned will be raised
if more than one object is found.
Ex: get_Object_or_None(User, db, id=1)
"""
queryset = _get_queryset(klass)
try:
if args:
return queryset.using(args[0]).get(**kwargs)
else:
return queryset.get(*args, **kwargs)
except queryset.model.DoesNotExist:
return None
# except queryset.model.MultipleObjectsReturned:
# return get_Objects_or_None(klass, *args, **kwargs)
def get_extra_filters(root, model):
extra_filters = {}
for field in model._meta.get_fields():
if field.is_relation and field.related_model == root._meta.model:
extra_filters.update({field.name: root})
return extra_filters
def get_related_fields(model):
return {
field.name: field
for field in model._meta.get_fields()
if field.is_relation and not isinstance(field, (GenericForeignKey, GenericRel))
}
def find_field(field, fields_dict):
temp = fields_dict.get(field.name.value, fields_dict.get(to_snake_case(field.name.value), None))
return temp
def recursive_params(
selection_set, fragments, available_related_fields, select_related, prefetch_related
):
for field in selection_set.selections:
if isinstance(field, FragmentSpreadNode) and fragments:
a, b = recursive_params(
fragments[field.name.value].selection_set,
fragments,
available_related_fields,
select_related,
prefetch_related,
)
[select_related.append(x) for x in a if x not in select_related]
[prefetch_related | if isinstance(klass, type):
klass__name = klass.__name__
else:
klass__name = klass.__class__.__name__
raise ValueError(
"Object is of type '{}', but must be a Django Model, "
"Manager, or QuerySet".format(klass__name)
) | conditional_block |
utils.py | , ManyToManyRel) and not related.symmetrical:
yield (name, related)
def _resolve_model(obj):
"""
Resolve supplied `obj` to a Django model class.
`obj` must be a Django model class itself, or a string
representation of one. Useful in situations like GH #1225 where
Django may not have resolved a string-based reference to a model in
another model's foreign key definition.
String representations should have the format:
'appname.ModelName'
"""
if isinstance(obj, six.string_types) and len(obj.split(".")) == 2:
app_name, model_name = obj.split(".")
resolved_model = apps.get_model(app_name, model_name)
if resolved_model is None:
msg = "Django did not return a model for {0}.{1}"
raise ImproperlyConfigured(msg.format(app_name, model_name))
return resolved_model
elif inspect.isclass(obj) and issubclass(obj, Model):
return obj
raise ValueError("{0} is not a Django model".format(obj))
def to_kebab_case(name):
s1 = re.sub("(.)([A-Z][a-z]+)", r"\1-\2", name.title().replace(" ", ""))
return re.sub("([a-z0-9])([A-Z])", r"\1-\2", s1).lower()
def get_related_model(field):
# Backward compatibility patch for Django versions lower than 1.9.x
if DJANGO_VERSION < (1, 9):
return _resolve_model(field.rel.to)
return field.remote_field.model
def get_model_fields(model):
# Backward compatibility patch for Django versions lower than 1.11.x
if DJANGO_VERSION >= (1, 11):
private_fields = model._meta.private_fields
else:
private_fields = model._meta.virtual_fields
all_fields_list = (
list(model._meta.fields)
+ list(model._meta.local_many_to_many)
+ list(private_fields)
+ list(model._meta.fields_map.values())
)
# Make sure we don't duplicate local fields with "reverse" version
# and get the real reverse django related_name
reverse_fields = list(get_reverse_fields(model))
exclude_fields = [field[1] for field in reverse_fields]
local_fields = [(field.name, field) for field in all_fields_list if field not in exclude_fields]
all_fields = local_fields + reverse_fields
return all_fields
def get_obj(app_label, model_name, object_id):
| except ValidationError as e:
raise ValidationError(e.__str__())
except TypeError as e:
raise TypeError(e.__str__())
except Exception as e:
raise Exception(e.__str__())
def create_obj(django_model, new_obj_key=None, *args, **kwargs):
"""
Function used by my on traditional Mutations to create objs
:param django_model: A valid Django Model or a string with format:
<app_label>.<model_name>
:param new_obj_key: Key into kwargs that contains de data: new_person
:param args:
:param kwargs: Dict with model attributes values
:return: instance of model after saved it
"""
try:
if isinstance(django_model, six.string_types):
django_model = apps.get_model(django_model)
assert is_valid_django_model(django_model), (
"You need to pass a valid Django Model or a string with format: "
'<app_label>.<model_name> to "create_obj"'
' function, received "{}".'
).format(django_model)
data = kwargs.get(new_obj_key, None) if new_obj_key else kwargs
new_obj = django_model(**data)
new_obj.full_clean()
new_obj.save()
return new_obj
except LookupError:
pass
except ValidationError as e:
raise ValidationError(e.__str__())
except TypeError as e:
raise TypeError(e.__str__())
except Exception as e:
return e.__str__()
def clean_dict(d):
"""
Remove all empty fields in a nested dict
"""
if not isinstance(d, (dict, list)):
return d
if isinstance(d, list):
return [v for v in (clean_dict(v) for v in d) if v]
return OrderedDict([(k, v) for k, v in ((k, clean_dict(v)) for k, v in list(d.items())) if v])
def get_type(_type):
if isinstance(_type, (GraphQLList, GraphQLNonNull)):
return get_type(_type.of_type)
return _type
def get_fields(info):
fragments = info.fragments
field_nodes = info.field_nodes[0].selection_set.selections
for field_ast in field_nodes:
field_name = field_ast.name.value
if isinstance(field_ast, FragmentSpreadNode):
for field in fragments[field_name].selection_set.selections:
yield field.name.value
continue
yield field_name
def is_required(field):
try:
blank = getattr(field, "blank", getattr(field, "field", None))
default = getattr(field, "default", getattr(field, "field", None))
# null = getattr(field, "null", getattr(field, "field", None))
if blank is None:
blank = True
elif not isinstance(blank, bool):
blank = getattr(blank, "blank", True)
if default is None:
default = NOT_PROVIDED
elif default != NOT_PROVIDED:
default = getattr(default, "default", default)
except AttributeError:
return False
return not blank and default == NOT_PROVIDED
def _get_queryset(klass):
"""
Returns a QuerySet from a Model, Manager, or QuerySet. Created to make
get_object_or_404 and get_list_or_404 more DRY.
Raises a ValueError if klass is not a Model, Manager, or QuerySet.
"""
if isinstance(klass, QuerySet):
return klass
elif isinstance(klass, Manager):
manager = klass
elif isinstance(klass, ModelBase):
manager = klass._default_manager
else:
if isinstance(klass, type):
klass__name = klass.__name__
else:
klass__name = klass.__class__.__name__
raise ValueError(
"Object is of type '{}', but must be a Django Model, "
"Manager, or QuerySet".format(klass__name)
)
return manager.all()
def _get_custom_resolver(info):
"""
Get custom user defined resolver for query.
This resolver must return QuerySet instance to be successfully resolved.
"""
parent = info.parent_type
custom_resolver_name = f"resolve_{to_snake_case(info.field_name)}"
if hasattr(parent.graphene_type, custom_resolver_name):
return getattr(parent.graphene_type, custom_resolver_name)
return None
def get_Object_or_None(klass, *args, **kwargs):
"""
Uses get() to return an object, or None if the object does not exist.
klass may be a Model, Manager, or QuerySet object. All other passed
arguments and keyword arguments are used in the get() query.
Note: Like with get(), an MultipleObjectsReturned will be raised
if more than one object is found.
Ex: get_Object_or_None(User, db, id=1)
"""
queryset = _get_queryset(klass)
try:
if args:
return queryset.using(args[0]).get(**kwargs)
else:
return queryset.get(*args, **kwargs)
except queryset.model.DoesNotExist:
return None
# except queryset.model.MultipleObjectsReturned:
# return get_Objects_or_None(klass, *args, **kwargs)
def get_extra_filters(root, model):
extra_filters = {}
for field in model._meta.get_fields():
if field.is_relation and field.related_model == root._meta.model:
extra_filters.update({field.name: root})
return extra_filters
def get_related_fields(model):
return {
field.name: field
for field in model._meta.get_fields()
if field.is_relation and not isinstance(field, (GenericForeignKey, GenericRel))
}
def find_field(field, fields_dict):
temp = fields_dict.get(field.name.value, fields_dict.get(to_snake_case(field.name.value), None))
return temp
def recursive_params(
selection_set, fragments, available_related_fields, select_related, prefetch_related
):
for field in selection_set.selections:
if isinstance(field, FragmentSpreadNode) and fragments:
a, b = recursive_params(
fragments[field.name.value].selection_set,
fragments,
available_related_fields,
select_related,
prefetch_related,
)
[select_related.append(x) for x in a if x not in select_related]
[prefetch_related.append | """
Function used to get a object
:param app_label: A valid Django Model or a string with format: <app_label>.<model_name>
:param model_name: Key into kwargs that contains de data: new_person
:param object_id:
:return: instance
"""
try:
model = apps.get_model("{}.{}".format(app_label, model_name))
assert is_valid_django_model(model), ("Model {}.{} do not exist.").format(
app_label, model_name
)
obj = get_Object_or_None(model, pk=object_id)
return obj
except model.DoesNotExist:
return None
except LookupError:
pass | identifier_body |
utils.py | , ManyToManyRel) and not related.symmetrical:
yield (name, related)
def _resolve_model(obj):
"""
Resolve supplied `obj` to a Django model class.
`obj` must be a Django model class itself, or a string
representation of one. Useful in situations like GH #1225 where
Django may not have resolved a string-based reference to a model in
another model's foreign key definition.
String representations should have the format:
'appname.ModelName'
"""
if isinstance(obj, six.string_types) and len(obj.split(".")) == 2:
app_name, model_name = obj.split(".")
resolved_model = apps.get_model(app_name, model_name)
if resolved_model is None:
msg = "Django did not return a model for {0}.{1}"
raise ImproperlyConfigured(msg.format(app_name, model_name))
return resolved_model
elif inspect.isclass(obj) and issubclass(obj, Model):
return obj
raise ValueError("{0} is not a Django model".format(obj))
def to_kebab_case(name):
s1 = re.sub("(.)([A-Z][a-z]+)", r"\1-\2", name.title().replace(" ", ""))
return re.sub("([a-z0-9])([A-Z])", r"\1-\2", s1).lower()
def get_related_model(field):
# Backward compatibility patch for Django versions lower than 1.9.x
if DJANGO_VERSION < (1, 9):
return _resolve_model(field.rel.to)
return field.remote_field.model
def get_model_fields(model):
# Backward compatibility patch for Django versions lower than 1.11.x
if DJANGO_VERSION >= (1, 11):
private_fields = model._meta.private_fields
else:
private_fields = model._meta.virtual_fields
all_fields_list = (
list(model._meta.fields)
+ list(model._meta.local_many_to_many)
+ list(private_fields)
+ list(model._meta.fields_map.values())
)
# Make sure we don't duplicate local fields with "reverse" version
# and get the real reverse django related_name
reverse_fields = list(get_reverse_fields(model))
exclude_fields = [field[1] for field in reverse_fields]
local_fields = [(field.name, field) for field in all_fields_list if field not in exclude_fields]
all_fields = local_fields + reverse_fields
return all_fields
def get_obj(app_label, model_name, object_id):
"""
Function used to get a object
:param app_label: A valid Django Model or a string with format: <app_label>.<model_name>
:param model_name: Key into kwargs that contains de data: new_person
:param object_id:
:return: instance
"""
try:
model = apps.get_model("{}.{}".format(app_label, model_name))
assert is_valid_django_model(model), ("Model {}.{} do not exist.").format(
app_label, model_name
)
obj = get_Object_or_None(model, pk=object_id)
return obj
except model.DoesNotExist:
return None
except LookupError:
pass
except ValidationError as e:
raise ValidationError(e.__str__())
except TypeError as e:
raise TypeError(e.__str__())
except Exception as e:
raise Exception(e.__str__())
def create_obj(django_model, new_obj_key=None, *args, **kwargs):
"""
Function used by my on traditional Mutations to create objs
:param django_model: A valid Django Model or a string with format:
<app_label>.<model_name>
:param new_obj_key: Key into kwargs that contains de data: new_person
:param args:
:param kwargs: Dict with model attributes values
:return: instance of model after saved it
"""
try:
if isinstance(django_model, six.string_types):
django_model = apps.get_model(django_model)
assert is_valid_django_model(django_model), (
"You need to pass a valid Django Model or a string with format: "
'<app_label>.<model_name> to "create_obj"'
' function, received "{}".'
).format(django_model)
data = kwargs.get(new_obj_key, None) if new_obj_key else kwargs
new_obj = django_model(**data)
new_obj.full_clean()
new_obj.save()
return new_obj
except LookupError:
pass
except ValidationError as e:
raise ValidationError(e.__str__())
except TypeError as e:
raise TypeError(e.__str__())
except Exception as e:
return e.__str__()
def | (d):
"""
Remove all empty fields in a nested dict
"""
if not isinstance(d, (dict, list)):
return d
if isinstance(d, list):
return [v for v in (clean_dict(v) for v in d) if v]
return OrderedDict([(k, v) for k, v in ((k, clean_dict(v)) for k, v in list(d.items())) if v])
def get_type(_type):
if isinstance(_type, (GraphQLList, GraphQLNonNull)):
return get_type(_type.of_type)
return _type
def get_fields(info):
fragments = info.fragments
field_nodes = info.field_nodes[0].selection_set.selections
for field_ast in field_nodes:
field_name = field_ast.name.value
if isinstance(field_ast, FragmentSpreadNode):
for field in fragments[field_name].selection_set.selections:
yield field.name.value
continue
yield field_name
def is_required(field):
try:
blank = getattr(field, "blank", getattr(field, "field", None))
default = getattr(field, "default", getattr(field, "field", None))
# null = getattr(field, "null", getattr(field, "field", None))
if blank is None:
blank = True
elif not isinstance(blank, bool):
blank = getattr(blank, "blank", True)
if default is None:
default = NOT_PROVIDED
elif default != NOT_PROVIDED:
default = getattr(default, "default", default)
except AttributeError:
return False
return not blank and default == NOT_PROVIDED
def _get_queryset(klass):
"""
Returns a QuerySet from a Model, Manager, or QuerySet. Created to make
get_object_or_404 and get_list_or_404 more DRY.
Raises a ValueError if klass is not a Model, Manager, or QuerySet.
"""
if isinstance(klass, QuerySet):
return klass
elif isinstance(klass, Manager):
manager = klass
elif isinstance(klass, ModelBase):
manager = klass._default_manager
else:
if isinstance(klass, type):
klass__name = klass.__name__
else:
klass__name = klass.__class__.__name__
raise ValueError(
"Object is of type '{}', but must be a Django Model, "
"Manager, or QuerySet".format(klass__name)
)
return manager.all()
def _get_custom_resolver(info):
"""
Get custom user defined resolver for query.
This resolver must return QuerySet instance to be successfully resolved.
"""
parent = info.parent_type
custom_resolver_name = f"resolve_{to_snake_case(info.field_name)}"
if hasattr(parent.graphene_type, custom_resolver_name):
return getattr(parent.graphene_type, custom_resolver_name)
return None
def get_Object_or_None(klass, *args, **kwargs):
"""
Uses get() to return an object, or None if the object does not exist.
klass may be a Model, Manager, or QuerySet object. All other passed
arguments and keyword arguments are used in the get() query.
Note: Like with get(), an MultipleObjectsReturned will be raised
if more than one object is found.
Ex: get_Object_or_None(User, db, id=1)
"""
queryset = _get_queryset(klass)
try:
if args:
return queryset.using(args[0]).get(**kwargs)
else:
return queryset.get(*args, **kwargs)
except queryset.model.DoesNotExist:
return None
# except queryset.model.MultipleObjectsReturned:
# return get_Objects_or_None(klass, *args, **kwargs)
def get_extra_filters(root, model):
extra_filters = {}
for field in model._meta.get_fields():
if field.is_relation and field.related_model == root._meta.model:
extra_filters.update({field.name: root})
return extra_filters
def get_related_fields(model):
return {
field.name: field
for field in model._meta.get_fields()
if field.is_relation and not isinstance(field, (GenericForeignKey, GenericRel))
}
def find_field(field, fields_dict):
temp = fields_dict.get(field.name.value, fields_dict.get(to_snake_case(field.name.value), None))
return temp
def recursive_params(
selection_set, fragments, available_related_fields, select_related, prefetch_related
):
for field in selection_set.selections:
if isinstance(field, FragmentSpreadNode) and fragments:
a, b = recursive_params(
fragments[field.name.value].selection_set,
fragments,
available_related_fields,
select_related,
prefetch_related,
)
[select_related.append(x) for x in a if x not in select_related]
[prefetch_related | clean_dict | identifier_name |
mod.rs | let start = Instant::now();
// Postgres only supports a maximum of 2^15 params
let (remain, posts) = if item.len() > 1280 {
let remain = item.split_off(1280);
(remain, item)
} else {
(vec![], item)
};
item = remain;
let rows = posts.len();
let query = "INSERT INTO
posts
(board, thread_no, post_no, subject, username, tripcode,
email, unique_id, since4_pass, country, filename,
image_hash, image_width, image_height, ts, comment, deleted,
ghost, sticky, spoiler, op, capcode) VALUES ";
let stmt = std::iter::once(Cow::Borrowed(query))
.chain((0..rows).map(|i| {
let z = i * 22;
Cow::Owned(
[
if i == 0 { "(" } else { "\n,(" },
PLACEHOLDERS[z], // board
",",
PLACEHOLDERS[z + 1], // thread_no
",",
PLACEHOLDERS[z + 2], // post_no
",to_tsvector(",
PLACEHOLDERS[z + 3], // subject
"),to_tsvector(",
PLACEHOLDERS[z + 4], // username
"),to_tsvector(",
PLACEHOLDERS[z + 5], // tripcode
"),to_tsvector(",
PLACEHOLDERS[z + 6], // email
"),",
PLACEHOLDERS[z + 7], // unique_id
",",
PLACEHOLDERS[z + 8], // since4_pass
",",
PLACEHOLDERS[z + 9], // country
",to_tsvector(REPLACE(",
PLACEHOLDERS[z + 10], // filename
",'.',' ')),",
PLACEHOLDERS[z + 11], // image_hash
",",
PLACEHOLDERS[z + 12], // image_width
",",
PLACEHOLDERS[z + 13], // image_height
",TO_TIMESTAMP(CAST(",
PLACEHOLDERS[z + 14], // ts
"::INT8 AS FLOAT8)),to_tsvector(",
PLACEHOLDERS[z + 15], // comment
"),",
PLACEHOLDERS[z + 16], // deleted
",",
PLACEHOLDERS[z + 17], // ghost
",",
PLACEHOLDERS[z + 18], // sticky
",",
PLACEHOLDERS[z + 19], // spoiler
",",
PLACEHOLDERS[z + 20], // op
",CAST(",
PLACEHOLDERS[z + 21], // capcode
"::INT8 AS INT4))",
]
.join(""),
)
}))
.chain(std::iter::once(Cow::Borrowed(
" ON CONFLICT (board, post_no) DO UPDATE SET
deleted = EXCLUDED.deleted,
sticky = EXCLUDED.sticky,
comment = COALESCE(EXCLUDED.comment, posts.comment);
",
)))
.collect::<String>();
let i64_rena = arena::Arena::new(posts.len() * 4);
let str_rena = arena::Arena::new(posts.len() * 4);
let params = (0..posts.len())
.into_iter()
.map(|i| {
let values: Box<[&(dyn ToSql + Sync)]> = Box::new([
str_rena.alloc(Some(posts[i].board.to_string())),
i64_rena.alloc(Some(posts[i].thread_no() as i64)),
i64_rena.alloc(Some(posts[i].no as i64)),
&posts[i].sub,
&posts[i].name,
&posts[i].trip,
&posts[i].email,
&posts[i].id,
&posts[i].since4pass,
str_rena.alloc(posts[i].poster_country()),
str_rena.alloc(posts[i].media_filename()),
&posts[i].md5,
&posts[i].w,
&posts[i].h,
i64_rena.alloc(Some(posts[i].time as i64)),
str_rena.alloc(posts[i].comment().map(|x| str_sanitize(x))),
&posts[i].deleted,
&false,
&posts[i].sticky,
&posts[i].spoiler,
if posts[i].is_op() { &true } else { &false },
i64_rena.alloc(posts[i].short_capcode().chars().next().map(|c| c as i64)),
]);
values.into_vec()
})
.flatten()
.collect::<Vec<_>>();
let mut attempts = 0;
let mut backoff = backoff::ExponentialBackoff::default();
backoff.max_elapsed_time = None;
loop {
let r = client.execute(stmt.as_str(), ¶ms).await;
match r {
Ok(_) => break,
Err(err) => {
if attempts >= self.retries_on_save_error {
return Err(Error::from(err));
}
attempts += 1;
if let Some(b) = backoff.next_backoff() {
tokio::time::delay_for(b).await;
}
continue;
}
}
}
self.metrics.incr_posts(rows as u64);
self.metrics.incr_query_time(start.elapsed());
self.notify_post(rows);
// Since values contains references to data in the 'renas,
// the values must be dropped before we drop the 'renas
drop(params);
drop(i64_rena);
drop(str_rena);
}
Ok(())
}
async fn send_posts(self: Arc<Self>, item: Vec<imageboard::Post>) {
let board = item[0].board;
let thread_no = item[0].thread_no();
let post_no = item[0].no;
let sz = item.len();
match self.save_posts(item).await {
Ok(_) => debug!(
"Flushed {} posts to postgres. [First]: {}/{}/{}",
sz, board, thread_no, post_no
),
Err(err) => {
error!(
"Failed to save data for {} posts [First]: {}/{}/{}: {}",
sz, board, thread_no, post_no, err
);
if !self.fail_on_save_error {
warn!("Some posts were unable to be archived, however the error isn't being treated as fatal. Some posts may be lost.")
}
self.metrics.incr_save_error(1);
self.failed.store(true, Ordering::SeqCst);
}
}
}
fn notify_post(&self, no_posts: usize) {
let old = self.inflight_posts.fetch_sub(no_posts, Ordering::AcqRel);
let curr = old - no_posts;
if curr < self.max_inflight_posts {
self.waker.wake();
}
if curr == 0 {
self.flush_waker.wake();
self.close_waker.wake();
}
}
fn is_ready(&self) -> bool {
let posts = self.inflight_posts.load(Ordering::Acquire);
posts < self.max_inflight_posts
}
fn is_empty(&self) -> bool {
let posts = self.inflight_posts.load(Ordering::Acquire);
posts == 0
}
fn has_failed(&self) -> bool {
return self.fail_on_save_error && self.failed.load(Ordering::Relaxed);
}
}
impl Sink<Vec<imageboard::Post>> for Search {
type Error = Error;
fn poll_ready(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
if self.inner.has_failed() {
return Poll::Ready(Err(Error::ArchiveError));
}
self.inner.waker.register(cx.waker());
match self.inner.is_ready() {
true => Poll::Ready(Ok(())),
false => Poll::Pending,
}
}
fn start_send(self: Pin<&mut Self>, item: Vec<imageboard::Post>) -> Result<(), Self::Error> {
if item.len() > 0 {
self.inner
.inflight_posts
.fetch_add(item.len(), Ordering::AcqRel);
self.inner.process_tx.send(Some(item)).unwrap();
}
Ok(())
}
fn poll_flush(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
if self.inner.has_failed() {
return Poll::Ready(Err(Error::ArchiveError));
}
self.inner.flush_waker.register(cx.waker());
match self.inner.is_empty() {
true => Poll::Ready(Ok(())),
false => Poll::Pending,
}
}
fn poll_close(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
let _ = self.inner.process_tx.send(None);
if self.inner.has_failed() {
return Poll::Ready(Err(Error::ArchiveError));
}
self.inner.close_waker.register(cx.waker());
match self.inner.is_empty() {
true => Poll::Ready(Ok(())), | false => Poll::Pending,
}
}
} | random_line_split |
|
mod.rs | ::Acquire) as f64;
let tt = self.inner.metrics.query_time_ns.load(Ordering::Acquire) as f64;
let m = Metrics {
posts: self.inner.metrics.posts.load(Ordering::Acquire),
avg_insert_time_ms: queries / tt * 1_000_000.,
save_errors: self.inner.metrics.save_errors.load(Ordering::Acquire),
};
let m: Box<dyn erased_serde::Serialize + Send> = Box::new(m);
futures::future::ready(m).boxed()
}
}
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct Search {
inner: Arc<SearchInner>,
}
impl Search {
#[allow(dead_code)]
pub fn builder() -> SearchBuilder {
SearchBuilder::default()
}
pub fn metrics_provider(&self) -> impl super::MetricsProvider {
SearchMetricsProvider {
inner: self.inner.clone(),
}
}
}
impl SearchInner {
async fn save_posts(&self, mut item: Vec<imageboard::Post>) -> Result<(), Error> {
let client = self.db_pool.get().await?;
while item.len() > 0 {
let start = Instant::now();
// Postgres only supports a maximum of 2^15 params
let (remain, posts) = if item.len() > 1280 {
let remain = item.split_off(1280);
(remain, item)
} else {
(vec![], item)
};
item = remain;
let rows = posts.len();
let query = "INSERT INTO
posts
(board, thread_no, post_no, subject, username, tripcode,
email, unique_id, since4_pass, country, filename,
image_hash, image_width, image_height, ts, comment, deleted,
ghost, sticky, spoiler, op, capcode) VALUES ";
let stmt = std::iter::once(Cow::Borrowed(query))
.chain((0..rows).map(|i| {
let z = i * 22;
Cow::Owned(
[
if i == 0 { "(" } else { "\n,(" },
PLACEHOLDERS[z], // board
",",
PLACEHOLDERS[z + 1], // thread_no
",",
PLACEHOLDERS[z + 2], // post_no
",to_tsvector(",
PLACEHOLDERS[z + 3], // subject
"),to_tsvector(",
PLACEHOLDERS[z + 4], // username
"),to_tsvector(",
PLACEHOLDERS[z + 5], // tripcode
"),to_tsvector(",
PLACEHOLDERS[z + 6], // email
"),",
PLACEHOLDERS[z + 7], // unique_id
",",
PLACEHOLDERS[z + 8], // since4_pass
",",
PLACEHOLDERS[z + 9], // country
",to_tsvector(REPLACE(",
PLACEHOLDERS[z + 10], // filename
",'.',' ')),",
PLACEHOLDERS[z + 11], // image_hash
",",
PLACEHOLDERS[z + 12], // image_width
",",
PLACEHOLDERS[z + 13], // image_height
",TO_TIMESTAMP(CAST(",
PLACEHOLDERS[z + 14], // ts
"::INT8 AS FLOAT8)),to_tsvector(",
PLACEHOLDERS[z + 15], // comment
"),",
PLACEHOLDERS[z + 16], // deleted
",",
PLACEHOLDERS[z + 17], // ghost
",",
PLACEHOLDERS[z + 18], // sticky
",",
PLACEHOLDERS[z + 19], // spoiler
",",
PLACEHOLDERS[z + 20], // op
",CAST(",
PLACEHOLDERS[z + 21], // capcode
"::INT8 AS INT4))",
]
.join(""),
)
}))
.chain(std::iter::once(Cow::Borrowed(
" ON CONFLICT (board, post_no) DO UPDATE SET
deleted = EXCLUDED.deleted,
sticky = EXCLUDED.sticky,
comment = COALESCE(EXCLUDED.comment, posts.comment);
",
)))
.collect::<String>();
let i64_rena = arena::Arena::new(posts.len() * 4);
let str_rena = arena::Arena::new(posts.len() * 4);
let params = (0..posts.len())
.into_iter()
.map(|i| {
let values: Box<[&(dyn ToSql + Sync)]> = Box::new([
str_rena.alloc(Some(posts[i].board.to_string())),
i64_rena.alloc(Some(posts[i].thread_no() as i64)),
i64_rena.alloc(Some(posts[i].no as i64)),
&posts[i].sub,
&posts[i].name,
&posts[i].trip,
&posts[i].email,
&posts[i].id,
&posts[i].since4pass,
str_rena.alloc(posts[i].poster_country()),
str_rena.alloc(posts[i].media_filename()),
&posts[i].md5,
&posts[i].w,
&posts[i].h,
i64_rena.alloc(Some(posts[i].time as i64)),
str_rena.alloc(posts[i].comment().map(|x| str_sanitize(x))),
&posts[i].deleted,
&false,
&posts[i].sticky,
&posts[i].spoiler,
if posts[i].is_op() { &true } else { &false },
i64_rena.alloc(posts[i].short_capcode().chars().next().map(|c| c as i64)),
]);
values.into_vec()
})
.flatten()
.collect::<Vec<_>>();
let mut attempts = 0;
let mut backoff = backoff::ExponentialBackoff::default();
backoff.max_elapsed_time = None;
loop {
let r = client.execute(stmt.as_str(), ¶ms).await;
match r {
Ok(_) => break,
Err(err) => {
if attempts >= self.retries_on_save_error {
return Err(Error::from(err));
}
attempts += 1;
if let Some(b) = backoff.next_backoff() {
tokio::time::delay_for(b).await;
}
continue;
}
}
}
self.metrics.incr_posts(rows as u64);
self.metrics.incr_query_time(start.elapsed());
self.notify_post(rows);
// Since values contains references to data in the 'renas,
// the values must be dropped before we drop the 'renas
drop(params);
drop(i64_rena);
drop(str_rena);
}
Ok(())
}
async fn send_posts(self: Arc<Self>, item: Vec<imageboard::Post>) {
let board = item[0].board;
let thread_no = item[0].thread_no();
let post_no = item[0].no;
let sz = item.len();
match self.save_posts(item).await {
Ok(_) => debug!(
"Flushed {} posts to postgres. [First]: {}/{}/{}",
sz, board, thread_no, post_no
),
Err(err) => {
error!(
"Failed to save data for {} posts [First]: {}/{}/{}: {}",
sz, board, thread_no, post_no, err
);
if !self.fail_on_save_error {
warn!("Some posts were unable to be archived, however the error isn't being treated as fatal. Some posts may be lost.")
}
self.metrics.incr_save_error(1);
self.failed.store(true, Ordering::SeqCst);
}
}
}
fn notify_post(&self, no_posts: usize) {
let old = self.inflight_posts.fetch_sub(no_posts, Ordering::AcqRel);
let curr = old - no_posts;
if curr < self.max_inflight_posts {
self.waker.wake();
}
if curr == 0 {
self.flush_waker.wake();
self.close_waker.wake();
}
}
fn is_ready(&self) -> bool {
let posts = self.inflight_posts.load(Ordering::Acquire);
posts < self.max_inflight_posts
}
fn is_empty(&self) -> bool {
let posts = self.inflight_posts.load(Ordering::Acquire);
posts == 0
}
fn has_failed(&self) -> bool {
return self.fail_on_save_error && self.failed.load(Ordering::Relaxed);
}
}
impl Sink<Vec<imageboard::Post>> for Search {
type Error = Error;
fn poll_ready(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
if self.inner.has_failed() {
return Poll::Ready(Err(Error::ArchiveError));
}
self.inner.waker.register(cx.waker());
match self.inner.is_ready() {
true => Poll::Ready(Ok(())),
false => Poll::Pending,
}
}
fn | start_send | identifier_name |
|
mod.rs | }
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct Search {
inner: Arc<SearchInner>,
}
impl Search {
#[allow(dead_code)]
pub fn builder() -> SearchBuilder {
SearchBuilder::default()
}
pub fn metrics_provider(&self) -> impl super::MetricsProvider {
SearchMetricsProvider {
inner: self.inner.clone(),
}
}
}
impl SearchInner {
async fn save_posts(&self, mut item: Vec<imageboard::Post>) -> Result<(), Error> {
let client = self.db_pool.get().await?;
while item.len() > 0 {
let start = Instant::now();
// Postgres only supports a maximum of 2^15 params
let (remain, posts) = if item.len() > 1280 {
let remain = item.split_off(1280);
(remain, item)
} else {
(vec![], item)
};
item = remain;
let rows = posts.len();
let query = "INSERT INTO
posts
(board, thread_no, post_no, subject, username, tripcode,
email, unique_id, since4_pass, country, filename,
image_hash, image_width, image_height, ts, comment, deleted,
ghost, sticky, spoiler, op, capcode) VALUES ";
let stmt = std::iter::once(Cow::Borrowed(query))
.chain((0..rows).map(|i| {
let z = i * 22;
Cow::Owned(
[
if i == 0 { "(" } else { "\n,(" },
PLACEHOLDERS[z], // board
",",
PLACEHOLDERS[z + 1], // thread_no
",",
PLACEHOLDERS[z + 2], // post_no
",to_tsvector(",
PLACEHOLDERS[z + 3], // subject
"),to_tsvector(",
PLACEHOLDERS[z + 4], // username
"),to_tsvector(",
PLACEHOLDERS[z + 5], // tripcode
"),to_tsvector(",
PLACEHOLDERS[z + 6], // email
"),",
PLACEHOLDERS[z + 7], // unique_id
",",
PLACEHOLDERS[z + 8], // since4_pass
",",
PLACEHOLDERS[z + 9], // country
",to_tsvector(REPLACE(",
PLACEHOLDERS[z + 10], // filename
",'.',' ')),",
PLACEHOLDERS[z + 11], // image_hash
",",
PLACEHOLDERS[z + 12], // image_width
",",
PLACEHOLDERS[z + 13], // image_height
",TO_TIMESTAMP(CAST(",
PLACEHOLDERS[z + 14], // ts
"::INT8 AS FLOAT8)),to_tsvector(",
PLACEHOLDERS[z + 15], // comment
"),",
PLACEHOLDERS[z + 16], // deleted
",",
PLACEHOLDERS[z + 17], // ghost
",",
PLACEHOLDERS[z + 18], // sticky
",",
PLACEHOLDERS[z + 19], // spoiler
",",
PLACEHOLDERS[z + 20], // op
",CAST(",
PLACEHOLDERS[z + 21], // capcode
"::INT8 AS INT4))",
]
.join(""),
)
}))
.chain(std::iter::once(Cow::Borrowed(
" ON CONFLICT (board, post_no) DO UPDATE SET
deleted = EXCLUDED.deleted,
sticky = EXCLUDED.sticky,
comment = COALESCE(EXCLUDED.comment, posts.comment);
",
)))
.collect::<String>();
let i64_rena = arena::Arena::new(posts.len() * 4);
let str_rena = arena::Arena::new(posts.len() * 4);
let params = (0..posts.len())
.into_iter()
.map(|i| {
let values: Box<[&(dyn ToSql + Sync)]> = Box::new([
str_rena.alloc(Some(posts[i].board.to_string())),
i64_rena.alloc(Some(posts[i].thread_no() as i64)),
i64_rena.alloc(Some(posts[i].no as i64)),
&posts[i].sub,
&posts[i].name,
&posts[i].trip,
&posts[i].email,
&posts[i].id,
&posts[i].since4pass,
str_rena.alloc(posts[i].poster_country()),
str_rena.alloc(posts[i].media_filename()),
&posts[i].md5,
&posts[i].w,
&posts[i].h,
i64_rena.alloc(Some(posts[i].time as i64)),
str_rena.alloc(posts[i].comment().map(|x| str_sanitize(x))),
&posts[i].deleted,
&false,
&posts[i].sticky,
&posts[i].spoiler,
if posts[i].is_op() { &true } else { &false },
i64_rena.alloc(posts[i].short_capcode().chars().next().map(|c| c as i64)),
]);
values.into_vec()
})
.flatten()
.collect::<Vec<_>>();
let mut attempts = 0;
let mut backoff = backoff::ExponentialBackoff::default();
backoff.max_elapsed_time = None;
loop {
let r = client.execute(stmt.as_str(), ¶ms).await;
match r {
Ok(_) => break,
Err(err) => {
if attempts >= self.retries_on_save_error {
return Err(Error::from(err));
}
attempts += 1;
if let Some(b) = backoff.next_backoff() {
tokio::time::delay_for(b).await;
}
continue;
}
}
}
self.metrics.incr_posts(rows as u64);
self.metrics.incr_query_time(start.elapsed());
self.notify_post(rows);
// Since values contains references to data in the 'renas,
// the values must be dropped before we drop the 'renas
drop(params);
drop(i64_rena);
drop(str_rena);
}
Ok(())
}
async fn send_posts(self: Arc<Self>, item: Vec<imageboard::Post>) {
let board = item[0].board;
let thread_no = item[0].thread_no();
let post_no = item[0].no;
let sz = item.len();
match self.save_posts(item).await {
Ok(_) => debug!(
"Flushed {} posts to postgres. [First]: {}/{}/{}",
sz, board, thread_no, post_no
),
Err(err) => {
error!(
"Failed to save data for {} posts [First]: {}/{}/{}: {}",
sz, board, thread_no, post_no, err
);
if !self.fail_on_save_error {
warn!("Some posts were unable to be archived, however the error isn't being treated as fatal. Some posts may be lost.")
}
self.metrics.incr_save_error(1);
self.failed.store(true, Ordering::SeqCst);
}
}
}
fn notify_post(&self, no_posts: usize) {
let old = self.inflight_posts.fetch_sub(no_posts, Ordering::AcqRel);
let curr = old - no_posts;
if curr < self.max_inflight_posts {
self.waker.wake();
}
if curr == 0 {
self.flush_waker.wake();
self.close_waker.wake();
}
}
fn is_ready(&self) -> bool {
let posts = self.inflight_posts.load(Ordering::Acquire);
posts < self.max_inflight_posts
}
fn is_empty(&self) -> bool {
let posts = self.inflight_posts.load(Ordering::Acquire);
posts == 0
}
fn has_failed(&self) -> bool {
return self.fail_on_save_error && self.failed.load(Ordering::Relaxed);
}
}
impl Sink<Vec<imageboard::Post>> for Search {
type Error = Error;
fn poll_ready(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
if self.inner.has_failed() {
return Poll::Ready(Err(Error::ArchiveError));
}
self.inner.waker.register(cx.waker());
match self.inner.is_ready() {
true => Poll::Ready(Ok(())),
false => Poll::Pending,
}
}
fn start_send(self: Pin<&mut Self>, item: Vec<imageboard::Post>) -> Result<(), Self::Error> {
if item.len() > 0 {
self.inner
.inflight_posts
.fetch_add(item.len(), Ordering::AcqRel);
self.inner.process_tx.send(Some(item)).unwrap();
}
Ok(())
}
fn poll_flush(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
if self.inner.has_failed() | {
return Poll::Ready(Err(Error::ArchiveError));
} | conditional_block |
|
mod.rs | ) {
self.queries.fetch_add(1, Ordering::Relaxed);
self.query_time_ns
.fetch_add(dur.as_nanos() as u64, Ordering::Relaxed);
}
pub fn incr_save_error(&self, count: u64) {
self.save_errors.fetch_add(count, Ordering::Relaxed);
}
}
#[derive(Clone)]
pub struct SearchMetricsProvider {
inner: Arc<SearchInner>,
}
impl super::MetricsProvider for SearchMetricsProvider {
fn name(&self) -> &'static str {
"pg_search"
}
fn metrics(
&self,
) -> Pin<Box<dyn std::future::Future<Output = Box<dyn erased_serde::Serialize + Send>> + Send>>
{
let queries = self.inner.metrics.queries.load(Ordering::Acquire) as f64;
let tt = self.inner.metrics.query_time_ns.load(Ordering::Acquire) as f64;
let m = Metrics {
posts: self.inner.metrics.posts.load(Ordering::Acquire),
avg_insert_time_ms: queries / tt * 1_000_000.,
save_errors: self.inner.metrics.save_errors.load(Ordering::Acquire),
};
let m: Box<dyn erased_serde::Serialize + Send> = Box::new(m);
futures::future::ready(m).boxed()
}
}
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct Search {
inner: Arc<SearchInner>,
}
impl Search {
#[allow(dead_code)]
pub fn builder() -> SearchBuilder {
SearchBuilder::default()
}
pub fn metrics_provider(&self) -> impl super::MetricsProvider {
SearchMetricsProvider {
inner: self.inner.clone(),
}
}
}
impl SearchInner {
async fn save_posts(&self, mut item: Vec<imageboard::Post>) -> Result<(), Error> {
let client = self.db_pool.get().await?;
while item.len() > 0 {
let start = Instant::now();
// Postgres only supports a maximum of 2^15 params
let (remain, posts) = if item.len() > 1280 {
let remain = item.split_off(1280);
(remain, item)
} else {
(vec![], item)
};
item = remain;
let rows = posts.len();
let query = "INSERT INTO
posts
(board, thread_no, post_no, subject, username, tripcode,
email, unique_id, since4_pass, country, filename,
image_hash, image_width, image_height, ts, comment, deleted,
ghost, sticky, spoiler, op, capcode) VALUES ";
let stmt = std::iter::once(Cow::Borrowed(query))
.chain((0..rows).map(|i| {
let z = i * 22;
Cow::Owned(
[
if i == 0 { "(" } else { "\n,(" },
PLACEHOLDERS[z], // board
",",
PLACEHOLDERS[z + 1], // thread_no
",",
PLACEHOLDERS[z + 2], // post_no
",to_tsvector(",
PLACEHOLDERS[z + 3], // subject
"),to_tsvector(",
PLACEHOLDERS[z + 4], // username
"),to_tsvector(",
PLACEHOLDERS[z + 5], // tripcode
"),to_tsvector(",
PLACEHOLDERS[z + 6], // email
"),",
PLACEHOLDERS[z + 7], // unique_id
",",
PLACEHOLDERS[z + 8], // since4_pass
",",
PLACEHOLDERS[z + 9], // country
",to_tsvector(REPLACE(",
PLACEHOLDERS[z + 10], // filename
",'.',' ')),",
PLACEHOLDERS[z + 11], // image_hash
",",
PLACEHOLDERS[z + 12], // image_width
",",
PLACEHOLDERS[z + 13], // image_height
",TO_TIMESTAMP(CAST(",
PLACEHOLDERS[z + 14], // ts
"::INT8 AS FLOAT8)),to_tsvector(",
PLACEHOLDERS[z + 15], // comment
"),",
PLACEHOLDERS[z + 16], // deleted
",",
PLACEHOLDERS[z + 17], // ghost
",",
PLACEHOLDERS[z + 18], // sticky
",",
PLACEHOLDERS[z + 19], // spoiler
",",
PLACEHOLDERS[z + 20], // op
",CAST(",
PLACEHOLDERS[z + 21], // capcode
"::INT8 AS INT4))",
]
.join(""),
)
}))
.chain(std::iter::once(Cow::Borrowed(
" ON CONFLICT (board, post_no) DO UPDATE SET
deleted = EXCLUDED.deleted,
sticky = EXCLUDED.sticky,
comment = COALESCE(EXCLUDED.comment, posts.comment);
",
)))
.collect::<String>();
let i64_rena = arena::Arena::new(posts.len() * 4);
let str_rena = arena::Arena::new(posts.len() * 4);
let params = (0..posts.len())
.into_iter()
.map(|i| {
let values: Box<[&(dyn ToSql + Sync)]> = Box::new([
str_rena.alloc(Some(posts[i].board.to_string())),
i64_rena.alloc(Some(posts[i].thread_no() as i64)),
i64_rena.alloc(Some(posts[i].no as i64)),
&posts[i].sub,
&posts[i].name,
&posts[i].trip,
&posts[i].email,
&posts[i].id,
&posts[i].since4pass,
str_rena.alloc(posts[i].poster_country()),
str_rena.alloc(posts[i].media_filename()),
&posts[i].md5,
&posts[i].w,
&posts[i].h,
i64_rena.alloc(Some(posts[i].time as i64)),
str_rena.alloc(posts[i].comment().map(|x| str_sanitize(x))),
&posts[i].deleted,
&false,
&posts[i].sticky,
&posts[i].spoiler,
if posts[i].is_op() { &true } else { &false },
i64_rena.alloc(posts[i].short_capcode().chars().next().map(|c| c as i64)),
]);
values.into_vec()
})
.flatten()
.collect::<Vec<_>>();
let mut attempts = 0;
let mut backoff = backoff::ExponentialBackoff::default();
backoff.max_elapsed_time = None;
loop {
let r = client.execute(stmt.as_str(), ¶ms).await;
match r {
Ok(_) => break,
Err(err) => {
if attempts >= self.retries_on_save_error {
return Err(Error::from(err));
}
attempts += 1;
if let Some(b) = backoff.next_backoff() {
tokio::time::delay_for(b).await;
}
continue;
}
}
}
self.metrics.incr_posts(rows as u64);
self.metrics.incr_query_time(start.elapsed());
self.notify_post(rows);
// Since values contains references to data in the 'renas,
// the values must be dropped before we drop the 'renas
drop(params);
drop(i64_rena);
drop(str_rena);
}
Ok(())
}
async fn send_posts(self: Arc<Self>, item: Vec<imageboard::Post>) {
let board = item[0].board;
let thread_no = item[0].thread_no();
let post_no = item[0].no;
let sz = item.len();
match self.save_posts(item).await {
Ok(_) => debug!(
"Flushed {} posts to postgres. [First]: {}/{}/{}",
sz, board, thread_no, post_no
),
Err(err) => {
error!(
"Failed to save data for {} posts [First]: {}/{}/{}: {}",
sz, board, thread_no, post_no, err
);
if !self.fail_on_save_error {
warn!("Some posts were unable to be archived, however the error isn't being treated as fatal. Some posts may be lost.")
}
self.metrics.incr_save_error(1);
self.failed.store(true, Ordering::SeqCst);
}
}
}
fn notify_post(&self, no_posts: usize) {
let old = self.inflight_posts.fetch_sub(no_posts, Ordering::AcqRel);
let curr = old - no_posts;
if curr < self.max_inflight_posts {
self.waker.wake();
}
if curr == 0 {
self.flush_waker.wake();
self.close_waker.wake();
}
}
fn is_ready(&self) -> bool | {
let posts = self.inflight_posts.load(Ordering::Acquire);
posts < self.max_inflight_posts
} | identifier_body |
|
file_system_persistence.go | protocol.BlockPairContainer) primitives.BlockHeight {
if block == nil {
return 0
}
return block.TransactionsBlock.Header.BlockHeight()
}
func (f *BlockPersistence) GracefulShutdown(shutdownContext context.Context) {
logger := f.logger.WithTags(log.String("filename", blocksFileName(f.config)))
if err := f.blockWriter.Close(); err != nil {
logger.Error("failed to close blocks file")
return
}
logger.Info("closed blocks file")
}
func NewBlockPersistence(conf config.FilesystemBlockPersistenceConfig, parent log.Logger, metricFactory metric.Factory) (*BlockPersistence, error) {
logger := parent.WithTags(log.String("adapter", "block-storage"))
metrics := generateBlockStorageMetrics(metricFactory)
codec := newCodec(conf.BlockStorageFileSystemMaxBlockSizeInBytes())
file, blocksOffset, err := openBlocksFile(conf, logger)
if err != nil {
return nil, err
}
bhIndex, err := buildIndex(bufio.NewReaderSize(file, 1024*1024), blocksOffset, logger, codec, metrics)
if err != nil |
newTip, err := newFileBlockWriter(file, codec, bhIndex.fetchNextOffset())
if err != nil {
closeSilently(file, logger)
return nil, err
}
adapter := &BlockPersistence{
bhIndex: bhIndex,
config: conf,
blockTracker: synchronization.NewBlockTracker(logger, uint64(bhIndex.getLastBlockHeight()), 5),
metrics: metrics,
logger: logger,
blockWriter: newTip,
codec: codec,
}
if size, err := getBlockFileSize(file); err != nil {
return adapter, err
} else {
adapter.metrics.sizeOnDisk.Add(size)
}
return adapter, nil
}
func getBlockFileSize(file *os.File) (int64, error) {
if fi, err := file.Stat(); err != nil {
return 0, errors.Wrap(err, "unable to read file size for metrics")
} else {
return fi.Size(), nil
}
}
func openBlocksFile(conf config.FilesystemBlockPersistenceConfig, logger log.Logger) (*os.File, int64, error) {
dir := conf.BlockStorageFileSystemDataDir()
filename := blocksFileName(conf)
err := os.MkdirAll(dir, os.ModePerm)
if err != nil {
return nil, 0, errors.Wrapf(err, "failed to verify data directory exists %s", dir)
}
file, err := os.OpenFile(filename, os.O_RDWR|os.O_CREATE, 0600)
if err != nil {
return nil, 0, errors.Wrapf(err, "failed to open blocks file for writing %s", filename)
}
err = advisoryLockExclusive(file)
if err != nil {
closeSilently(file, logger)
return nil, 0, errors.Wrapf(err, "failed to obtain exclusive lock for writing %s", filename)
}
firstBlockOffset, err := validateFileHeader(file, conf, logger)
if err != nil {
closeSilently(file, logger)
return nil, 0, errors.Wrapf(err, "failed to obtain exclusive lock for writing %s", filename)
}
return file, firstBlockOffset, nil
}
func validateFileHeader(file *os.File, conf config.FilesystemBlockPersistenceConfig, logger log.Logger) (int64, error) {
info, err := file.Stat()
if err != nil {
return 0, err
}
if info.Size() == 0 { // empty file
if err := writeNewFileHeader(file, conf, logger); err != nil {
return 0, err
}
}
offset, err := file.Seek(0, io.SeekStart)
if err != nil {
return 0, errors.Wrapf(err, "error reading blocks file header")
}
if offset != 0 {
return 0, fmt.Errorf("error reading blocks file header")
}
header := newBlocksFileHeader(0, 0)
err = header.read(file)
if err != nil {
return 0, errors.Wrapf(err, "error reading blocks file header")
}
if header.NetworkType != uint32(conf.NetworkType()) {
return 0, fmt.Errorf("blocks file network type mismatch. found netowrk type %d expected %d", header.NetworkType, conf.NetworkType())
}
if header.ChainId != uint32(conf.VirtualChainId()) {
return 0, fmt.Errorf("blocks file virtual chain id mismatch. found vchain id %d expected %d", header.ChainId, conf.VirtualChainId())
}
offset, err = file.Seek(0, io.SeekCurrent) // read current offset
if err != nil {
return 0, errors.Wrapf(err, "error reading blocks file header")
}
return offset, nil
}
func writeNewFileHeader(file *os.File, conf config.FilesystemBlockPersistenceConfig, logger log.Logger) error {
header := newBlocksFileHeader(uint32(conf.NetworkType()), uint32(conf.VirtualChainId()))
logger.Info("creating new blocks file", log.String("filename", file.Name()))
err := header.write(file)
if err != nil {
return errors.Wrapf(err, "error writing blocks file header")
}
err = file.Sync()
if err != nil {
return errors.Wrapf(err, "error writing blocks file header")
}
return nil
}
func advisoryLockExclusive(file *os.File) error {
return syscall.Flock(int(file.Fd()), syscall.LOCK_EX|syscall.LOCK_NB)
}
func newFileBlockWriter(file *os.File, codec blockCodec, nextBlockOffset int64) (*blockWriter, error) {
newOffset, err := file.Seek(nextBlockOffset, io.SeekStart)
if err != nil {
return nil, errors.Wrapf(err, "failed to seek to next block offset %d", nextBlockOffset)
}
if newOffset != nextBlockOffset {
return nil, fmt.Errorf("failed to seek to next block offset. requested offset %d, but new reached %d", nextBlockOffset, newOffset)
}
result := newBlockWriter(file, codec)
return result, nil
}
func buildIndex(r io.Reader, firstBlockOffset int64, logger log.Logger, c blockCodec, metrics *metrics) (*blockHeightIndex, error) {
bhIndex := newBlockHeightIndex(logger, firstBlockOffset)
offset := int64(firstBlockOffset)
for {
aBlock, blockSize, err := c.decode(r)
if err != nil {
if err == io.EOF {
logger.Info("built index", log.Int64("valid-block-bytes", offset), logfields.BlockHeight(bhIndex.getLastBlockHeight()))
} else {
logger.Error("built index, found and ignoring invalid block records", log.Int64("valid-block-bytes", offset), log.Error(err), logfields.BlockHeight(bhIndex.getLastBlockHeight()))
}
break // index up to EOF or first invalid record.
}
err = bhIndex.appendBlock(offset+int64(blockSize), aBlock, nil)
if err != nil {
return nil, errors.Wrap(err, "failed building block height index")
}
metrics.indexLastUpdateTime.Update(time.Now().Unix())
offset = offset + int64(blockSize)
}
return bhIndex, nil
}
func (f *BlockPersistence) WriteNextBlock(blockPair *protocol.BlockPairContainer) (bool, primitives.BlockHeight, error) {
f.blockWriter.Lock()
defer f.blockWriter.Unlock()
bh := getBlockHeight(blockPair)
if err := f.bhIndex.validateCandidateBlockHeight(bh); err != nil {
return false, f.bhIndex.getLastBlockHeight(), nil
}
n, err := f.blockWriter.writeBlock(blockPair)
if err != nil {
return false, f.bhIndex.getLastBlockHeight(), err
}
startPos := f.bhIndex.fetchNextOffset()
err = f.bhIndex.appendBlock(startPos+int64(n), blockPair, f.blockTracker)
if err != nil {
return false, f.bhIndex.getLastBlockHeight(), errors.Wrap(err, "failed to update index after writing block")
}
f.metrics.indexLastUpdateTime.Update(time.Now().Unix())
f.metrics.sizeOnDisk.Add(int64(n))
return true, f.bhIndex.getLastBlockHeight(), nil
}
func (f *BlockPersistence) ScanBlocks(from primitives.BlockHeight, pageSize uint8, cursor adapter.CursorFunc) error {
sequentialHeight := f.bhIndex.getLastBlockHeight()
if (sequentialHeight < from) || from == 0 {
return fmt.Errorf("requested unsupported block height %d. Supported range for scan is determined by sequence top height (%d)", from, sequentialHeight)
}
file, err := os.Open(f.blockFileName())
if err != nil {
return errors.Wrap(err, "failed to open blocks file for reading")
}
defer closeSilently(file, f.logger)
var offset int64
fromHeight := from
wantsMore := true
eof := false
for fromHeight <= sequentialHeight && wantsMore && !eof {
toHeight := fromHeight + primitives.BlockHeight(pageSize) - 1
if toHeight > sequentialHeight {
toHeight = sequentialHeight
}
page := make([]*protocol.BlockPairContainer, 0, pageSize)
for height := fromHeight; height <= toHeight; height++ {
aBlock, nextOffset, err := f.fetchBlockFromFile(height, | {
closeSilently(file, logger)
return nil, err
} | conditional_block |
file_system_persistence.go | protocol.BlockPairContainer) primitives.BlockHeight {
if block == nil {
return 0
}
return block.TransactionsBlock.Header.BlockHeight()
}
func (f *BlockPersistence) GracefulShutdown(shutdownContext context.Context) {
logger := f.logger.WithTags(log.String("filename", blocksFileName(f.config)))
if err := f.blockWriter.Close(); err != nil {
logger.Error("failed to close blocks file")
return
}
logger.Info("closed blocks file")
}
func NewBlockPersistence(conf config.FilesystemBlockPersistenceConfig, parent log.Logger, metricFactory metric.Factory) (*BlockPersistence, error) {
logger := parent.WithTags(log.String("adapter", "block-storage"))
metrics := generateBlockStorageMetrics(metricFactory)
codec := newCodec(conf.BlockStorageFileSystemMaxBlockSizeInBytes())
file, blocksOffset, err := openBlocksFile(conf, logger)
if err != nil {
return nil, err
}
bhIndex, err := buildIndex(bufio.NewReaderSize(file, 1024*1024), blocksOffset, logger, codec, metrics)
if err != nil {
closeSilently(file, logger)
return nil, err
}
newTip, err := newFileBlockWriter(file, codec, bhIndex.fetchNextOffset())
if err != nil {
closeSilently(file, logger)
return nil, err
}
adapter := &BlockPersistence{
bhIndex: bhIndex,
config: conf,
blockTracker: synchronization.NewBlockTracker(logger, uint64(bhIndex.getLastBlockHeight()), 5),
metrics: metrics,
logger: logger,
blockWriter: newTip,
codec: codec,
}
if size, err := getBlockFileSize(file); err != nil {
return adapter, err
} else {
adapter.metrics.sizeOnDisk.Add(size)
}
return adapter, nil
}
func getBlockFileSize(file *os.File) (int64, error) {
if fi, err := file.Stat(); err != nil {
return 0, errors.Wrap(err, "unable to read file size for metrics")
} else {
return fi.Size(), nil
}
}
func openBlocksFile(conf config.FilesystemBlockPersistenceConfig, logger log.Logger) (*os.File, int64, error) {
dir := conf.BlockStorageFileSystemDataDir()
filename := blocksFileName(conf)
err := os.MkdirAll(dir, os.ModePerm)
if err != nil {
return nil, 0, errors.Wrapf(err, "failed to verify data directory exists %s", dir)
}
file, err := os.OpenFile(filename, os.O_RDWR|os.O_CREATE, 0600)
if err != nil {
return nil, 0, errors.Wrapf(err, "failed to open blocks file for writing %s", filename)
}
err = advisoryLockExclusive(file)
if err != nil {
closeSilently(file, logger)
return nil, 0, errors.Wrapf(err, "failed to obtain exclusive lock for writing %s", filename)
}
firstBlockOffset, err := validateFileHeader(file, conf, logger)
if err != nil {
closeSilently(file, logger)
return nil, 0, errors.Wrapf(err, "failed to obtain exclusive lock for writing %s", filename)
}
return file, firstBlockOffset, nil
}
func validateFileHeader(file *os.File, conf config.FilesystemBlockPersistenceConfig, logger log.Logger) (int64, error) {
info, err := file.Stat()
if err != nil {
return 0, err
}
if info.Size() == 0 { // empty file
if err := writeNewFileHeader(file, conf, logger); err != nil {
return 0, err
}
}
offset, err := file.Seek(0, io.SeekStart)
if err != nil {
return 0, errors.Wrapf(err, "error reading blocks file header")
}
if offset != 0 {
return 0, fmt.Errorf("error reading blocks file header")
}
header := newBlocksFileHeader(0, 0)
err = header.read(file)
if err != nil {
return 0, errors.Wrapf(err, "error reading blocks file header")
}
if header.NetworkType != uint32(conf.NetworkType()) {
return 0, fmt.Errorf("blocks file network type mismatch. found netowrk type %d expected %d", header.NetworkType, conf.NetworkType())
}
if header.ChainId != uint32(conf.VirtualChainId()) {
return 0, fmt.Errorf("blocks file virtual chain id mismatch. found vchain id %d expected %d", header.ChainId, conf.VirtualChainId())
}
offset, err = file.Seek(0, io.SeekCurrent) // read current offset
if err != nil {
return 0, errors.Wrapf(err, "error reading blocks file header")
}
return offset, nil
}
func | (file *os.File, conf config.FilesystemBlockPersistenceConfig, logger log.Logger) error {
header := newBlocksFileHeader(uint32(conf.NetworkType()), uint32(conf.VirtualChainId()))
logger.Info("creating new blocks file", log.String("filename", file.Name()))
err := header.write(file)
if err != nil {
return errors.Wrapf(err, "error writing blocks file header")
}
err = file.Sync()
if err != nil {
return errors.Wrapf(err, "error writing blocks file header")
}
return nil
}
func advisoryLockExclusive(file *os.File) error {
return syscall.Flock(int(file.Fd()), syscall.LOCK_EX|syscall.LOCK_NB)
}
func newFileBlockWriter(file *os.File, codec blockCodec, nextBlockOffset int64) (*blockWriter, error) {
newOffset, err := file.Seek(nextBlockOffset, io.SeekStart)
if err != nil {
return nil, errors.Wrapf(err, "failed to seek to next block offset %d", nextBlockOffset)
}
if newOffset != nextBlockOffset {
return nil, fmt.Errorf("failed to seek to next block offset. requested offset %d, but new reached %d", nextBlockOffset, newOffset)
}
result := newBlockWriter(file, codec)
return result, nil
}
func buildIndex(r io.Reader, firstBlockOffset int64, logger log.Logger, c blockCodec, metrics *metrics) (*blockHeightIndex, error) {
bhIndex := newBlockHeightIndex(logger, firstBlockOffset)
offset := int64(firstBlockOffset)
for {
aBlock, blockSize, err := c.decode(r)
if err != nil {
if err == io.EOF {
logger.Info("built index", log.Int64("valid-block-bytes", offset), logfields.BlockHeight(bhIndex.getLastBlockHeight()))
} else {
logger.Error("built index, found and ignoring invalid block records", log.Int64("valid-block-bytes", offset), log.Error(err), logfields.BlockHeight(bhIndex.getLastBlockHeight()))
}
break // index up to EOF or first invalid record.
}
err = bhIndex.appendBlock(offset+int64(blockSize), aBlock, nil)
if err != nil {
return nil, errors.Wrap(err, "failed building block height index")
}
metrics.indexLastUpdateTime.Update(time.Now().Unix())
offset = offset + int64(blockSize)
}
return bhIndex, nil
}
func (f *BlockPersistence) WriteNextBlock(blockPair *protocol.BlockPairContainer) (bool, primitives.BlockHeight, error) {
f.blockWriter.Lock()
defer f.blockWriter.Unlock()
bh := getBlockHeight(blockPair)
if err := f.bhIndex.validateCandidateBlockHeight(bh); err != nil {
return false, f.bhIndex.getLastBlockHeight(), nil
}
n, err := f.blockWriter.writeBlock(blockPair)
if err != nil {
return false, f.bhIndex.getLastBlockHeight(), err
}
startPos := f.bhIndex.fetchNextOffset()
err = f.bhIndex.appendBlock(startPos+int64(n), blockPair, f.blockTracker)
if err != nil {
return false, f.bhIndex.getLastBlockHeight(), errors.Wrap(err, "failed to update index after writing block")
}
f.metrics.indexLastUpdateTime.Update(time.Now().Unix())
f.metrics.sizeOnDisk.Add(int64(n))
return true, f.bhIndex.getLastBlockHeight(), nil
}
func (f *BlockPersistence) ScanBlocks(from primitives.BlockHeight, pageSize uint8, cursor adapter.CursorFunc) error {
sequentialHeight := f.bhIndex.getLastBlockHeight()
if (sequentialHeight < from) || from == 0 {
return fmt.Errorf("requested unsupported block height %d. Supported range for scan is determined by sequence top height (%d)", from, sequentialHeight)
}
file, err := os.Open(f.blockFileName())
if err != nil {
return errors.Wrap(err, "failed to open blocks file for reading")
}
defer closeSilently(file, f.logger)
var offset int64
fromHeight := from
wantsMore := true
eof := false
for fromHeight <= sequentialHeight && wantsMore && !eof {
toHeight := fromHeight + primitives.BlockHeight(pageSize) - 1
if toHeight > sequentialHeight {
toHeight = sequentialHeight
}
page := make([]*protocol.BlockPairContainer, 0, pageSize)
for height := fromHeight; height <= toHeight; height++ {
aBlock, nextOffset, err := f.fetchBlockFromFile(height, offset | writeNewFileHeader | identifier_name |
file_system_persistence.go | Id, conf.VirtualChainId())
}
offset, err = file.Seek(0, io.SeekCurrent) // read current offset
if err != nil {
return 0, errors.Wrapf(err, "error reading blocks file header")
}
return offset, nil
}
func writeNewFileHeader(file *os.File, conf config.FilesystemBlockPersistenceConfig, logger log.Logger) error {
header := newBlocksFileHeader(uint32(conf.NetworkType()), uint32(conf.VirtualChainId()))
logger.Info("creating new blocks file", log.String("filename", file.Name()))
err := header.write(file)
if err != nil {
return errors.Wrapf(err, "error writing blocks file header")
}
err = file.Sync()
if err != nil {
return errors.Wrapf(err, "error writing blocks file header")
}
return nil
}
func advisoryLockExclusive(file *os.File) error {
return syscall.Flock(int(file.Fd()), syscall.LOCK_EX|syscall.LOCK_NB)
}
func newFileBlockWriter(file *os.File, codec blockCodec, nextBlockOffset int64) (*blockWriter, error) {
newOffset, err := file.Seek(nextBlockOffset, io.SeekStart)
if err != nil {
return nil, errors.Wrapf(err, "failed to seek to next block offset %d", nextBlockOffset)
}
if newOffset != nextBlockOffset {
return nil, fmt.Errorf("failed to seek to next block offset. requested offset %d, but new reached %d", nextBlockOffset, newOffset)
}
result := newBlockWriter(file, codec)
return result, nil
}
func buildIndex(r io.Reader, firstBlockOffset int64, logger log.Logger, c blockCodec, metrics *metrics) (*blockHeightIndex, error) {
bhIndex := newBlockHeightIndex(logger, firstBlockOffset)
offset := int64(firstBlockOffset)
for {
aBlock, blockSize, err := c.decode(r)
if err != nil {
if err == io.EOF {
logger.Info("built index", log.Int64("valid-block-bytes", offset), logfields.BlockHeight(bhIndex.getLastBlockHeight()))
} else {
logger.Error("built index, found and ignoring invalid block records", log.Int64("valid-block-bytes", offset), log.Error(err), logfields.BlockHeight(bhIndex.getLastBlockHeight()))
}
break // index up to EOF or first invalid record.
}
err = bhIndex.appendBlock(offset+int64(blockSize), aBlock, nil)
if err != nil {
return nil, errors.Wrap(err, "failed building block height index")
}
metrics.indexLastUpdateTime.Update(time.Now().Unix())
offset = offset + int64(blockSize)
}
return bhIndex, nil
}
func (f *BlockPersistence) WriteNextBlock(blockPair *protocol.BlockPairContainer) (bool, primitives.BlockHeight, error) {
f.blockWriter.Lock()
defer f.blockWriter.Unlock()
bh := getBlockHeight(blockPair)
if err := f.bhIndex.validateCandidateBlockHeight(bh); err != nil {
return false, f.bhIndex.getLastBlockHeight(), nil
}
n, err := f.blockWriter.writeBlock(blockPair)
if err != nil {
return false, f.bhIndex.getLastBlockHeight(), err
}
startPos := f.bhIndex.fetchNextOffset()
err = f.bhIndex.appendBlock(startPos+int64(n), blockPair, f.blockTracker)
if err != nil {
return false, f.bhIndex.getLastBlockHeight(), errors.Wrap(err, "failed to update index after writing block")
}
f.metrics.indexLastUpdateTime.Update(time.Now().Unix())
f.metrics.sizeOnDisk.Add(int64(n))
return true, f.bhIndex.getLastBlockHeight(), nil
}
func (f *BlockPersistence) ScanBlocks(from primitives.BlockHeight, pageSize uint8, cursor adapter.CursorFunc) error {
sequentialHeight := f.bhIndex.getLastBlockHeight()
if (sequentialHeight < from) || from == 0 {
return fmt.Errorf("requested unsupported block height %d. Supported range for scan is determined by sequence top height (%d)", from, sequentialHeight)
}
file, err := os.Open(f.blockFileName())
if err != nil {
return errors.Wrap(err, "failed to open blocks file for reading")
}
defer closeSilently(file, f.logger)
var offset int64
fromHeight := from
wantsMore := true
eof := false
for fromHeight <= sequentialHeight && wantsMore && !eof {
toHeight := fromHeight + primitives.BlockHeight(pageSize) - 1
if toHeight > sequentialHeight {
toHeight = sequentialHeight
}
page := make([]*protocol.BlockPairContainer, 0, pageSize)
for height := fromHeight; height <= toHeight; height++ {
aBlock, nextOffset, err := f.fetchBlockFromFile(height, offset, file)
if err != nil {
if err == io.EOF || err == io.ErrUnexpectedEOF {
eof = true
break
}
return errors.Wrapf(err, "failed to decode block")
}
offset = nextOffset
page = append(page, aBlock)
}
if len(page) > 0 {
wantsMore = cursor(page[0].ResultsBlock.Header.BlockHeight(), page)
}
sequentialHeight = f.bhIndex.getLastBlockHeight()
fromHeight = toHeight + 1
}
return nil
}
func (f *BlockPersistence) seekBlockOffset(height primitives.BlockHeight, currentOffset int64, file *os.File) (error, int64) {
if expectedOffset, ok := f.bhIndex.fetchBlockOffset(height); ok {
if currentOffset != expectedOffset {
newOffset, err := file.Seek(expectedOffset, io.SeekStart)
if newOffset != expectedOffset || err != nil {
return errors.Wrapf(err, "failed to seek in blocks file to position %v", expectedOffset), 0
}
}
return nil, expectedOffset
}
return fmt.Errorf("failed to find requested block %d", uint64(height)), 0
}
func (f *BlockPersistence) fetchBlockFromFile(height primitives.BlockHeight, currentOffset int64, file *os.File) (*protocol.BlockPairContainer, int64, error) {
err, newOffset := f.seekBlockOffset(height, currentOffset, file)
if err != nil {
return nil, 0, err
}
aBlock, blockSize, err := f.codec.decode(file)
if err != nil {
return nil, 0, err
}
return aBlock, newOffset + int64(blockSize), nil
}
func (f *BlockPersistence) GetLastBlockHeight() (primitives.BlockHeight, error) {
return f.bhIndex.getLastBlockHeight(), nil
}
func (f *BlockPersistence) GetLastBlock() (*protocol.BlockPairContainer, error) {
return f.bhIndex.getLastBlock(), nil
}
func (f *BlockPersistence) GetTransactionsBlock(height primitives.BlockHeight) (*protocol.TransactionsBlockContainer, error) {
bpc, err := f.GetBlock(height)
if err != nil {
return nil, err
}
return bpc.TransactionsBlock, nil
}
func (f *BlockPersistence) GetResultsBlock(height primitives.BlockHeight) (*protocol.ResultsBlockContainer, error) {
bpc, err := f.GetBlock(height)
if err != nil {
return nil, err
}
return bpc.ResultsBlock, nil
}
func (f *BlockPersistence) GetBlock(height primitives.BlockHeight) (*protocol.BlockPairContainer, error) {
file, err := os.Open(f.blockFileName())
if err != nil {
return nil, errors.Wrap(err, "failed to open blocks file for reading")
}
defer closeSilently(file, f.logger)
if aBlock, _, err := f.fetchBlockFromFile(height, 0, file); err != nil {
return nil, errors.Wrapf(err, "failed to decode block")
} else {
return aBlock, nil
}
}
func (f *BlockPersistence) GetBlockByTx(txHash primitives.Sha256, minBlockTs primitives.TimestampNano, maxBlockTs primitives.TimestampNano) (block *protocol.BlockPairContainer, txIndexInBlock int, err error) {
scanFrom, ok := f.bhIndex.getEarliestTxBlockInBucketForTsRange(minBlockTs, maxBlockTs)
if !ok {
return nil, 0, nil
}
err = f.ScanBlocks(scanFrom, 1, func(h primitives.BlockHeight, page []*protocol.BlockPairContainer) (wantsMore bool) {
b := page[0]
if b.ResultsBlock.Header.Timestamp() > maxBlockTs {
return false
}
if b.ResultsBlock.Header.Timestamp() < minBlockTs {
return true
}
for i, receipt := range b.ResultsBlock.TransactionReceipts {
if bytes.Equal(receipt.Txhash(), txHash) { // found requested transaction
block = b
txIndexInBlock = i
return false
}
}
return true
})
if err != nil {
return nil, 0, errors.Wrap(err, "failed to fetch block by txHash")
}
return block, txIndexInBlock, nil
}
func (f *BlockPersistence) GetBlockTracker() *synchronization.BlockTracker {
return f.blockTracker
}
func (f *BlockPersistence) blockFileName() string | {
return blocksFileName(f.config)
} | identifier_body |
|
file_system_persistence.go | protocol.BlockPairContainer) primitives.BlockHeight {
if block == nil {
return 0
}
return block.TransactionsBlock.Header.BlockHeight()
}
func (f *BlockPersistence) GracefulShutdown(shutdownContext context.Context) {
logger := f.logger.WithTags(log.String("filename", blocksFileName(f.config)))
if err := f.blockWriter.Close(); err != nil {
logger.Error("failed to close blocks file")
return
}
logger.Info("closed blocks file")
}
func NewBlockPersistence(conf config.FilesystemBlockPersistenceConfig, parent log.Logger, metricFactory metric.Factory) (*BlockPersistence, error) {
logger := parent.WithTags(log.String("adapter", "block-storage"))
metrics := generateBlockStorageMetrics(metricFactory)
codec := newCodec(conf.BlockStorageFileSystemMaxBlockSizeInBytes())
file, blocksOffset, err := openBlocksFile(conf, logger)
if err != nil {
return nil, err
}
bhIndex, err := buildIndex(bufio.NewReaderSize(file, 1024*1024), blocksOffset, logger, codec, metrics)
if err != nil {
closeSilently(file, logger)
return nil, err
}
newTip, err := newFileBlockWriter(file, codec, bhIndex.fetchNextOffset())
if err != nil {
closeSilently(file, logger)
return nil, err
}
adapter := &BlockPersistence{
bhIndex: bhIndex,
config: conf,
blockTracker: synchronization.NewBlockTracker(logger, uint64(bhIndex.getLastBlockHeight()), 5),
metrics: metrics,
logger: logger,
blockWriter: newTip,
codec: codec,
}
if size, err := getBlockFileSize(file); err != nil {
return adapter, err
} else {
adapter.metrics.sizeOnDisk.Add(size)
}
return adapter, nil
}
func getBlockFileSize(file *os.File) (int64, error) {
if fi, err := file.Stat(); err != nil {
return 0, errors.Wrap(err, "unable to read file size for metrics")
} else {
return fi.Size(), nil
}
}
func openBlocksFile(conf config.FilesystemBlockPersistenceConfig, logger log.Logger) (*os.File, int64, error) {
dir := conf.BlockStorageFileSystemDataDir()
filename := blocksFileName(conf)
err := os.MkdirAll(dir, os.ModePerm)
if err != nil {
return nil, 0, errors.Wrapf(err, "failed to verify data directory exists %s", dir)
}
file, err := os.OpenFile(filename, os.O_RDWR|os.O_CREATE, 0600)
if err != nil {
return nil, 0, errors.Wrapf(err, "failed to open blocks file for writing %s", filename)
}
err = advisoryLockExclusive(file)
if err != nil {
closeSilently(file, logger)
return nil, 0, errors.Wrapf(err, "failed to obtain exclusive lock for writing %s", filename)
}
firstBlockOffset, err := validateFileHeader(file, conf, logger)
if err != nil {
closeSilently(file, logger)
return nil, 0, errors.Wrapf(err, "failed to obtain exclusive lock for writing %s", filename)
}
return file, firstBlockOffset, nil
}
func validateFileHeader(file *os.File, conf config.FilesystemBlockPersistenceConfig, logger log.Logger) (int64, error) {
info, err := file.Stat()
if err != nil {
return 0, err
}
if info.Size() == 0 { // empty file
if err := writeNewFileHeader(file, conf, logger); err != nil {
return 0, err
}
}
offset, err := file.Seek(0, io.SeekStart)
if err != nil {
return 0, errors.Wrapf(err, "error reading blocks file header")
}
if offset != 0 {
return 0, fmt.Errorf("error reading blocks file header")
} | return 0, errors.Wrapf(err, "error reading blocks file header")
}
if header.NetworkType != uint32(conf.NetworkType()) {
return 0, fmt.Errorf("blocks file network type mismatch. found netowrk type %d expected %d", header.NetworkType, conf.NetworkType())
}
if header.ChainId != uint32(conf.VirtualChainId()) {
return 0, fmt.Errorf("blocks file virtual chain id mismatch. found vchain id %d expected %d", header.ChainId, conf.VirtualChainId())
}
offset, err = file.Seek(0, io.SeekCurrent) // read current offset
if err != nil {
return 0, errors.Wrapf(err, "error reading blocks file header")
}
return offset, nil
}
func writeNewFileHeader(file *os.File, conf config.FilesystemBlockPersistenceConfig, logger log.Logger) error {
header := newBlocksFileHeader(uint32(conf.NetworkType()), uint32(conf.VirtualChainId()))
logger.Info("creating new blocks file", log.String("filename", file.Name()))
err := header.write(file)
if err != nil {
return errors.Wrapf(err, "error writing blocks file header")
}
err = file.Sync()
if err != nil {
return errors.Wrapf(err, "error writing blocks file header")
}
return nil
}
func advisoryLockExclusive(file *os.File) error {
return syscall.Flock(int(file.Fd()), syscall.LOCK_EX|syscall.LOCK_NB)
}
func newFileBlockWriter(file *os.File, codec blockCodec, nextBlockOffset int64) (*blockWriter, error) {
newOffset, err := file.Seek(nextBlockOffset, io.SeekStart)
if err != nil {
return nil, errors.Wrapf(err, "failed to seek to next block offset %d", nextBlockOffset)
}
if newOffset != nextBlockOffset {
return nil, fmt.Errorf("failed to seek to next block offset. requested offset %d, but new reached %d", nextBlockOffset, newOffset)
}
result := newBlockWriter(file, codec)
return result, nil
}
func buildIndex(r io.Reader, firstBlockOffset int64, logger log.Logger, c blockCodec, metrics *metrics) (*blockHeightIndex, error) {
bhIndex := newBlockHeightIndex(logger, firstBlockOffset)
offset := int64(firstBlockOffset)
for {
aBlock, blockSize, err := c.decode(r)
if err != nil {
if err == io.EOF {
logger.Info("built index", log.Int64("valid-block-bytes", offset), logfields.BlockHeight(bhIndex.getLastBlockHeight()))
} else {
logger.Error("built index, found and ignoring invalid block records", log.Int64("valid-block-bytes", offset), log.Error(err), logfields.BlockHeight(bhIndex.getLastBlockHeight()))
}
break // index up to EOF or first invalid record.
}
err = bhIndex.appendBlock(offset+int64(blockSize), aBlock, nil)
if err != nil {
return nil, errors.Wrap(err, "failed building block height index")
}
metrics.indexLastUpdateTime.Update(time.Now().Unix())
offset = offset + int64(blockSize)
}
return bhIndex, nil
}
func (f *BlockPersistence) WriteNextBlock(blockPair *protocol.BlockPairContainer) (bool, primitives.BlockHeight, error) {
f.blockWriter.Lock()
defer f.blockWriter.Unlock()
bh := getBlockHeight(blockPair)
if err := f.bhIndex.validateCandidateBlockHeight(bh); err != nil {
return false, f.bhIndex.getLastBlockHeight(), nil
}
n, err := f.blockWriter.writeBlock(blockPair)
if err != nil {
return false, f.bhIndex.getLastBlockHeight(), err
}
startPos := f.bhIndex.fetchNextOffset()
err = f.bhIndex.appendBlock(startPos+int64(n), blockPair, f.blockTracker)
if err != nil {
return false, f.bhIndex.getLastBlockHeight(), errors.Wrap(err, "failed to update index after writing block")
}
f.metrics.indexLastUpdateTime.Update(time.Now().Unix())
f.metrics.sizeOnDisk.Add(int64(n))
return true, f.bhIndex.getLastBlockHeight(), nil
}
func (f *BlockPersistence) ScanBlocks(from primitives.BlockHeight, pageSize uint8, cursor adapter.CursorFunc) error {
sequentialHeight := f.bhIndex.getLastBlockHeight()
if (sequentialHeight < from) || from == 0 {
return fmt.Errorf("requested unsupported block height %d. Supported range for scan is determined by sequence top height (%d)", from, sequentialHeight)
}
file, err := os.Open(f.blockFileName())
if err != nil {
return errors.Wrap(err, "failed to open blocks file for reading")
}
defer closeSilently(file, f.logger)
var offset int64
fromHeight := from
wantsMore := true
eof := false
for fromHeight <= sequentialHeight && wantsMore && !eof {
toHeight := fromHeight + primitives.BlockHeight(pageSize) - 1
if toHeight > sequentialHeight {
toHeight = sequentialHeight
}
page := make([]*protocol.BlockPairContainer, 0, pageSize)
for height := fromHeight; height <= toHeight; height++ {
aBlock, nextOffset, err := f.fetchBlockFromFile(height, offset |
header := newBlocksFileHeader(0, 0)
err = header.read(file)
if err != nil { | random_line_split |
app.js | ;
element = document.links[2];
element = document.links[2].id;
element = document.links[2].className;
element = document.links[2].classList;
//forms
element = document.forms[0];
element = document.forms[0].method;
element = document.forms[0].action;
element = document.forms[0].className;
element = document.forms[0].classList;
// images
element = document.images;
//scripts
element = document.scripts[0].getAttribute('src');
//looping over all the images
let images = document.images;
let imagesArray = Array.from(images);
imagesArray.forEach(function(image){
return image.src;
});
console.log(imagesArray);
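//illustrative alternative: the forEach above ignores return values, while map collects them into a new array
let imageSources = imagesArray.map(function(image){
return image.src;
});
console.log(imageSources);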
//selecting DOM elements
let heading = document.getElementById('heading');
console.log(heading.textContent);
//change the CSS
heading.style.backgroundColor = '#333';
heading.style.color = "#FFFFFF";
heading.style.padding = "10px";
heading.style.marginTop = "50px";
//change the text
heading.textContent = "The best courses";
//another way
heading.innerText = "Online course";
//query selector
let learningHeading = document.querySelector('#learn');
console.log(learningHeading);
//select a class with querySelector
const tagLine = document.querySelector(".tagline");
//const newText = tagLine.textContent.replace('$15', "$12");
//tagLine.textContent = newText;
console.log(tagLine);
let card = document.querySelector(".card");
//select a tag
let heading2 = document.querySelector('h2');
//nest your selectors with a CSS syntax
let image = document.querySelector('.card img');
//query selector supports nth-child, first-child or last-child
let link;
link = document.querySelector('#primary a:first-child');
link = document.querySelector('#primary a:last-child');
link = document.querySelector('#primary a:nth-child(3)');
//getElementsByClassName
//const links = document.getElementsByClassName('link');
//links[0].style.color = 'red';
//another ways
//const links = document.getElementById('primary').getElementsByClassName('link');
const links = document.querySelector('#primary').getElementsByClassName('link');
const images2 = document.getElementsByTagName('img');
///convert HTML collection into an array
let imagesArray2 = Array.from(images2);
//getElementByTagName
const heading1 = document.getElementsByTagName('h1');
const cards = document.querySelectorAll('.card');
const courses = document.querySelectorAll('.card h4');
const coursesArray = Array.from(courses);
coursesArray.forEach(function(course){
return course;
});
//select odd links
const oddLinks = document.querySelectorAll('#primary a:nth-child(odd)');
oddLinks.forEach(function(odd){
odd.style.backgroundColor = 'red';
odd.style.color = 'white';
});
//select even links
const evenLinks = document.querySelectorAll('#primary a:nth-child(even)');
evenLinks.forEach(function(even){
even.style.backgroundColor = 'blue';
even.style.color = 'white';
});
//change all the add-to-cart button text
const addCartBtns = document.querySelectorAll('.add-to-cart');
addCartBtns.forEach(function(button){
button.textContent = 'Something new'
});
//traversing the DOM
const navigation = document.querySelector('#primary');
let element1;
element1 = navigation.childNodes;
navigation.children[0].textContent = 'New text';
element1 = navigation.children[0].nodeName;//name of element
element1 = navigation.children[0].nodeType;//number of element
//1 - element
//2 - attributes
//3 - text nodes
//8 - comments
//9 - document
//10 - doctype
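// Added sketch (not in the original notes): the codes above can be checked directly,
// e.g. to skip the whitespace text nodes (nodeType 3) mixed into childNodes.
navigation.childNodes.forEach(function(node){
    if (node.nodeType === 1) {
        console.log('element child:', node.nodeName);
    }
});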
element1 = navigation.children;
const coursesList = document.querySelector('#courses-list');
element1 = coursesList.children[1].children[0].children[0].children[1].lastElementChild;//last element child of component
element1 = coursesList.children[1].children[0].children[0].children[1].firstElementChild;//first element child of component
element1 = coursesList.children[1].children[0].children[0].children[1].childElementCount;//how many element
//traversing from the children to parent
let cartBtn = document.querySelector('.add-to-cart');
element1 = cartBtn;
//parent node
element1 = cartBtn.parentNode;
element1 = cartBtn.parentElement.parentElement.children[0];//best solution
//sibling
element1 = cartBtn.previousElementSibling;
//course name
const courseName = cartBtn.parentElement.querySelector('h4');
//next element sibling
element1 = courseName.nextElementSibling;
console.log(element1);
//creating HTML elements with js
//create a new element
const newLink = document.createElement('a');
//add a class
newLink.className = "link";
//add the href
newLink.href = '#';
//newLink.setAttribute('href', '#');//second method
//add the text
newLink.appendChild(document.createTextNode('New Link'));
//add element to the HTML
document.querySelector('#primary').appendChild(newLink);
//replace an element
const newHeading = document.createElement('h2');
//add an id
newHeading.id = 'heading';
//add the class
newHeading.classList.add('heading');
//add the new text
newHeading.appendChild(document.createTextNode('The best courses'));
//select old heading
const oldHeading = document.querySelector('#heading');
//replace parent
coursesList.replaceChild(newHeading,oldHeading);
//remove elements
const links1 = document.querySelectorAll('.link');
links1[0].remove();
const cards1 = document.querySelectorAll('.card');
cards1[11].remove();
//remove by the children
const links2 = document.querySelectorAll('#primary .link');
navigation.removeChild(links2[4]);
//classes, ids and attributes
const link1 = document.querySelector('.link');
element1 = link1;
//read the class
element1 = link1.className;
//read the class (DOM token list)
element1 = link1.classList;
//access specific class with classList
element1 = link1.classList[0];
//add a new class
link1.classList.add('new-class');
//remove the class
link1.classList.remove('new-class');
//id
link1.id = 'new-id';
//remove the id
link1.id = '';
link1.removeAttribute('id');
//attributes function
element1 = link1.getAttribute('href');
element1 = link1.getAttribute('class');
element1 = link1.setAttribute('href', "http://facebook.com");
element1 = link1.setAttribute('target', "_blank");
element1 = link1.setAttribute('data-link', '10');
element1 = link1.hasAttribute('data-link');
element1 = link1.removeAttribute('data-link');
console.log(element1);
//addEventListener
const clearCartBtn = document.getElementById('clear-cart');
clearCartBtn.addEventListener('click',clearBtnFunction);
function clearBtnFunction(event){
//target
let elementRemove;
elementRemove = event;
//read the target
elementRemove = event.target;
elementRemove = event.target.id;
elementRemove = event.target.className;
elementRemove = event.target.innerText;
console.log(elementRemove);
}
//mouse events
function printEvent(event){
/*if(searchInput.value.length ===0){
alert("Type something")
}*/ //validate
//event.preventDefault(); when submit
console.log(searchInput.value);
console.log(`the event is: ${event.type}`)
}
//click mouse event
//clearCartBtn.addEventListener('click',printEvent);
//double click
clearCartBtn.addEventListener('dblclick',printEvent);
//mouse enter
clearCartBtn.addEventListener('mouseenter',printEvent);
//mouse leave
clearCartBtn.addEventListener('mouseleave',printEvent);
//mouse over
clearCartBtn.addEventListener('mouseover',printEvent);
//mouse out
clearCartBtn.addEventListener('mouseout',printEvent);
//mouse up
clearCartBtn.addEventListener('mouseup',printEvent);
//mouse down
clearCartBtn.addEventListener('mousedown',printEvent);
//input and form events
const searchForm = document.getElementById('search'),
searchInput = document.getElementById('search-course');
//events for <form>
searchForm.addEventListener('submit', printEvent);
//input events
//searchInput.addEventListener('keydown', printEvent);
//searchInput.addEventListener('keyup', printEvent);
//searchInput.addEventListener('keypress', printEvent);
//searchInput.addEventListener('focus', printEvent);
//searchInput.addEventListener('blur', printEvent);
//searchInput.addEventListener('cut', printEvent);
//searchInput.addEventListener('copy', printEvent);
//searchInput.addEventListener('paste', printEvent);
searchInput.addEventListener('input', printEvent);
//event bubbling
const cardCourse = document.querySelector('.card'),
infoCards = document.querySelector('.info-card'),
addCartBtn = document.querySelector('.add-to-cart');
cardCourse.addEventListener('click', function(event){
console.log('You clicked the card');
event.stopPropagation();
});
infoCards.addEventListener('click', function(event){
console.log('You clicked the info');
event.stopPropagation();
});
addCartBtn.addEventListener('click', function(event){
console.log('You clicked the button');
event.stopPropagation();
});
//delegation
const shoppingCart = document.querySelector('#shopping-cart');
shoppingCart.addEventListener('click', removeProductFromCart);
function removeProductFromCart(event){
console.log(event.target);
if (event.target.classList.contains('remove')){
event.target.parentElement.parentElement.remove();
}
}
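// Added note (hedged): because the listener sits on #shopping-cart and only inspects
// event.target, rows appended later are handled too — no re-binding needed.
// Hypothetical example, kept commented out so it does not alter the page:
// const newRow = document.createElement('tr');
// newRow.innerHTML = '<td>New course</td><td><a href="#" class="remove">X</a></td>';
// shoppingCart.appendChild(newRow); // its .remove link works right away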
//add to cart
const courseList = document.querySelector('#courses-list');
courseList.addEventListener('click',addToCart);
function addToCart(event){
if (event.target.classList.contains('add-to-cart')){ | }
console.log(courseList);
//local storage
//add to local storage
localStorage.setItem('name', "Kasia");
// add to session storage
//sessionStorage.setItem('name', 'Kasia');
//remove from | console.log("courses added");
} | random_line_split |
app.js | document.scripts[0].getAttribute('src');
//looping all the images
let images = document.images;
let imagesArray = Array.from(images);
imagesArray.forEach(function(image){
return image.src;
});
console.log(imagesArray);
//selecting DOM element
let heading = document.getElementById('heading');
console.log(heading.textContent);
//change the CSS
heading.style.backgroundColor = '#333';
heading.style.color = "#FFFFFF";
heading.style.padding = "10px";
heading.style.marginTop = "50px";
//change the text
heading.textContent = "The best courses";
//another way
heading.innerText = "Online course";
//query selector
let learningHeading = document.querySelector('#learn');
console.log(learningHeading);
//select a class with querySelector
const tagLine = document.querySelector(".tagline");
//const newText = tagLine.textContent.replace('$15', "$12");
//tagLine.textContent = newText;
console.log(tagLine);
let card = document.querySelector(".card");
//select a tag
let heading2 = document.querySelector('h2');
//nest your selectors with a CSS syntax
let image = document.querySelector('.card img');
//query selector supports nth-child, first-child or last-child
let link;
link = document.querySelector('#primary a:first-child');
link = document.querySelector('#primary a:last-child');
link = document.querySelector('#primary a:nth-child(3)');
//getElementsByClassName
//const links = document.getElementsByClassName('link');
//links[0].style.color = 'red';
//another ways
//const links = document.getElementById('primary').getElementsByClassName('link');
const links = document.querySelector('#primary').getElementsByClassName('link');
const images2 = document.getElementsByTagName('img');
///convert HTML collection into an array
let imagesArray2 = Array.from(images2);
//getElementByTagName
const heading1 = document.getElementsByTagName('h1');
const cards = document.querySelectorAll('.card');
const courses = document.querySelectorAll('.card h4');
const coursesArray = Array.from(courses);
coursesArray.forEach(function(course){
return course;
});
//select odd links
const oddLinks = document.querySelectorAll('#primary a:nth-child(odd)');
oddLinks.forEach(function(odd){
odd.style.backgroundColor = 'red';
odd.style.color = 'white';
});
//select even links
const evenLinks = document.querySelectorAll('#primary a:nth-child(even)');
evenLinks.forEach(function(even){
even.style.backgroundColor = 'blue';
even.style.color = 'white';
});
//change all the add-to-cart button text
const addCartBtns = document.querySelectorAll('.add-to-cart');
addCartBtns.forEach(function(button){
button.textContent = 'Something new'
});
//traversing the DOM
const navigation = document.querySelector('#primary');
let element1;
element1 = navigation.childNodes;
navigation.children[0].textContent = 'New text';
element1 = navigation.children[0].nodeName;//name of element
element1 = navigation.children[0].nodeType;//number of element
//1 - element
//2 - attributes
//3 - text nodes
//8 - comments
//9 - document
//10 - doctype
element1 = navigation.children;
const coursesList = document.querySelector('#courses-list');
element1 = coursesList.children[1].children[0].children[0].children[1].lastElementChild;//last element child of component
element1 = coursesList.children[1].children[0].children[0].children[1].firstElementChild;//first element child of component
element1 = coursesList.children[1].children[0].children[0].children[1].childElementCount;//how many element
//traversing from the children to parent
let cartBtn = document.querySelector('.add-to-cart');
element1 = cartBtn;
//parent node
element1 = cartBtn.parentNode;
element1 = cartBtn.parentElement.parentElement.children[0];//best solution
//sibling
element1 = cartBtn.previousElementSibling;
//course name
const courseName = cartBtn.parentElement.querySelector('h4');
//next element sibling
element1 = courseName.nextElementSibling;
console.log(element1);
//creating HTML elements with js
//create a new element
const newLink = document.createElement('a');
//add a class
newLink.className = "link";
//add the href
newLink.href = '#';
//newLink.setAttribute('href', '#');//second method
//add the text
newLink.appendChild(document.createTextNode('New Link'));
//add element to the HTML
document.querySelector('#primary').appendChild(newLink);
//replace an element
const newHeading = document.createElement('h2');
//add an id
newHeading.id = 'heading';
//add the class
newHeading.classList.add('heading');
//add the new text
newHeading.appendChild(document.createTextNode('The best courses'));
//select old heading
const oldHeading = document.querySelector('#heading');
//replace parent
coursesList.replaceChild(newHeading,oldHeading);
//remove elements
const links1 = document.querySelectorAll('.link');
links1[0].remove();
const cards1 = document.querySelectorAll('.card');
cards1[11].remove();
//remove by the children
const links2 = document.querySelectorAll('#primary .link');
navigation.removeChild(links2[4]);
//classes, ids and attributes
const link1 = document.querySelector('.link');
element1 = link1;
//read the class
element1 = link1.className;
//read the class (DOM token list)
element1 = link1.classList;
//access specific class with classList
element1 = link1.classList[0];
//add a new class
link1.classList.add('new-class');
//remove the class
link1.classList.remove('new-class');
//id
link1.id = 'new-id';
//remove the id
link1.id = '';
link1.removeAttribute('id');
//attributes function
element1 = link1.getAttribute('href');
element1 = link1.getAttribute('class');
element1 = link1.setAttribute('href', "http://facebook.com");
element1 = link1.setAttribute('target', "_blank");
element1 = link1.setAttribute('data-link', '10');
element1 = link1.hasAttribute('data-link');
element1 = link1.removeAttribute('data-link');
console.log(element1);
//addEventListener
const clearCartBtn = document.getElementById('clear-cart');
clearCartBtn.addEventListener('click',clearBtnFunction);
function clearBtnFunction(event){
//target
let elementRemove;
elementRemove = event;
//read the target
elementRemove = event.target;
elementRemove = event.target.id;
elementRemove = event.target.className;
elementRemove = event.target.innerText;
console.log(elementRemove);
}
//mouse events
function printEvent(event){
/*if(searchInput.value.length ===0){
alert("Type something")
}*/ //validate
//event.preventDefault(); when submit
console.log(searchInput.value);
console.log(`the event is: ${event.type}`)
}
//click mouse event
//clearCartBtn.addEventListener('click',printEvent);
//double click
clearCartBtn.addEventListener('dblclick',printEvent);
//mouse enter
clearCartBtn.addEventListener('mouseenter',printEvent);
//mouse leave
clearCartBtn.addEventListener('mouseleave',printEvent);
//mouse over
clearCartBtn.addEventListener('mouseover',printEvent);
//mouse out
clearCartBtn.addEventListener('mouseout',printEvent);
//mouse up
clearCartBtn.addEventListener('mouseup',printEvent);
//mouse down
clearCartBtn.addEventListener('mousedown',printEvent);
//input and form events
const searchForm = document.getElementById('search'),
searchInput = document.getElementById('search-course');
//events for <form>
searchForm.addEventListener('submit', printEvent);
//input events
//searchInput.addEventListener('keydown', printEvent);
//searchInput.addEventListener('keyup', printEvent);
//searchInput.addEventListener('keypress', printEvent);
//searchInput.addEventListener('focus', printEvent);
//searchInput.addEventListener('blur', printEvent);
//searchInput.addEventListener('cut', printEvent);
//searchInput.addEventListener('copy', printEvent);
//searchInput.addEventListener('paste', printEvent);
searchInput.addEventListener('input', printEvent);
//event bubbling
const cardCourse = document.querySelector('.card'),
infoCards = document.querySelector('.info-card'),
addCartBtn = document.querySelector('.add-to-cart');
cardCourse.addEventListener('click', function(event){
console.log('You clicked the card');
event.stopPropagation();
});
infoCards.addEventListener('click', function(event){
console.log('You clicked the info');
event.stopPropagation();
});
addCartBtn.addEventListener('click', function(event){
console.log('You clicked the button');
event.stopPropagation();
});
//delegation
const shoppingCart = document.querySelector('#shopping-cart');
shoppingCart.addEventListener('click', removeProductFromCart);
function removeProductFromCart(event){
console.log(event.target);
if (event.target.classList.contains('remove')){
event.target.parentElement.parentElement.remove();
}
}
//add to cart
const courseList = document.querySelector('#courses-list');
courseList.addEventListener('click',addToCart);
function addToCart(event){
if (event.target.classList.contains('add-to-cart')){
console.log("courses added");
}
}
console.log(courseList);
//local storage
//add to local storage
localStorage.setItem('name', "Kasia");
// add to session storage
//sessionStorage.setItem('name', 'Kasia');
//remove from the storage
localStorage.removeItem('name');
//read the value
//const name = localStorage.getItem('name');
//console.log(name);
//clear the storage
//localStorage.clear();
//localStorage .setItem('name', "Kasia");
//localStorage .setItem('name', "Walter White"); overwrite variable
const localStorageContent = localStorage.getItem('names');
let names;
if(localStorageContent === null){
names = []
}else | {
names = JSON.parse(localStorageContent)
} | conditional_block |
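// Added sketch: writing the array back. localStorage only stores strings, so the
// inverse of the JSON.parse above is JSON.stringify before setItem.
names.push('Another name');
localStorage.setItem('names', JSON.stringify(names));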
|
app.js | ;
element = document.links[2];
element = document.links[2].id;
element = document.links[2].className;
element = document.links[2].classList;
//forms
element = document.forms[0];
element = document.forms[0].method;
element = document.forms[0].action;
element = document.forms[0].className;
element = document.forms[0].classList;
// images
element = document.images;
//scripts
element = document.scripts[0].getAttribute('src');
//looping all the images
let images = document.images;
let imagesArray = Array.from(images);
imagesArray.forEach(function(image){
return image.src;
});
console.log(imagesArray);
//selecting DOM element
let heading = document.getElementById('heading');
console.log(heading.textContent);
//change the CSS
heading.style.backgroundColor = '#333';
heading.style.color = "#FFFFFF";
heading.style.padding = "10px";
heading.style.marginTop = "50px";
//change the text
heading.textContent = "The best courses";
//another way
heading.innerText = "Online course";
//query selector
let learningHeading = document.querySelector('#learn');
console.log(learningHeading);
//select a class with querySelector
const tagLine = document.querySelector(".tagline");
//const newText = tagLine.textContent.replace('$15', "$12");
//tagLine.textContent = newText;
console.log(tagLine);
let card = document.querySelector(".card");
//select a tag
let heading2 = document.querySelector('h2');
//nest your selectors with a CSS syntax
let image = document.querySelector('.card img');
//query selector supports nth-child, first-child or last-child
let link;
link = document.querySelector('#primary a:first-child');
link = document.querySelector('#primary a:last-child');
link = document.querySelector('#primary a:nth-child(3)');
//getElementsByClassName
//const links = document.getElementsByClassName('link');
//links[0].style.color = 'red';
//another ways
//const links = document.getElementById('primary').getElementsByClassName('link');
const links = document.querySelector('#primary').getElementsByClassName('link');
const images2 = document.getElementsByTagName('img');
///convert HTML collection into an array
let imagesArray2 = Array.from(images2);
//getElementByTagName
const heading1 = document.getElementsByTagName('h1');
const cards = document.querySelectorAll('.card');
const courses = document.querySelectorAll('.card h4');
const coursesArray = Array.from(courses);
coursesArray.forEach(function(course){
return course;
});
//select odd links
const oddLinks = document.querySelectorAll('#primary a:nth-child(odd)');
oddLinks.forEach(function(odd){
odd.style.backgroundColor = 'red';
odd.style.color = 'white';
});
//select even links
const evenLinks = document.querySelectorAll('#primary a:nth-child(even)');
evenLinks.forEach(function(even){
even.style.backgroundColor = 'blue';
even.style.color = 'white';
});
//change all the add-to-cart button text
const addCartBtns = document.querySelectorAll('.add-to-cart');
addCartBtns.forEach(function(button){
button.textContent = 'Something new'
});
//traversing the DOM
const navigation = document.querySelector('#primary');
let element1;
element1 = navigation.childNodes;
navigation.children[0].textContent = 'New text';
element1 = navigation.children[0].nodeName;//name of element
element1 = navigation.children[0].nodeType;//number of element
//1 - element
//2 - attributes
//3 - text nodes
//8 - comments
//9 - document
//10 - doctype
element1 = navigation.children;
const coursesList = document.querySelector('#courses-list');
element1 = coursesList.children[1].children[0].children[0].children[1].lastElementChild;//last element child of component
element1 = coursesList.children[1].children[0].children[0].children[1].firstElementChild;//first element child of component
element1 = coursesList.children[1].children[0].children[0].children[1].childElementCount;//how many element
//traversing from the children to parent
let cartBtn = document.querySelector('.add-to-cart');
element1 = cartBtn;
//parent node
element1 = cartBtn.parentNode;
element1 = cartBtn.parentElement.parentElement.children[0];//best solution
//sibling
element1 = cartBtn.previousElementSibling;
//course name
const courseName = cartBtn.parentElement.querySelector('h4');
//next element sibling
element1 = courseName.nextElementSibling;
console.log(element1);
//creating HTML elements with js
//create a new element
const newLink = document.createElement('a');
//add a class
newLink.className = "link";
//add the href
newLink.href = '#';
//newLink.setAttribute('href', '#');//second method
//add the text
newLink.appendChild(document.createTextNode('New Link'));
//add element to the HTML
document.querySelector('#primary').appendChild(newLink);
//replace an element
const newHeading = document.createElement('h2');
//add an id
newHeading.id = 'heading';
//add the class
newHeading.classList.add('heading');
//add the new text
newHeading.appendChild(document.createTextNode('The best courses'));
//select old heading
const oldHeading = document.querySelector('#heading');
//replace parent
coursesList.replaceChild(newHeading,oldHeading);
//remove elements
const links1 = document.querySelectorAll('.link');
links1[0].remove();
const cards1 = document.querySelectorAll('.card');
cards1[11].remove();
//remove by the children
const links2 = document.querySelectorAll('#primary .link');
navigation.removeChild(links2[4]);
//classes, ids and attributes
const link1 = document.querySelector('.link');
element1 = link1;
//read the class
element1 = link1.className;
//read the class (DOM token list)
element1 = link1.classList;
//access specific class with classList
element1 = link1.classList[0];
//add a new class
link1.classList.add('new-class');
//remove the class
link1.classList.remove('new-class');
//id
link1.id = 'new-id';
//remove the id
link1.id = '';
link1.removeAttribute('id');
//attributes function
element1 = link1.getAttribute('href');
element1 = link1.getAttribute('class');
element1 = link1.setAttribute('href', "http://facebook.com");
element1 = link1.setAttribute('target', "_blank");
element1 = link1.setAttribute('data-link', '10');
element1 = link1.hasAttribute('data-link');
element1 = link1.removeAttribute('data-link');
console.log(element1);
//addEventListener
const clearCartBtn = document.getElementById('clear-cart');
clearCartBtn.addEventListener('click',clearBtnFunction);
function clearBtnFunction(event){
//target
let elementRemove;
elementRemove = event;
//read the target
elementRemove = event.target;
elementRemove = event.target.id;
elementRemove = event.target.className;
elementRemove = event.target.innerText;
console.log(elementRemove);
}
//mouse events
function printEvent(event){
/*if(searchInput.value.length ===0){
alert("Type something")
}*/ //validate
//event.preventDefault(); when submit
console.log(searchInput.value);
console.log(`the event is: ${event.type}`)
}
//click mouse event
//clearCartBtn.addEventListener('click',printEvent);
//double click
clearCartBtn.addEventListener('dblclick',printEvent);
//mouse enter
clearCartBtn.addEventListener('mouseenter',printEvent);
//mouse leave
clearCartBtn.addEventListener('mouseleave',printEvent);
//mouse over
clearCartBtn.addEventListener('mouseover',printEvent);
//mouse out
clearCartBtn.addEventListener('mouseout',printEvent);
//mouse up
clearCartBtn.addEventListener('mouseup',printEvent);
//mouse down
clearCartBtn.addEventListener('mousedown',printEvent);
//input and form events
const searchForm = document.getElementById('search'),
searchInput = document.getElementById('search-course');
//events for <form>
searchForm.addEventListener('submit', printEvent);
//input events
//searchInput.addEventListener('keydown', printEvent);
//searchInput.addEventListener('keyup', printEvent);
//searchInput.addEventListener('keypress', printEvent);
//searchInput.addEventListener('focus', printEvent);
//searchInput.addEventListener('blur', printEvent);
//searchInput.addEventListener('cut', printEvent);
//searchInput.addEventListener('copy', printEvent);
//searchInput.addEventListener('paste', printEvent);
searchInput.addEventListener('input', printEvent);
//event bubbling
const cardCourse = document.querySelector('.card'),
infoCards = document.querySelector('.info-card'),
addCartBtn = document.querySelector('.add-to-cart');
cardCourse.addEventListener('click', function(event){
console.log('You clicked the card');
event.stopPropagation();
});
infoCards.addEventListener('click', function(event){
console.log('You clicked the info');
event.stopPropagation();
});
addCartBtn.addEventListener('click', function(event){
console.log('You clicked the button');
event.stopPropagation();
});
//delegation
const shoppingCart = document.querySelector('#shopping-cart');
shoppingCart.addEventListener('click', removeProductFromCart);
function removeProductFromCart(event){
console.log(event.target);
if (event.target.classList.contains('remove')){
event.target.parentElement.parentElement.remove();
}
}
//add to cart
const courseList = document.querySelector('#courses-list');
courseList.addEventListener('click',addToCart);
function addToCart(event) |
console.log(courseList);
//local storage
//add to local storage
localStorage.setItem('name', "Kasia");
// add to session storage
//sessionStorage.setItem('name', 'Kasia');
//remove | {
if (event.target.classList.contains('add-to-cart')){
console.log("courses added");
}
} | identifier_body |
app.js | ;
element = document.links[2];
element = document.links[2].id;
element = document.links[2].className;
element = document.links[2].classList;
//forms
element = document.forms[0];
element = document.forms[0].method;
element = document.forms[0].action;
element = document.forms[0].className;
element = document.forms[0].classList;
// images
element = document.images;
//scripts
element = document.scripts[0].getAttribute('src');
//looping all the images
let images = document.images;
let imagesArray = Array.from(images);
imagesArray.forEach(function(image){
return image.src;
});
console.log(imagesArray);
//selecting DOM element
let heading = document.getElementById('heading');
console.log(heading.textContent);
//change the CSS
heading.style.backgroundColor = '#333';
heading.style.color = "#FFFFFF";
heading.style.padding = "10px";
heading.style.marginTop = "50px";
//change the text
heading.textContent = "The best courses";
//another way
heading.innerText = "Online course";
//query selector
let learningHeading = document.querySelector('#learn');
console.log(learningHeading);
//select a class with querySelector
const tagLine = document.querySelector(".tagline");
//const newText = tagLine.textContent.replace('$15', "$12");
//tagLine.textContent = newText;
console.log(tagLine);
let card = document.querySelector(".card");
//select a tag
let heading2 = document.querySelector('h2');
//nest your selectors with a CSS syntax
let image = document.querySelector('.card img');
//query selector supports nth-child, first-child or last-child
let link;
link = document.querySelector('#primary a:first-child');
link = document.querySelector('#primary a:last-child');
link = document.querySelector('#primary a:nth-child(3)');
//getElementsByClassName
//const links = document.getElementsByClassName('link');
//links[0].style.color = 'red';
//another ways
//const links = document.getElementById('primary').getElementsByClassName('link');
const links = document.querySelector('#primary').getElementsByClassName('link');
const images2 = document.getElementsByTagName('img');
///convert HTML collection into an array
let imagesArray2 = Array.from(images2);
//getElementByTagName
const heading1 = document.getElementsByTagName('h1');
const cards = document.querySelectorAll('.card');
const courses = document.querySelectorAll('.card h4');
const coursesArray = Array.from(courses);
coursesArray.forEach(function(course){
return course;
});
//select odd links
const oddLinks = document.querySelectorAll('#primary a:nth-child(odd)');
oddLinks.forEach(function(odd){
odd.style.backgroundColor = 'red';
odd.style.color = 'white';
});
//select even links
const evenLinks = document.querySelectorAll('#primary a:nth-child(even)');
evenLinks.forEach(function(even){
even.style.backgroundColor = 'blue';
even.style.color = 'white';
});
//change all the add-to-cart button text
const addCartBtns = document.querySelectorAll('.add-to-cart');
addCartBtns.forEach(function(button){
button.textContent = 'Something new'
});
//traversing the DOM
const navigation = document.querySelector('#primary');
let element1;
element1 = navigation.childNodes;
navigation.children[0].textContent = 'New text';
element1 = navigation.children[0].nodeName;//name of element
element1 = navigation.children[0].nodeType;//number of element
//1 - element
//2 - attributes
//3 - text nodes
//8 - comments
//9 - document
//10 - doctype
element1 = navigation.children;
const coursesList = document.querySelector('#courses-list');
element1 = coursesList.children[1].children[0].children[0].children[1].lastElementChild;//last element child of component
element1 = coursesList.children[1].children[0].children[0].children[1].firstElementChild;//first element child of component
element1 = coursesList.children[1].children[0].children[0].children[1].childElementCount;//how many element
//traversing from the children to parent
let cartBtn = document.querySelector('.add-to-cart');
element1 = cartBtn;
//parent node
element1 = cartBtn.parentNode;
element1 = cartBtn.parentElement.parentElement.children[0];//best solution
//sibling
element1 = cartBtn.previousElementSibling;
//course name
const courseName = cartBtn.parentElement.querySelector('h4');
//next element sibling
element1 = courseName.nextElementSibling;
console.log(element1);
//creating HTML elements with js
//create a new element
const newLink = document.createElement('a');
//add a class
newLink.className = "link";
//add the href
newLink.href = '#';
//newLink.setAttribute('href', '#');//second method
//add the text
newLink.appendChild(document.createTextNode('New Link'));
//add element to the HTML
document.querySelector('#primary').appendChild(newLink);
//replace an element
const newHeading = document.createElement('h2');
//add an id
newHeading.id = 'heading';
//add the class
newHeading.classList.add('heading');
//add the new text
newHeading.appendChild(document.createTextNode('The best courses'));
//select old heading
const oldHeading = document.querySelector('#heading');
//replace parent
coursesList.replaceChild(newHeading,oldHeading);
//remove elements
const links1 = document.querySelectorAll('.link');
links1[0].remove();
const cards1 = document.querySelectorAll('.card');
cards1[11].remove();
//remove by the children
const links2 = document.querySelectorAll('#primary .link');
navigation.removeChild(links2[4]);
//classes, ids and attributes
const link1 = document.querySelector('.link');
element1 = link1;
//read the class
element1 = link1.className;
//read the class (DOM token list)
element1 = link1.classList;
//access specific class with classList
element1 = link1.classList[0];
//add a new class
link1.classList.add('new-class');
//remove the class
link1.classList.remove('new-class');
//id
link1.id = 'new-id';
//remove the id
link1.id = '';
link1.removeAttribute('id');
//attributes function
element1 = link1.getAttribute('href');
element1 = link1.getAttribute('class');
element1 = link1.setAttribute('href', "http://facebook.com");
element1 = link1.setAttribute('target', "_blank");
element1 = link1.setAttribute('data-link', '10');
element1 = link1.hasAttribute('data-link');
element1 = link1.removeAttribute('data-link');
console.log(element1);
//addEventListener
const clearCartBtn = document.getElementById('clear-cart');
clearCartBtn.addEventListener('click',clearBtnFunction);
function clearBtnFunction(event){
//target
let elementRemove;
elementRemove = event;
//read the target
elementRemove = event.target;
elementRemove = event.target.id;
elementRemove = event.target.className;
elementRemove = event.target.innerText;
console.log(elementRemove);
}
//mouse events
function printEvent(event){
/*if(searchInput.value.length ===0){
alert("Type something")
}*/ //validate
//event.preventDefault(); when submit
console.log(searchInput.value);
console.log(`the event is: ${event.type}`)
}
//click mouse event
//clearCartBtn.addEventListener('click',printEvent);
//double click
clearCartBtn.addEventListener('dblclick',printEvent);
//mouse enter
clearCartBtn.addEventListener('mouseenter',printEvent);
//mouse leave
clearCartBtn.addEventListener('mouseleave',printEvent);
//mouse over
clearCartBtn.addEventListener('mouseover',printEvent);
//mouse out
clearCartBtn.addEventListener('mouseout',printEvent);
//mouse up
clearCartBtn.addEventListener('mouseup',printEvent);
//mouse down
clearCartBtn.addEventListener('mousedown',printEvent);
//input and form events
const searchForm = document.getElementById('search'),
searchInput = document.getElementById('search-course');
//events for <form>
searchForm.addEventListener('submit', printEvent);
//input events
//searchInput.addEventListener('keydown', printEvent);
//searchInput.addEventListener('keyup', printEvent);
//searchInput.addEventListener('keypress', printEvent);
//searchInput.addEventListener('focus', printEvent);
//searchInput.addEventListener('blur', printEvent);
//searchInput.addEventListener('cut', printEvent);
//searchInput.addEventListener('copy', printEvent);
//searchInput.addEventListener('paste', printEvent);
searchInput.addEventListener('input', printEvent);
//event bubbling
const cardCourse = document.querySelector('.card'),
infoCards = document.querySelector('.info-card'),
addCartBtn = document.querySelector('.add-to-cart');
cardCourse.addEventListener('click', function(event){
console.log('You clicked the card');
event.stopPropagation();
});
infoCards.addEventListener('click', function(event){
console.log('You clicked the info');
event.stopPropagation();
});
addCartBtn.addEventListener('click', function(event){
console.log('You clicked the button');
event.stopPropagation();
});
//delegation
const shoppingCart = document.querySelector('#shopping-cart');
shoppingCart.addEventListener('click', removeProductFromCart);
function removeProductFromCart(event){
console.log(event.target);
if (event.target.classList.contains('remove')){
event.target.parentElement.parentElement.remove();
}
}
//add to cart
const courseList = document.querySelector('#courses-list');
courseList.addEventListener('click',addToCart);
function | (event){
if (event.target.classList.contains('add-to-cart')){
console.log("courses added");
}
}
console.log(courseList);
//local storage
//add to local storage
localStorage.setItem('name', "Kasia");
// add to session storage
//sessionStorage.setItem('name', 'Kasia');
//remove from | addToCart | identifier_name |
metadata.rs | _or_else(|| {
anyhow!("All workspace members are expected to be under the workspace root")
})?;
let diff = Utf8PathBuf::from_path_buf(path_diff)
.map_err(|_e| anyhow!("Invalid UTF-8 in source directory path diff."))?;
// Create a matching directory tree for the current file within the temp workspace
let new_path = temp_dir.join(diff.as_path());
if let Some(parent) = new_path.parent() {
fs::create_dir_all(parent)?;
}
make_symlink(&path, &new_path)?;
}
}
}
Ok(())
}
/// Creates a copy workspace in a temporary directory for fetching the metadata of the current workspace
fn make_temp_workspace(&self, cargo_workspace_root: &Utf8Path) -> Result<(TempDir, Utf8PathBuf)> {
let temp_dir = TempDir::new()?;
// First gather metadata without downloading any dependencies so we can identify any path dependencies.
let no_deps_metadata = self
.metadata_fetcher
.fetch_metadata(cargo_workspace_root, /*include_deps=*/ false)?;
// There should be a `Cargo.toml` file in the workspace root
fs::copy(
no_deps_metadata.workspace_root.join("Cargo.toml"),
temp_dir.as_ref().join("Cargo.toml"),
)?;
// Optionally copy over the lock file
if no_deps_metadata.workspace_root.join("Cargo.lock").exists() {
fs::copy(
no_deps_metadata.workspace_root.join("Cargo.lock"),
temp_dir.as_ref().join("Cargo.lock"),
)?;
}
let source_dotcargo = cargo_workspace_root.join(".cargo");
let source_dotcargo_config = source_dotcargo.join("config.toml");
if source_dotcargo_config.exists() {
let destination_dotcargo = temp_dir.path().join(".cargo");
fs::create_dir(&destination_dotcargo)?;
let destination_dotcargo_config = destination_dotcargo.join("config.toml");
fs::copy(&source_dotcargo_config, &destination_dotcargo_config)?;
}
// Copy over the Cargo.toml files of each workspace member
let temp_path = Utf8Path::from_path(temp_dir.as_ref())
.ok_or_else(|| anyhow!("Invalid UTF-8 in temp path."))?;
self.link_src_to_workspace(&no_deps_metadata, temp_path)?;
Ok((temp_dir, no_deps_metadata.workspace_root))
}
/// Download a crate's source code from the current registry url
fn fetch_crate_src(&self, dir: &Utf8Path, name: &str, version: &str) -> Result<Utf8PathBuf> {
// The registry url should only be the host URL with ports. No path
let registry_url = {
let mut r_url = self.registry_url.clone();
r_url.set_path("");
r_url.to_string()
};
// Generate a URL with no path. This keeps any port information
// associated with the host.
let mut url = url::Url::parse(®istry_url)?;
url.set_path("");
log::debug!("Cloning binary dependency: {}", &name);
let mut cloner = cargo_clone::Cloner::new();
cloner
.set_registry_url(url.to_string().trim_end_matches('/'))
.set_out_dir(dir);
cloner.clone(
cargo_clone::CloneMethodKind::Crate,
name,
Some(version),
&Vec::new(),
)?;
let crate_dir = dir.join(package_ident(name, version));
if !crate_dir.exists() {
return Err(anyhow!("Directory does not exist"));
}
Ok(crate_dir)
}
/// Add binary dependencies as workspace members to the given workspace root Cargo.toml file
fn inject_binaries_into_workspace(
&self,
binary_deps: Vec<String>,
root_toml: &Utf8Path,
) -> Result<()> {
// Read the current manifest
let mut manifest = {
let content = fs::read_to_string(root_toml)?;
cargo_toml::Manifest::from_str(content.as_str())?
};
// Parse the current `workspace` section of the manifest if one exists
let mut workspace = match manifest.workspace {
Some(workspace) => workspace,
None => cargo_toml::Workspace::default(),
};
// Add the binary dependencies as workspace members to the `workspace` metadata
for dep in binary_deps.iter() {
workspace.members.push(dep.to_string());
}
// Replace the workspace metadata with the modified metadata
manifest.workspace = Some(workspace);
// Write the metadata back to disk.
// cargo_toml::Manifest cannot be serialized directly.
// see: https://gitlab.com/crates.rs/cargo_toml/-/issues/3
let value = toml::Value::try_from(&manifest)?;
std::fs::write(root_toml, toml::to_string(&value)?)
.with_context(|| format!("Failed to inject workspace metadata to {}", root_toml))
}
/// Look up a crate in a specified crate index to determine its checksum
fn fetch_crate_checksum(&self, name: &str, version: &str) -> Result<String> {
let index_url_is_file = self.index_url.scheme().to_lowercase() == "file";
let crate_index_path = if !index_url_is_file {
crates_index::BareIndex::from_url(self.index_url.as_ref())?
.open_or_clone()?
.crate_(name)
.ok_or_else(|| anyhow!("Failed to find crate '{}' in index", name))?
} else {
crates_index::Index::new(self.index_url.path())
.crate_(name)
.ok_or_else(|| anyhow!("Failed to find crate '{}' in index", name))?
};
let (_index, crate_version) = crate_index_path
.versions()
.iter()
.enumerate()
.find(|(_, ver)| ver.version() == version)
.ok_or_else(|| anyhow!("Failed to find version {} for crate {}", version, name))?;
Ok(crate_version.checksum()[..].to_hex())
}
/// Ensures a lockfile is generated for a crate on disk
///
/// Args:
/// - reused_lockfile: An optional lockfile to use for fetching metadata to
/// ensure subsequent metadata fetches return consistent results.
/// - cargo_dir: The directory of the cargo workspace to gather metadata for.
/// Returns:
/// If a new lockfile was generated via the `lockfile_generator`, that
/// Lockfile object is returned. New lockfiles are generated when
/// `reused_lockfile` is not provided.
fn cargo_generate_lockfile(
&self,
reused_lockfile: &Option<Utf8PathBuf>,
cargo_dir: &Utf8Path,
) -> Result<Option<Lockfile>> {
let lockfile_path = cargo_dir.join("Cargo.lock");
// Use the reusable lockfile if one is provided
if let Some(reused_lockfile) = reused_lockfile {
fs::copy(reused_lockfile, &lockfile_path)?;
return Ok(None);
}
let lockfile = self.lockfile_generator.generate_lockfile(cargo_dir)?;
// Returning the lockfile here signifies that a new lockfile has been created.
Ok(Some(lockfile))
}
/// Gather all information about a Cargo project to use for planning and rendering steps
pub fn fetch_metadata(
&self,
cargo_workspace_root: &Utf8Path,
binary_dep_info: Option<&HashMap<String, cargo_toml::Dependency>>,
reused_lockfile: Option<Utf8PathBuf>,
) -> Result<RazeMetadata> {
let (cargo_dir, cargo_workspace_root) = self.make_temp_workspace(cargo_workspace_root)?;
let utf8_cargo_dir = Utf8Path::from_path(cargo_dir.as_ref())
.ok_or_else(|| anyhow!("Cargo dir has invalid UTF-8 in fetch_metadata."))?;
let cargo_root_toml = utf8_cargo_dir.join("Cargo.toml");
// Gather new lockfile data if any binary dependencies were provided
let mut checksums: HashMap<String, String> = HashMap::new();
if let Some(binary_dep_info) = binary_dep_info {
if !binary_dep_info.is_empty() {
let mut src_dirnames: Vec<String> = Vec::new();
for (name, info) in binary_dep_info.iter() {
let version = info.req();
let src_dir = self.fetch_crate_src(utf8_cargo_dir, name, version)?;
checksums.insert(
package_ident(name, version),
self.fetch_crate_checksum(name, version)?,
);
if let Some(dirname) = src_dir.file_name() {
src_dirnames.push(dirname.to_string());
}
}
self.inject_binaries_into_workspace(src_dirnames, &cargo_root_toml)?;
}
}
let output_lockfile = self.cargo_generate_lockfile(&reused_lockfile, utf8_cargo_dir)?;
// Load checksums from the lockfile
let workspace_toml_lock = cargo_dir.as_ref().join("Cargo.lock");
if workspace_toml_lock.exists() {
let lockfile = Lockfile::load(workspace_toml_lock)?;
for package in &lockfile.packages {
if let Some(checksum) = &package.checksum | {
checksums.insert(
package_ident(package.name.as_ref(), &package.version.to_string()),
checksum.to_string(),
);
} | conditional_block |
|
metadata.rs |
}
impl MetadataFetcher for CargoMetadataFetcher {
fn fetch_metadata(&self, working_dir: &Utf8Path, include_deps: bool) -> Result<Metadata> {
let mut command = MetadataCommand::new();
if !include_deps {
command.no_deps();
}
command
.cargo_path(&self.cargo_bin_path)
.current_dir(working_dir)
.exec()
.with_context(|| {
format!(
"Failed to fetch Metadata with `{}` from `{}`",
&self.cargo_bin_path, working_dir
)
})
}
}
/// An entity that can generate lockfile data within a Cargo workspace
pub trait LockfileGenerator {
fn generate_lockfile(&self, crate_root_dir: &Utf8Path) -> Result<Lockfile>;
}
/// A lockfile generator which simply wraps the `cargo generate-lockfile` command
struct CargoLockfileGenerator {
cargo_bin_path: Utf8PathBuf,
}
impl LockfileGenerator for CargoLockfileGenerator {
/// Generate lockfile information from a cargo workspace root
fn generate_lockfile(&self, crate_root_dir: &Utf8Path) -> Result<Lockfile> {
let lockfile_path = crate_root_dir.join("Cargo.lock");
// Generate lockfile
let output = std::process::Command::new(&self.cargo_bin_path)
.arg("generate-lockfile")
.current_dir(crate_root_dir)
.output()
.with_context(|| format!("Generating lockfile in {}", crate_root_dir))?;
if !output.status.success() {
anyhow::bail!(
"Failed to generate lockfile in {}: {}",
crate_root_dir,
String::from_utf8_lossy(&output.stderr)
);
}
// Load lockfile contents
Lockfile::load(&lockfile_path)
.with_context(|| format!("Failed to load lockfile: {}", lockfile_path))
}
}
/// A struct containing all metadata about a project with which to plan generated output files for
#[derive(Debug, Clone)]
pub struct RazeMetadata {
// `cargo metadata` output of the current project
pub metadata: Metadata,
// The absolute path to the current project's cargo workspace root. Note that the workspace
// root in `metadata` will be inside of a temporary directory. For details see:
// https://doc.rust-lang.org/cargo/reference/workspaces.html#root-package
pub cargo_workspace_root: Utf8PathBuf,
// The metadata of a lockfile that was generated as a result of fetching metadata
pub lockfile: Option<Lockfile>,
// A map of all known crates with checksums. Use `checksums_for` to access data from this map.
pub checksums: HashMap<String, String>,
// A map of crates to their enabled general and per-platform features.
pub features: BTreeMap<PackageId, Features>,
}
impl RazeMetadata {
/// Get the checksum of a crate using a unique formatter.
pub fn checksum_for(&self, name: &str, version: &str) -> Option<&String> {
self.checksums.get(&package_ident(name, version))
}
}
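// Added illustration (crate name and version are hypothetical): the key is the same
// `package_ident(name, version)` string used when the map is populated — the tests
// below show the "{name}-{version}" form — so a lookup only hits on an exact match:
//
//     if let Some(sum) = raze_metadata.checksum_for("fake-crate", "3.3.3") {
//         println!("checksum for fake-crate-3.3.3: {}", sum);
//     }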
/// Create a symlink file on unix systems
#[cfg(target_family = "unix")]
fn make_symlink(src: &Utf8Path, dest: &Utf8Path) -> Result<()> {
std::os::unix::fs::symlink(src, dest)
.with_context(|| "Failed to create symlink for generating metadata")
}
/// Create a symlink file on windows systems
#[cfg(target_family = "windows")]
fn make_symlink(src: &Utf8Path, dest: &Utf8Path) -> Result<()> {
std::os::windows::fs::symlink_file(src, dest)
.with_context(|| "Failed to create symlink for generating metadata")
}
/// A workspace metadata fetcher that uses the Cargo commands to gather information about a Cargo
/// project and its transitive dependencies for planning and rendering of Bazel BUILD files.
pub struct RazeMetadataFetcher {
registry_url: Url,
index_url: Url,
metadata_fetcher: Box<dyn MetadataFetcher>,
lockfile_generator: Box<dyn LockfileGenerator>,
settings: Option<RazeSettings>,
}
impl RazeMetadataFetcher {
pub fn new<P: Into<Utf8PathBuf>>(
cargo_bin_path: P,
registry_url: Url,
index_url: Url,
settings: Option<RazeSettings>,
) -> RazeMetadataFetcher {
let cargo_bin_pathbuf: Utf8PathBuf = cargo_bin_path.into();
RazeMetadataFetcher {
registry_url,
index_url,
metadata_fetcher: Box::new(CargoMetadataFetcher {
cargo_bin_path: cargo_bin_pathbuf.clone(),
}),
lockfile_generator: Box::new(CargoLockfileGenerator {
cargo_bin_path: cargo_bin_pathbuf,
}),
settings,
}
}
pub fn new_with_settings(settings: Option<RazeSettings>) -> RazeMetadataFetcher {
RazeMetadataFetcher::new(
cargo_bin_path(),
// UNWRAP: The default is covered by testing and should never return err
Url::parse(DEFAULT_CRATE_REGISTRY_URL).unwrap(),
Url::parse(DEFAULT_CRATE_INDEX_URL).unwrap(),
settings,
)
}
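// Added usage sketch (workspace path and error handling elided): with the default
// registry and index URLs this is the typical entry point —
//
//     let fetcher = RazeMetadataFetcher::new_with_settings(None);
//     let raze_metadata = fetcher.fetch_metadata(cargo_workspace_root, None, None)?;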
/// Reassign the [`crate::metadata::MetadataFetcher`] associated with the Raze Metadata Fetcher
pub fn set_metadata_fetcher(&mut self, fetcher: Box<dyn MetadataFetcher>) {
self.metadata_fetcher = fetcher;
}
/// Reassign the [`crate::metadata::LockfileGenerator`] associated with the current Fetcher
pub fn set_lockfile_generator(&mut self, generator: Box<dyn LockfileGenerator>) {
self.lockfile_generator = generator;
}
/// Symlinks the source code of all workspace members into the temp workspace
fn link_src_to_workspace(&self, no_deps_metadata: &Metadata, temp_dir: &Utf8Path) -> Result<()> {
let crate_member_id_re = match consts::OS {
"windows" => Regex::new(r".+\(path\+file:///(.+)\)")?,
_ => Regex::new(r".+\(path\+file://(.+)\)")?,
};
for member in no_deps_metadata.workspace_members.iter() {
// Get a path to the workspace member directory
let workspace_member_directory = {
let crate_member_id_match = crate_member_id_re
.captures(&member.repr)
.and_then(|cap| cap.get(1));
if crate_member_id_match.is_none() {
continue;
}
// UNWRAP: guarded above
Utf8PathBuf::from(crate_member_id_match.unwrap().as_str())
};
// Sanity check: The assumption is that any crate with an `id` that matches
// the regex pattern above should contain a Cargo.toml file with which we
// can use to infer the existence of libraries from relative paths such as
// `src/lib.rs` and `src/main.rs`.
let toml_path = workspace_member_directory.join("Cargo.toml");
if !toml_path.exists() {
return Err(anyhow!(format!(
"The regex pattern `{}` found a path that did not contain a Cargo.toml file: `{}`",
crate_member_id_re.as_str(),
workspace_member_directory
)));
}
// Copy the Cargo.toml files into the temp directory to match the directory structure on disk
let path_diff = diff_paths(
&workspace_member_directory,
&no_deps_metadata.workspace_root,
)
.ok_or_else(|| {
anyhow!("All workspace members are expected to be under the workspace root")
})?;
let diff = Utf8PathBuf::from_path_buf(path_diff)
.map_err(|_e| anyhow!("Invalid UTF-8 in path diff."))?;
let new_path = temp_dir.join(diff);
fs::create_dir_all(&new_path)?;
fs::copy(
workspace_member_directory.join("Cargo.toml"),
new_path.join("Cargo.toml"),
)?;
// Additionally, symlink everything in some common source directories to ensure specified
// library targets can be relied on and won't prevent fetching metadata
for dir in vec!["bin", "src"].iter() {
let glob_pattern = format!("{}/**/*.rs", workspace_member_directory.join(dir));
for entry in glob(glob_pattern.as_str()).expect("Failed to read glob pattern") {
let path = Utf8PathBuf::from_path_buf(entry?)
.map_err(|_e| anyhow!("Invalid UTF-8 in source directory."))?;
// Determine the difference between the workspace root and the current file
let path_diff = diff_paths(&path, &no_deps_metadata.workspace_root).ok_or_else(|| {
anyhow!("All workspace members are expected to be under the workspace root")
})?;
let diff = Utf8PathBuf::from_path_buf(path_diff)
.map_err(|_e| anyhow!("Invalid UTF-8 in source directory path diff."))?;
// Create a matching directory tree for the current file within the temp workspace
let new_path = temp_dir.join(diff.as_path());
if let Some(parent) = new_path.parent() {
fs::create_dir_all(parent)?;
}
make_symlink(&path, &new_path)?;
}
}
}
Ok(())
}
/// Creates a copy workspace in a temporary directory for fetching the metadata of the current workspace
fn make_temp_workspace(&self, | {
CargoMetadataFetcher {
cargo_bin_path: cargo_bin_path(),
}
} | identifier_body |
|
metadata.rs | (),
fs::read_to_string(working_dir.join("Cargo.toml")).unwrap()
)
})
}
}
pub struct DummyLockfileGenerator {
// Optional lockfile to use for generation
pub lockfile_contents: Option<String>,
}
impl LockfileGenerator for DummyLockfileGenerator {
fn generate_lockfile(&self, _crate_root_dir: &Utf8Path) -> Result<Lockfile> {
match &self.lockfile_contents {
Some(contents) => Lockfile::from_str(contents)
.with_context(|| format!("Failed to load provided lockfile:\n{}", contents)),
None => Lockfile::from_str(basic_lock_contents())
.with_context(|| format!("Failed to load dummy lockfile:\n{}", basic_lock_contents())),
}
}
}
pub fn dummy_raze_metadata_fetcher() -> (RazeMetadataFetcher, MockServer, TempDir) {
let tempdir = TempDir::new().unwrap();
let mock_server = MockServer::start();
let mut fetcher = RazeMetadataFetcher::new(
cargo_bin_path(),
Url::parse(&mock_server.base_url()).unwrap(),
Url::parse(&format!("file://{}", tempdir.as_ref().display())).unwrap(),
None,
);
fetcher.set_metadata_fetcher(Box::new(DummyCargoMetadataFetcher {
metadata_template: None,
}));
fetcher.set_lockfile_generator(Box::new(DummyLockfileGenerator {
lockfile_contents: None,
}));
(fetcher, mock_server, tempdir)
}
pub fn dummy_raze_metadata() -> RazeMetadata {
let dir = make_basic_workspace();
let (mut fetcher, _server, _index_dir) = dummy_raze_metadata_fetcher();
// Always render basic metadata
fetcher.set_metadata_fetcher(Box::new(DummyCargoMetadataFetcher {
metadata_template: Some(templates::BASIC_METADATA.to_string()),
}));
fetcher
.fetch_metadata(utf8_path(dir.as_ref()), None, None)
.unwrap()
}
#[test]
fn test_cargo_subcommand_metadata_fetcher_works_without_lock() {
let dir = TempDir::new().unwrap();
let toml_path = dir.path().join("Cargo.toml");
let mut toml = File::create(&toml_path).unwrap();
toml.write_all(basic_toml_contents().as_bytes()).unwrap();
let mut fetcher = RazeMetadataFetcher::new_with_settings(None);
fetcher.set_lockfile_generator(Box::new(DummyLockfileGenerator {
lockfile_contents: None,
}));
fetcher
.fetch_metadata(utf8_path(dir.as_ref()), None, None)
.unwrap();
}
#[test]
fn test_cargo_subcommand_metadata_fetcher_works_with_lock() {
let dir = TempDir::new().unwrap();
// Create Cargo.toml
{
let path = dir.path().join("Cargo.toml");
let mut toml = File::create(&path).unwrap();
toml.write_all(basic_toml_contents().as_bytes()).unwrap();
}
// Create Cargo.lock
{
let path = dir.path().join("Cargo.lock");
let mut lock = File::create(&path).unwrap();
lock.write_all(basic_lock_contents().as_bytes()).unwrap();
}
let mut fetcher = RazeMetadataFetcher::default();
fetcher.set_lockfile_generator(Box::new(DummyLockfileGenerator {
lockfile_contents: None,
}));
fetcher
.fetch_metadata(utf8_path(dir.as_ref()), None, None)
.unwrap();
}
#[test]
fn test_cargo_subcommand_metadata_fetcher_handles_bad_files() {
let dir = TempDir::new().unwrap();
// Create Cargo.toml
{
let path = dir.path().join("Cargo.toml");
let mut toml = File::create(&path).unwrap();
toml.write_all(b"hello").unwrap();
}
let fetcher = RazeMetadataFetcher::default();
assert!(fetcher
.fetch_metadata(utf8_path(dir.as_ref()), None, None)
.is_err());
}
#[test]
fn test_fetching_src() {
let (fetcher, mock_server, _index_url) = dummy_raze_metadata_fetcher();
let mock = mock_remote_crate("fake-crate", "3.3.3", &mock_server);
let path = fetcher
.fetch_crate_src(utf8_path(mock.data_dir.as_ref()), "fake-crate", "3.3.3")
.unwrap();
for mock in mock.endpoints.iter() {
mock.assert();
}
assert!(path.exists());
// Ensure the name follows a consistent pattern: `{name}-{version}`
assert_eq!(
mock.data_dir.into_path().join("fake-crate-3.3.3").as_path(),
path.as_path()
);
assert!(path.join("Cargo.toml").exists());
assert!(path.join("Cargo.lock").exists());
assert!(path.join("test").exists());
}
#[test]
fn test_inject_dependency_to_workspace() {
let (fetcher, _mock_server, _index_url) = dummy_raze_metadata_fetcher();
let crate_dir = make_workspace_with_dependency();
let utf8_crate_dir = utf8_path(crate_dir.as_ref());
let cargo_toml_path = utf8_crate_dir.join("Cargo.toml");
let mut manifest =
cargo_toml::Manifest::from_str(fs::read_to_string(&cargo_toml_path).unwrap().as_str())
.unwrap();
let basic_dep_toml = crate_dir.as_ref().join("basic_dep/Cargo.toml");
fs::create_dir_all(basic_dep_toml.parent().unwrap()).unwrap();
fs::write(&basic_dep_toml, named_toml_contents("basic_dep", "0.0.1")).unwrap();
assert!(basic_dep_toml.exists());
manifest.workspace = Some({
let mut workspace = cargo_toml::Workspace::default();
workspace.members.push("test".to_string());
workspace
});
// Ensure the manifest only includes the new workspace member after the injection
assert_ne!(
cargo_toml::Manifest::from_str(fs::read_to_string(&cargo_toml_path).unwrap().as_str())
.unwrap(),
manifest
);
// Fetch metadata
fetcher
.inject_binaries_into_workspace(vec!["test".to_string()], &cargo_toml_path)
.unwrap();
// Ensure workspace now has the new member
assert_eq!(
cargo_toml::Manifest::from_str(fs::read_to_string(&cargo_toml_path).unwrap().as_str())
.unwrap(),
manifest
);
}
#[test]
fn test_generate_lockfile_use_previously_generated() {
let (fetcher, _mock_server, _index_url) = dummy_raze_metadata_fetcher();
let crate_dir = make_workspace_with_dependency();
let reused_lockfile =
Utf8PathBuf::from_path_buf(crate_dir.as_ref().join("locks_test/Cargo.raze.lock")).unwrap();
fs::create_dir_all(reused_lockfile.parent().unwrap()).unwrap();
fs::write(&reused_lockfile, "# test_generate_lockfile").unwrap();
// A reuse lockfile was provided so no new lockfile should be returned
assert!(fetcher
.cargo_generate_lockfile(
&Some(reused_lockfile.clone()),
utf8_path(crate_dir.as_ref())
)
.unwrap()
.is_none());
// Returns the built in lockfile
assert_eq!(
cargo_lock::Lockfile::load(crate_dir.as_ref().join("Cargo.lock")).unwrap(),
cargo_lock::Lockfile::load(&reused_lockfile).unwrap(),
);
}
#[test]
fn test_cargo_generate_lockfile_new_file() {
let (mut fetcher, _mock_server, _index_url) = dummy_raze_metadata_fetcher();
fetcher.set_lockfile_generator(Box::new(DummyLockfileGenerator {
lockfile_contents: Some(advanced_lock_contents().to_string()),
}));
let crate_dir = make_workspace(advanced_toml_contents(), None);
// A new lockfile should have been created and it should match the expected contents for the advanced_toml workspace
assert_eq!(
fetcher
.cargo_generate_lockfile(&None, Utf8Path::from_path(crate_dir.as_ref()).unwrap())
.unwrap()
.unwrap(),
Lockfile::from_str(advanced_lock_contents()).unwrap()
);
}
#[test]
fn test_cargo_generate_lockfile_no_file() {
let (mut fetcher, _mock_server, _index_url) = dummy_raze_metadata_fetcher();
fetcher.set_lockfile_generator(Box::new(DummyLockfileGenerator {
lockfile_contents: Some(advanced_lock_contents().to_string()),
}));
let crate_dir = make_workspace(advanced_toml_contents(), None);
let expected_lockfile =
Utf8PathBuf::from_path_buf(crate_dir.as_ref().join("expected/Cargo.expected.lock")).unwrap();
fs::create_dir_all(expected_lockfile.parent().unwrap()).unwrap();
fs::write(&expected_lockfile, advanced_lock_contents()).unwrap();
assert!(fetcher
.cargo_generate_lockfile(
&Some(expected_lockfile.clone()),
utf8_path(crate_dir.as_ref())
)
.unwrap()

metadata.rs

pub struct CargoLockfileGenerator {
cargo_bin_path: Utf8PathBuf,
}
impl LockfileGenerator for CargoLockfileGenerator {
/// Generate lockfile information from a cargo workspace root
fn generate_lockfile(&self, crate_root_dir: &Utf8Path) -> Result<Lockfile> {
let lockfile_path = crate_root_dir.join("Cargo.lock");
// Generate lockfile
let output = std::process::Command::new(&self.cargo_bin_path)
.arg("generate-lockfile")
.current_dir(crate_root_dir)
.output()
.with_context(|| format!("Generating lockfile in {}", crate_root_dir))?;
if !output.status.success() {
anyhow::bail!(
"Failed to generate lockfile in {}: {}",
crate_root_dir,
String::from_utf8_lossy(&output.stderr)
);
}
// Load lockfile contents
Lockfile::load(&lockfile_path)
.with_context(|| format!("Failed to load lockfile: {}", lockfile_path))
}
}
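// Example (sketch): using the generator directly against a checked-out workspace. The cargo
// path and workspace path below are illustrative values only.
//
// let generator = CargoLockfileGenerator {
//   cargo_bin_path: Utf8PathBuf::from("/usr/bin/cargo"),
// };
// let lockfile = generator.generate_lockfile(Utf8Path::new("/path/to/workspace"))?;
// println!("locked {} packages", lockfile.packages.len());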
/// A struct containing all metadata about a project, used to plan and render the generated output files
#[derive(Debug, Clone)]
pub struct RazeMetadata {
// `cargo metadata` output of the current project
pub metadata: Metadata,
// The absolute path to the current project's cargo workspace root. Note that the workspace
// root in `metadata` will be inside of a temporary directory. For details see:
// https://doc.rust-lang.org/cargo/reference/workspaces.html#root-package
pub cargo_workspace_root: Utf8PathBuf,
// The metadata of a lockfile that was generated as a result of fetching metadata
pub lockfile: Option<Lockfile>,
// A map of all known crates with checksums. Use `checksums_for` to access data from this map.
pub checksums: HashMap<String, String>,
// A map of crates to their enabled general and per-platform features.
pub features: BTreeMap<PackageId, Features>,
}
impl RazeMetadata {
/// Get the checksum of a crate using a unique formatter.
pub fn checksum_for(&self, name: &str, version: &str) -> Option<&String> {
self.checksums.get(&package_ident(name, version))
}
}
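// Example (sketch): `package_ident` is defined elsewhere in this crate. The src-fetching test
// above ("fake-crate-3.3.3") suggests it joins name and version as "{name}-{version}", so a
// checksum lookup is keyed on that form.
//
// let metadata: RazeMetadata = /* obtained from RazeMetadataFetcher::fetch_metadata */;
// if let Some(checksum) = metadata.checksum_for("fake-crate", "3.3.3") {
//   println!("fake-crate-3.3.3 -> {}", checksum);
// }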
/// Create a symlink file on unix systems
#[cfg(target_family = "unix")]
fn make_symlink(src: &Utf8Path, dest: &Utf8Path) -> Result<()> {
std::os::unix::fs::symlink(src, dest)
.with_context(|| "Failed to create symlink for generating metadata")
}
/// Create a symlink file on windows systems
#[cfg(target_family = "windows")]
fn make_symlink(src: &Utf8Path, dest: &Utf8Path) -> Result<()> {
std::os::windows::fs::symlink_file(src, dest)
.with_context(|| "Failed to create symlink for generating metadata")
}
/// A workspace metadata fetcher that uses the Cargo commands to gather information about a Cargo
/// project and its transitive dependencies for planning and rendering of Bazel BUILD files.
pub struct RazeMetadataFetcher {
registry_url: Url,
index_url: Url,
metadata_fetcher: Box<dyn MetadataFetcher>,
lockfile_generator: Box<dyn LockfileGenerator>,
settings: Option<RazeSettings>,
}
impl RazeMetadataFetcher {
pub fn new<P: Into<Utf8PathBuf>>(
cargo_bin_path: P,
registry_url: Url,
index_url: Url,
settings: Option<RazeSettings>,
) -> RazeMetadataFetcher {
let cargo_bin_pathbuf: Utf8PathBuf = cargo_bin_path.into();
RazeMetadataFetcher {
registry_url,
index_url,
metadata_fetcher: Box::new(CargoMetadataFetcher {
cargo_bin_path: cargo_bin_pathbuf.clone(),
}),
lockfile_generator: Box::new(CargoLockfileGenerator {
cargo_bin_path: cargo_bin_pathbuf,
}),
settings,
}
}
pub fn new_with_settings(settings: Option<RazeSettings>) -> RazeMetadataFetcher {
RazeMetadataFetcher::new(
cargo_bin_path(),
// UNWRAP: The default is covered by testing and should never return err
Url::parse(DEFAULT_CRATE_REGISTRY_URL).unwrap(),
Url::parse(DEFAULT_CRATE_INDEX_URL).unwrap(),
settings,
)
}
/// Reassign the [`crate::metadata::MetadataFetcher`] associated with the Raze Metadata Fetcher
pub fn set_metadata_fetcher(&mut self, fetcher: Box<dyn MetadataFetcher>) {
self.metadata_fetcher = fetcher;
}
/// Reassign the [`crate::metadata::LockfileGenerator`] associated with the current Fetcher
pub fn set_lockfile_generator(&mut self, generator: Box<dyn LockfileGenerator>) {
self.lockfile_generator = generator;
}
/// Symlinks the source code of all workspace members into the temp workspace
fn link_src_to_workspace(&self, no_deps_metadata: &Metadata, temp_dir: &Utf8Path) -> Result<()> {
let crate_member_id_re = match consts::OS {
"windows" => Regex::new(r".+\(path\+file:///(.+)\)")?,
_ => Regex::new(r".+\(path\+file://(.+)\)")?,
};
for member in no_deps_metadata.workspace_members.iter() {
// Get a path to the workspace member directory
let workspace_member_directory = {
let crate_member_id_match = crate_member_id_re
.captures(&member.repr)
.and_then(|cap| cap.get(1));
if crate_member_id_match.is_none() {
continue;
}
// UNWRAP: guarded above
Utf8PathBuf::from(crate_member_id_match.unwrap().as_str())
};
// Sanity check: The assumption is that any crate with an `id` that matches
// the regex pattern above should contain a Cargo.toml file that we can
// use to infer the existence of libraries from relative paths such as
// `src/lib.rs` and `src/main.rs`.
let toml_path = workspace_member_directory.join("Cargo.toml");
if !toml_path.exists() {
return Err(anyhow!(format!(
"The regex pattern `{}` found a path that did not contain a Cargo.toml file: `{}`",
crate_member_id_re.as_str(),
workspace_member_directory
)));
}
// Copy the Cargo.toml files into the temp directory to match the directory structure on disk
let path_diff = diff_paths(
&workspace_member_directory,
&no_deps_metadata.workspace_root,
)
.ok_or_else(|| {
anyhow!("All workspace members are expected to be under the workspace root")
})?;
let diff = Utf8PathBuf::from_path_buf(path_diff)
.map_err(|_e| anyhow!("Invalid UTF-8 in path diff."))?;
let new_path = temp_dir.join(diff);
fs::create_dir_all(&new_path)?;
fs::copy(
workspace_member_directory.join("Cargo.toml"),
new_path.join("Cargo.toml"),
)?;
// Additionally, symlink everything in some common source directories to ensure specified
// library targets can be relied on and won't prevent fetching metadata
for dir in vec!["bin", "src"].iter() {
let glob_pattern = format!("{}/**/*.rs", workspace_member_directory.join(dir));
for entry in glob(glob_pattern.as_str()).expect("Failed to read glob pattern") {
let path = Utf8PathBuf::from_path_buf(entry?)
.map_err(|_e| anyhow!("Invalid UTF-8 in source directory."))?;
// Determine the difference between the workspace root and the current file
let path_diff = diff_paths(&path, &no_deps_metadata.workspace_root).ok_or_else(|| {
anyhow!("All workspace members are expected to be under the workspace root")
})?;
let diff = Utf8PathBuf::from_path_buf(path_diff)
.map_err(|_e| anyhow!("Invalid UTF-8 in source directory path diff."))?;
// Create a matching directory tree for the current file within the temp workspace
let new_path = temp_dir.join(diff.as_path());
if let Some(parent) = new_path.parent() {
fs::create_dir_all(parent)?;
}
make_symlink(&path, &new_path)?;
}
}
}
Ok(())
}
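// Example (sketch): what the regex above extracts from a typical workspace member id. The id
// string below is an illustrative `cargo metadata` package id, not taken from this project.
//
// let re = Regex::new(r".+\(path\+file://(.+)\)").unwrap();
// let id = "my-crate 0.1.0 (path+file:///home/user/project/my-crate)";
// let dir = re.captures(id).and_then(|cap| cap.get(1)).unwrap().as_str();
// assert_eq!(dir, "/home/user/project/my-crate");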
/// Creates a copy of the workspace in a temporary directory for fetching the metadata of the current workspace
fn make_temp_workspace(&self, cargo_workspace_root: &Utf8Path) -> Result<(TempDir, Utf8PathBuf)> {
let temp_dir = TempDir::new()?;
// First gather metadata without downloading any dependencies so we can identify any path dependencies.
let no_deps_metadata = self
.metadata_fetcher
.fetch_metadata(cargo_workspace_root, /*include_deps=*/ false)?;
// There should be a `Cargo.toml` file in the workspace root
fs::copy(
no_deps_metadata.workspace_root.join("Cargo.toml"),
temp_dir.as_ref().join("Cargo.toml"),
)?;
// Optionally copy over the lock file
if no_deps_metadata.workspace_root.join("Cargo.lock").exists() {
fs::copy(
no_deps_metadata.workspace_root.join("Cargo.lock"),
temp_dir.as_ref().join("Cargo.lock"),
)?;
}
let source_dotcargo = cargo_workspace_root.join(".cargo");
let source_dotcargo_config = source_dotcargo.join("config");

EventModal.js

import { Form, Input, Row, Col, Modal, Upload, Icon, Table } from 'antd';
import React, {Component} from 'react';
import videojs from 'video.js';
import Flash from 'videojs-flash';
import 'video.js/dist/video-js.css';
import moment from 'moment';
import styles from './EventModal.less';
import { getLocalTimeF } from '../../../utils/time';
import {imageUrl, stringTransform} from '../../../utils/common'
const {TextArea} =Input;
const FormItem = Form.Item;
const formLayout = {
labelCol: {
span: 6
},
wrapperCol: {
span: 16
},
style: {
marginBottom: 10
}
}
const formLayout1 = {
labelCol: {
span: 2
},
wrapperCol: {
span: 22
},
style: {
marginBottom: 10
}
}
class EventModal extends Component {
constructor(props) {
super(props);
this.onOk = this.onOk.bind(this);
this.state = {
previewVisible: false,
previewImage: '',
}
this.videoNode = [];
this.player = []
}
onOk (){
const {form} = this.props;
this.props.dispatch({
type:'device/updateState',
payload:{
eventModal:false
}
})
}
handleCancel(){
this.props.dispatch({
type:'device/updateState',
payload:{
eventModal:false
}
})
}
handleImgCancel = () => this.setState({ previewVisible: false })
handlePreview = (file) => {
this.setState({
previewImage: file.url || file.thumbUrl,
previewVisible: true,
});
}
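// Not part of this excerpt: video.js players are normally disposed when the component
// unmounts. A typical cleanup (sketch, assuming this.player holds the videojs instances
// created in componentDidUpdate) might look like:
//
// componentWillUnmount() {
//   (this.player || []).forEach(p => {
//     if (p && typeof p.dispose === 'function') {
//       p.dispose();
//     }
//   });
//   this.player = [];
// }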
render() {
const {
form: {
getFieldDecorator,
setFieldsValue
},
dispatch,
eventDetailData
} = this.props;
const { previewVisible, previewImage } = this.state;
const { eventData, eventDescription, eventSource, eventTime, eventTypeName, image, video } = eventDetailData || {}
// Initialize the image and video URLs
// let fileList = [{
// uid: 1,
// name: '',
// status: 'done',
// url: 'http://pic.58pic.com/58pic/15/68/59/71X58PICNjx_1024.jpg' ,
// },{
// uid: 1,
// name: '',
// status: 'done',
// url: 'http://pic1.nipic.com/2008-12-30/200812308231244_2.jpg' ,
// },{
// uid: 1,
// name: '',
// status: 'done',
// url: 'http://gss0.baidu.com/-vo3dSag_xI4khGko9WTAnF6hhy/lvpics/w=1000/sign=a669f57d3a12b31bc76cc929b628377a/503d269759ee3d6d801feef140166d224f4ade2b.jpg' ,
// },{
// uid: 1,
// name: '',
// status: 'done',
// url: 'http://img.bimg.126.net/photo/31kQlCGP44-34Q5yxvoqmw==/5770237022569104952.jpg' ,
// }];
let fileList = ''
image && (fileList = image.split(',http').map(item => {
return {
uid: 1,
name: '',
status: 'done',
url: item.indexOf('http')>-1 ? stringTransform(item) : stringTransform('http'+item),
}
}))
let patcharr = [];
let tablepatch = eventData ? (eventData.length % 2) : 0;
if(tablepatch) {
patcharr.push(
<Col span={12}> </Col>
)
}
return (
<div>
{/* <Modal
width="900px"
maskClosable={false}
title="事件详情"
visible={true}
onOk={this.onOk}
onCancel={this.handleCancel.bind(this)}
okText={"关闭"}
cancelText={"取消"}
> */}
<Row>
<Col span={8}>
<FormItem {...formLayout} label="发生时间">
{getLocalTimeF(eventTime)}
</FormItem>
</Col>
<Col span={8}>
<FormItem {...formLayout} label="事件源">
{eventSource || ''}
</FormItem>
</Col>
<Col span={8}>
<FormItem {...formLayout} label="事件类型">
{eventTypeName || ''}
</FormItem>
</Col>
</Row>
<Row>
<Col span={24}>
<FormItem {...formLayout1} label="事件内容">
{eventDescription || ''}
</FormItem>
</Col>
</Row>
<Row>
<Col span={24}>
<FormItem {...formLayout1} label="事件数据" className={styles.item}>
{
(eventData && eventData.length>0) ? <Row type="flex">
{(eventData || []).map(v=>{
return <Col span={12}>
{v.dataName || ''}:{v.dataValue || ''}
</Col>
}).concat(patcharr)} </Row> : '暂无数据'
}
</FormItem>
</Col>
</Row>
<Row className={styles.imgStyle}>
<Col>
<FormItem {...formLayout1} label="附件">
{(fileList && fileList.length) ?
(<div>
<Upload
listType="picture-card"
fileList={fileList}
onPreview={this.handlePreview}
>
</Upload>
<Modal visible={previewVisible} footer={null} onCancel={this.handleImgCancel} maskClosable={false}>
<img alt="example" style={{ width: '100%' }} src={previewImage} />
</Modal>
</div>)
: ''}
{
video && video.split(',').map((item,index) => {
return (
<div className={styles.videoDiv} key={index}>
<div data-vjs-player>
<video ref={node => this.videoNode.push(node)} width='200px' height='200px' className='video-js vjs-default-skin vjs-big-play-centered'>
<p className='vjs-no-js'>暂不支持此种格式</p>
</video>
</div>
</div>
)
})
}
{/* <div>
<div data-vjs-player>
<video ref={node => this.videoNode = node} width='200px' height='200px' className='video-js vjs-default-skin vjs-big-play-centered'>
</video>
</div>
</div> */}
</FormItem>
</Col>
</Row>
{/* </Modal> */}
</div>
)
}
// componentWillUpdate() {
// this.videoNode = []
// }
componentDidUpdate(prevProps) {
const { eventDetailData } = this.props;
const { video } = eventDetailData || {}
if(prevProps.eventDetailData.video != video) {
let videoUrl = []
video && (videoUrl = video.replace(/&amp;/g, "&").split(','))
this.videoNode.length && (this.player = this.videoNode.map((item, index) => {
let videotype = '';
if(videoUrl[index].indexOf('rtmp')>(-1)) {
videotype = [
{
src: videoUrl[index],
type: 'rtmp/flv'
},{
src: videoUrl[index],
type: 'rtmp/mp4'
}
]
}
else if(videoUrl[index].indexOf('m3u8')>(-1)) {
videotype = [
{
src: videoUrl[index],
type: 'application/x-mpegURL'
}
]
}
else {
videotype = [
{
src: videoUrl[index],
type: 'video/mp4'
},{
src: videoUrl[index],
type: "video/webm"
},{
src: videoUrl[index],
type: 'video/ogg'
}
]
}
return videojs(item, {
autoplay: videoUrl[index].indexOf('rtmp')>(-1)?true:false,
controls: true,
bigPlayButton: videoUrl[index].indexOf('rtmp')>(-1)?false:true,
// children: [ 'ControlBar','MediaLoader', 'LoadingSpinner', 'TextTrackDisplay', 'PlayToggle', 'FullscreenToggle', 'TextTrackSettings'],
controlBar: {
volumePanel: false,
},
language: 'zh-CN',
techOrder: ['html5','flash'],
sources: videotype
})
}))
EventModal.js

import { Form, Input, Row, Col, Modal, Upload, Icon, Table } from 'antd';
import React, {Component} from 'react';
import videojs from 'video.js';
import Flash from 'videojs-flash';
import 'video.js/dist/video-js.css';
import moment from 'moment';
import styles from './EventModal.less';
import { getLocalTimeF } from '../../../utils/time';
import {imageUrl, stringTransform} from '../../../utils/common'
const {TextArea} =Input;
const FormItem = Form.Item;
const formLayout = {
labelCol: {
span: 6
},
wrapperCol: {
span: 16
},
style: {
marginBottom: 10
}
}
const formLayout1 = {
labelCol: {
span: 2
},
wrapperCol: {
span: 22
},
style: {
marginBottom: 10
}
}
class EventModal extends Component {
constructor(props) {
super(props);
this.onOk = this.onOk.bind(this);
this.state = {
previewVisible: false,
previewImage: '',
}
this.videoNode = [];
this.player = []
}
onOk (){
const {form} = this.props;
this.props.dispatch({
type:'device/updateState',
payload:{
eventModal:false
}
})
}
handleCancel(){
this.props.dispatch({
type:'device/updateState',
payload:{
eventModal:false
}
})
}
handleImgCancel = () => this.setState({ previewVisible: false })
handlePreview = (file) => {
this.setState({
previewImage: file.url || file.thumbUrl,
previewVisible: true,
});
}
render() {
const {
form: {
getFieldDecorator,
setFieldsValue
},
dispatch,
eventDetailData
} = this.props;
const { previewVisible, previewImage } = this.state;
const { eventData, eventDescription, eventSource, eventTime, eventTypeName, image, video } = eventDetailData || {}
// Initialize the image and video URLs
// let fileList = [{
// uid: 1,
// name: '',
// status: 'done',
// url: 'http://pic.58pic.com/58pic/15/68/59/71X58PICNjx_1024.jpg' ,
// },{
// uid: 1,
// name: '',
// status: 'done',
// url: 'http://pic1.nipic.com/2008-12-30/200812308231244_2.jpg' ,
// },{
// uid: 1,
// name: '',
// status: 'done',
// url: 'http://gss0.baidu.com/-vo3dSag_xI4khGko9WTAnF6hhy/lvpics/w=1000/sign=a669f57d3a12b31bc76cc929b628377a/503d269759ee3d6d801feef140166d224f4ade2b.jpg' ,
// },{
// uid: 1,
// name: '',
// status: 'done',
// url: 'http://img.bimg.126.net/photo/31kQlCGP44-34Q5yxvoqmw==/5770237022569104952.jpg' ,
// }];
let fileList = ''
image && (fileList = image.split(',http').map(item => {
return {
uid: 1,
name: '',
status: 'done',
url: item.indexOf('http')>-1 ? stringTransform(item) : stringTransform('http'+item),
}
}))
let patcharr = [];
let tablepatch = eventData ? (eventData.length % 2) : 0;
if(tablepatch) {
patcharr.push(
<Col span={12}> </Col>
)
}
return (
<div>
{/* <Modal
width="900px"
maskClosable={false}
title="事件详情"
visible={true}
onOk={this.onOk}
onCancel={this.handleCancel.bind(this)}
okText={"关闭"}
cancelText={"取消"}
> */}
<Row>
<Col span={8}>
<FormItem {...formLayout} label="发生时间">
{getLocalTimeF(eventTime)}
</FormItem>
</Col>
<Col span={8}>
<FormItem {...formLayout} label="事件源">
{eventSource || ''}
</FormItem>
</Col>
<Col span={8}>
<FormItem {...formLayout} label="事件类型">
{eventTypeName || ''}
</FormItem>
</Col>
</Row>
<Row>
<Col span={24}>
<FormItem {...formLayout1} label="事件内容">
{eventDescription || ''}
</FormItem>
</Col>
</Row>
<Row>
<Col span={24}>
<FormItem {...formLayout1} label="事件数据" className={styles.item}>
{
(eventData && eventData.length>0) ? <Row type="flex">
{(eventData || []).map(v=>{
return <Col span={12}>
{v.dataName || ''}:{v.dataValue || ''}
</Col>
}).concat(patcharr)} </Row> : '暂无数据'
}
</FormItem>
</Col>
</Row>
<Row className={styles.imgStyle}>
<Col>
<FormItem {...formLayout1} label="附件">
{(fileList && fileList.length) ?
(<div>
<Upload
listType="picture-card"
fileList={fileList}
onPreview={this.handlePreview}
>
</Upload>
<Modal visible={previewVisible} footer={null} onCancel={this.handleImgCancel} maskClosable={false}>
<img alt="example" style={{ width: '100%' }} src={previewImage} />
</Modal>
</div>)
: ''}
{
video && video.split(',').map((item,index) => {
return (
<div className={styles.videoDiv} key={index}>
<div data-vjs-player>
<video ref={node => this.videoNode.push(node)} width='200px' height='200px' className='video-js vjs-default-skin vjs-big-play-centered'>
<p className='vjs-no-js'>暂不支持此种格式</p>
</video>
</div>
</div>
)
})
}
{/* <div>
<div data-vjs-player>
<video ref={node => this.videoNode = node} width='200px' height='200px' className='video-js vjs-default-skin vjs-big-play-centered'>
</video>
</div>
</div> */}
</FormItem>
</Col>
</Row>
{/* </Modal> */}
</div>
)
}
// componentWillUpdate() {
// this.videoNode = []
// }
componentDidUpdate(prevProps) {
const { eventDetailData } = this.props;
const { video } = eventDetailData || {}
if(prevProps.eventDetailData.video != video) {
let videoUrl = []
video && (videoUrl = video.replace(/&amp;/g, "&").split(','))
this.videoNode.length && (this.player = this.videoNode.map((item, index) => {
let videotype = '';
if(videoUrl[index].indexOf('rtmp')>(-1)) {
videotype = [
{
src: videoUrl[index],
type: 'rtmp/flv'
},{
src: videoUrl[index],
type: 'rtmp/mp4'
]
}
else if(videoUrl[index].indexOf('m3u8')>(-1)) {
videotype = [
{
src: videoUrl[index],
type: 'application/x-mpegURL'
}
]
}
else {
videotype = [
{
src: videoUrl[index],
type: 'video/mp4'
},{
src: videoUrl[index],
type: "video/webm"
},{
src: videoUrl[index],
type: 'video/ogg'
}
]
}
return videojs(item, {
autoplay: videoUrl[index].indexOf('rtmp')>(-1)?true:false,
controls: true,
bigPlayButton: videoUrl[index].indexOf('rtmp')>(-1)?false:true,
// children: [ 'ControlBar','MediaLoader', 'LoadingSpinner', 'TextTrackDisplay', 'PlayToggle', 'FullscreenToggle', 'TextTrackSettings'],
controlBar: {
volumePanel: false,
},
language: 'zh-CN',
techOrder: ['html5','flash'],
sources: videotype
})
}))
| type: 'rtmp/mp4' | random_line_split |
EventModal.js

import { Form, Input, Row, Col, Modal, Upload, Icon, Table } from 'antd';
import React, {Component} from 'react';
import videojs from 'video.js';
import Flash from 'videojs-flash';
import 'video.js/dist/video-js.css';
import moment from 'moment';
import styles from './EventModal.less';
import { getLocalTimeF } from '../../../utils/time';
import {imageUrl, stringTransform} from '../../../utils/common'
const {TextArea} =Input;
const FormItem = Form.Item;
const formLayout = {
labelCol: {
span: 6
},
wrapperCol: {
span: 16
},
style: {
marginBottom: 10
}
}
const formLayout1 = {
labelCol: {
span: 2
},
wrapperCol: {
span: 22
},
style: {
marginBottom: 10
}
}
class EventModal extends Component {
constructor(props) {
super(props);
this.onOk = this.onOk.bind(this);
this.state = {
previewVisible: false,
previewImage: '',
}
this.videoNode = [];
this.player = []
}
onOk (){
const {form} = this.props;
this.props.dispatch({
type:'device/updateState',
payload:{
eventModal:false
}
})
}
handleCancel(){
this.props.dispatch({
type:'device/updateState',
payload:{
eventModal:false
}
})
}
handleImgCancel = () => this.setState({ previewVisible: false })
handlePreview = (file) => {
this.setState({
previewImage: file.url || file.thumbUrl,
previewVisible: true,
});
}
render() {
const {
form: {
getFieldDecorator,
setFieldsValue
},
dispatch,
eventDetailData
} = this.props;
const { previewVisible, previewImage } = this.state;
const { eventData, eventDescription, eventSource, eventTime, eventTypeName, image, video } = eventDetailData || {}
// Initialize the image and video URLs
// let fileList = [{
// uid: 1,
// name: '',
// status: 'done',
// url: 'http://pic.58pic.com/58pic/15/68/59/71X58PICNjx_1024.jpg' ,
// },{
// uid: 1,
// name: '',
// status: 'done',
// url: 'http://pic1.nipic.com/2008-12-30/200812308231244_2.jpg' ,
// },{
// uid: 1,
// name: '',
// status: 'done',
// url: 'http://gss0.baidu.com/-vo3dSag_xI4khGko9WTAnF6hhy/lvpics/w=1000/sign=a669f57d3a12b31bc76cc929b628377a/503d269759ee3d6d801feef140166d224f4ade2b.jpg' ,
// },{
// uid: 1,
// name: '',
// status: 'done',
// url: 'http://img.bimg.126.net/photo/31kQlCGP44-34Q5yxvoqmw==/5770237022569104952.jpg' ,
// }];
let fileList = ''
image && (fileList = image.split(',http').map(item => {
return {
uid: 1,
name: '',
status: 'done',
url: item.indexOf('http')>-1 ? stringTransform(item) : stringTransform('http'+item),
}
}))
let patcharr = [];
let tablepatch = eventData ? (eventData.length % 2) : 0;
if(tablepatch) {
patcharr.push(
<Col span={12}> </Col>
)
}
return (
<div>
{/* <Modal
width="900px"
maskClosable={false}
title="事件详情"
visible={true}
onOk={this.onOk}
onCancel={this.handleCancel.bind(this)}
okText={"关闭"}
cancelText={"取消"}
> */}
<Row>
<Col span={8}>
<FormItem {...formLayout} label="发生时间">
{getLocalTimeF(eventTime)}
</FormItem>
</Col>
<Col span={8}>
<FormItem {...formLayout} label="事件源">
{eventSource || ''}
</FormItem>
</Col>
<Col span={8}>
<FormItem {...formLayout} label="事件类型">
{eventTypeName || ''}
</FormItem>
</Col>
</Row>
<Row>
<Col span={24}>
<FormItem {...formLayout1} label="事件内容">
{eventDescription || ''}
</FormItem>
</Col>
</Row>
<Row>
<Col span={24}>
<FormItem {...formLayout1} label="事件数据" className={styles.item}>
{
(eventData && eventData.length>0) ? <Row type="flex">
{(eventData || []).map(v=>{
return <Col span={12}>
{v.dataName || ''}:{v.dataValue || ''}
</Col>
}).concat(patcharr)} </Row> : '暂无数据'
}
</FormItem>
</Col>
</Row>
<Row className={styles.imgStyle}>
<Col>
<FormItem {...formLayout1} label="附件">
{(fileList && fileList.length) ?
(<div>
<Upload
listType="picture-card"
fileList={fileList}
onPreview={this.handlePreview}
>
</Upload>
<Modal visible={previewVisible} footer={null} onCancel={this.handleImgCancel} maskClosable={false}>
<img alt="example" style={{ width: '100%' }} src={previewImage} />
</Modal>
</div>)
: ''}
{
video && video.split(',').map((item,index) => {
return (
<div className={styles.videoDiv} key={index}>
<div data-vjs-player>
<video ref={node => this.videoNode.push(node)} width='200px' height='200px' className='video-js vjs-default-skin vjs-big-play-centered'>
<p className='vjs-no-js'>暂不支持此种格式</p>
</video>
</div>
</div>
)
})
}
{/* <div>
<div data-vjs-player>
<video ref={node => this.videoNode = node} width='200px' height='200px' className='video-js vjs-default-skin vjs-big-play-centered'>
</video>
</div>
</div> */}
</FormItem>
</Col>
</Row>
{/* </Modal> */}
</div>
)
}
// componentWillUpdate() {
// this.videoNode = []
// }
componentDidUpdate(prevProps) {
const { eventDetailData } = this.props;
const { video } = eventDetailData || {}
if(prevProps.eventDetailData.video != video) {
let videoUrl = []
video && (videoUrl = video.replace(/&amp;/g, "&").split(','))
this.videoNode.length && (this.player = this.videoNode.map((item, index) => {
let videotype = '';
if(videoUrl[index].indexOf('rtmp')>(-1)) {
videotype = [
{
src: videoUrl[index],
type: 'rtmp/flv'
},{
src: videoUrl[index],
type: 'rtmp/mp4'
}
]
}
else if(videoUrl[index].indexOf('m3u8')>(-1)) {
videotype = [
{
src: videoUrl[index],
type: 'application/x-mpegURL'
}
]
}
else {
videotype = [
{
src: videoUrl[index],
type: 'video/mp4'
},{
src: videoUrl[index],
type: "video/webm"
},{
src: videoUrl[index],
type: 'video/ogg'
}
]
}
return videojs(item, {
autoplay: videoUrl[index].indexOf('rtmp')>(-1)?true:false,
controls: true,
bigPlayButton: videoUrl[index].indexOf('rtmp')>(-1)?false:true,
// children: [ 'ControlBar','MediaLoader', 'LoadingSpinner', 'TextTrackDisplay', 'PlayToggle', 'FullscreenToggle', 'TextTrackSettings'],
controlBar: {
volumePanel: false,
},
language: 'zh-CN',
techOrder: ['html5','flash'],
sources: videotype
})
| {
this.props.dispatch({
type:'device/updateState',
payload:{
eventModal:false
}
})
} | identifier_body |
kieapp_types.go |
// Important: Run "operator-sdk generate k8s" to regenerate code after modifying this file
// KIE environment type to deploy (prod, authoring, trial, etc)
Environment string `json:"environment,omitempty"`
KieDeployments int `json:"kieDeployments"` // Number of KieServer DeploymentConfigs (defaults to 1)
RhpamRegistry KieAppRegistry `json:"rhpamRegistry,omitempty"`
Objects KieAppObjects `json:"objects,omitempty"`
CommonConfig CommonConfig `json:"commonConfig,omitempty"`
Auth KieAppAuthObject `json:"auth,omitempty"`
}
// KieAppRegistry defines the registry that should be used for rhpam images
type KieAppRegistry struct {
Registry string `json:"registry,omitempty"` // Registry to use, can also be set w/ "REGISTRY" env variable
Insecure bool `json:"insecure"` // Specify whether registry is insecure, can also be set w/ "INSECURE" env variable
}
// KieAppStatus defines the observed state of KieApp
type KieAppStatus struct {
// INSERT ADDITIONAL STATUS FIELD - define observed state of cluster
// Important: Run "operator-sdk generate k8s" to regenerate code after modifying this file
Status string `json:"status,omitempty"`
Deployments []string `json:"deployments,omitempty"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// KieApp is the Schema for the kieapps API
// +k8s:openapi-gen=true
type KieApp struct {
metav1.TypeMeta `json:",inline"`
metav1.ObjectMeta `json:"metadata,omitempty"`
Spec KieAppSpec `json:"spec,omitempty"`
Status KieAppStatus `json:"status,omitempty"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// KieAppList contains a list of KieApp
type KieAppList struct {
metav1.TypeMeta `json:",inline"`
metav1.ListMeta `json:"metadata,omitempty"`
Items []KieApp `json:"items"`
}
type KieAppObjects struct {
// Business Central container configs
Console KieAppObject `json:"console,omitempty"`
// KIE Server container configs
Server KieAppObject `json:"server,omitempty"`
// Smartrouter container configs
Smartrouter KieAppObject `json:"smartrouter,omitempty"`
// S2I Build configuration
Builds []KieAppBuildObject `json:"builds,omitempty"`
}
type KieAppObject struct {
Env []corev1.EnvVar `json:"env,omitempty"`
Resources corev1.ResourceRequirements `json:"resources"`
}
type Environment struct {
Console CustomObject `json:"console,omitempty"`
Smartrouter CustomObject `json:"smartrouter,omitempty"`
Servers []CustomObject `json:"servers,omitempty"`
Others []CustomObject `json:"others,omitempty"`
}
type CustomObject struct {
Omit bool `json:"omit,omitempty"`
PersistentVolumeClaims []corev1.PersistentVolumeClaim `json:"persistentVolumeClaims,omitempty"`
ServiceAccounts []corev1.ServiceAccount `json:"serviceAccounts,omitempty"`
Secrets []corev1.Secret `json:"secrets,omitempty"`
Roles []rbacv1.Role `json:"roles,omitempty"`
RoleBindings []rbacv1.RoleBinding `json:"roleBindings,omitempty"`
DeploymentConfigs []appsv1.DeploymentConfig `json:"deploymentConfigs,omitempty"`
BuildConfigs []buildv1.BuildConfig `json:"buildConfigs,omitempty"`
ImageStreams []oimagev1.ImageStream `json:"imageStreams,omitempty"`
Services []corev1.Service `json:"services,omitempty"`
Routes []routev1.Route `json:"routes,omitempty"`
}
type KieAppBuildObject struct {
KieServerContainerDeployment string `json:"kieServerContainerDeployment,omitempty"`
GitSource GitSource `json:"gitSource,omitempty"`
Webhooks []WebhookSecret `json:"webhooks,omitempty"`
}
type GitSource struct {
URI string `json:"uri,omitempty"`
Reference string `json:"reference,omitempty"`
ContextDir string `json:"contextDir,omitempty"`
}
type WebhookType string
const (
GitHubWebhook WebhookType = "GitHub"
GenericWebhook WebhookType = "Generic"
)
type WebhookSecret struct {
Type WebhookType `json:"type,omitempty"`
Secret string `json:"secret,omitempty"`
}
type KieAppAuthObject struct {
SSO *SSOAuthConfig `json:"sso,omitempty"`
LDAP *LDAPAuthConfig `json:"ldap,omitempty"`
RoleMapper *RoleMapperAuthConfig `json:"roleMapper,omitempty"`
}
type SSOAuthConfig struct {
URL string `json:"url,omitempty"`
Realm string `json:"realm,omitempty"`
AdminUser string `json:"adminUser,omitempty"`
AdminPassword string `json:"adminPassword,omitempty"`
DisableSSLCertValidation bool `json:"disableSSLCertValication,omitempty"`
PrincipalAttribute string `json:"principalAttribute,omitempty"`
Clients SSOAuthClients `json:"clients,omitempty"`
}
type SSOAuthClients struct {
Console SSOAuthClient `json:"console,omitempty"`
Servers []SSOAuthClient `json:"servers,omitempty"`
}
type SSOAuthClient struct {
Name string `json:"name,omitempty"`
Secret string `json:"secret,omitempty"`
HostnameHTTP string `json:"hostnameHTTP,omitempty"`
HostnameHTTPS string `json:"hostnameHTTPS,omitempty"`
}
type LDAPAuthConfig struct {
URL string `json:"url,omitempty"`
BindDN string `json:"bindDN,omitempty"`
BindCredential string `json:"bindCredential,omitempty"`
JAASSecurityDomain string `json:"jaasSecurityDomain,omitempty"`
BaseCtxDN string `json:"baseCtxDN,omitempty"`
BaseFilter string `json:"baseFilter,omitempty"`
SearchScope SearchScopeType `json:"searchScope,omitempty"`
SearchTimeLimit int32 `json:"searchTimeLimit,omitempty"`
DistinguishedNameAttribute string `json:"distinguishedNameAttribute,omitempty"`
ParseUsername bool `json:"parseUsername,omitempty"`
UsernameBeginString string `json:"usernameBeginString,omitempty"`
UsernameEndString string `json:"usernameEndString,omitempty"`
RoleAttributeID string `json:"roleAttributeID,omitempty"`
RolesCtxDN string `json:"rolesCtxDN,omitempty"`
RoleFilter string `json:"roleFilter,omitempty"`
RoleRecursion int16 `json:"roleRecursion,omitempty"`
DefaultRole string `json:"defaultRole,omitempty"`
RoleNameAttributeID string `json:"roleNameAttributeID,omitempty"`
ParseRoleNameFromDN bool `json:"parseRoleNameFromDN,omitempty"`
RoleAttributeIsDN bool `json:"roleAttributeIsDN,omitempty"`
ReferralUserAttributeIDToCheck string `json:"referralUserAttributeIDToCheck,omitempty"`
}
type SearchScopeType string
const (
SubtreeSearchScope SearchScopeType = "SUBTREE_SCOPE"
ObjectSearchScope SearchScopeType = "OBJECT_SCOPE"
OneLevelSearchScope SearchScopeType = "ONELEVEL_SCOPE"
)
type RoleMapperAuthConfig struct {
RolesProperties string `json:"rolesProperties,omitempty"`
ReplaceRole string `json:"replaceRole,omitempty"`
}
type OpenShiftObject interface {
metav1.Object
runtime.Object
}
type EnvTemplate struct {
Template `json:",inline"`
ServerCount []Template `json:"serverCount,omitempty"`
}
type Template struct {
*CommonConfig
ApplicationName string `json:"applicationName,omitempty"`
GitSource GitSource `json:"gitSource,omitempty"`
GitHubWebhookSecret string `json:"githubWebhookSecret,omitempty"`
GenericWebhookSecret string `json:"genericWebhookSecret,omitempty"`
KieServerContainerDeployment string `json:"kieServerContainerDeployment,omitempty"`
Auth AuthTemplate `json:"auth,omitempty"`
}
type CommonConfig struct {
Version string `json:"version,omitempty"`
ImageTag string `json:"imageTag,omitempty"`
ConsoleName string `json:"consoleName,omitempty"`
ConsoleImage string `json:"consoleImage,omitempty"`
KeyStorePassword string `json:"keyStorePassword,omitempty"`
AdminPassword string `json:"adminPassword,omitempty"`
ControllerPassword string `json:"controllerPassword,omitempty"`
ServerPassword string `json:"serverPassword,omitempty"`
MavenPassword string `json:"mavenPassword,omitempty"`
}
type AuthTemplate struct {
SSO SSOAuthConfig `json:"sso,omitempty"`
LDAP LDAPAuthConfig `json:"ldap,omitempty"`
RoleMapper RoleMapperAuthConfig `json:"roleMapper,omitempty"`
}
type PlatformService interface {
Create(ctx context.Context, obj runtime.Object) error
Get(ctx context.Context, key client.ObjectKey, obj runtime.Object) error
List(ctx context.Context, opts *client.ListOptions, list runtime.Object) error
Update(ctx context.Context, obj runtime.Object) error
GetCached(ctx context.Context, key client.ObjectKey, obj runtime.Object) error
ImageStreamTags(namespace string) imagev1.ImageStreamTagInterface
GetScheme() *runtime.Scheme
IsMockService() bool
}
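// Example (sketch): how these types compose into a KieApp custom resource in Go. All field
// values here are illustrative only.
//
// app := KieApp{
// 	ObjectMeta: metav1.ObjectMeta{Name: "rhpam-trial", Namespace: "kie"},
// 	Spec: KieAppSpec{
// 		Environment:    "trial",
// 		KieDeployments: 1,
// 		Auth: KieAppAuthObject{
// 			SSO: &SSOAuthConfig{URL: "https://sso.example.com", Realm: "rhpam"},
// 		},
// 	},
// }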
func init() {
SchemeBuilder.Register(&KieApp{}, &KieAppList{})
}
kieapp_types.go

// KieAppSpec defines the desired state of KieApp
type KieAppSpec struct {
// INSERT ADDITIONAL SPEC FIELDS - desired state of cluster
// Important: Run "operator-sdk generate k8s" to regenerate code after modifying this file
// KIE environment type to deploy (prod, authoring, trial, etc)
Environment string `json:"environment,omitempty"`
KieDeployments int `json:"kieDeployments"` // Number of KieServer DeploymentConfigs (defaults to 1)
RhpamRegistry KieAppRegistry `json:"rhpamRegistry,omitempty"`
Objects KieAppObjects `json:"objects,omitempty"`
CommonConfig CommonConfig `json:"commonConfig,omitempty"`
Auth KieAppAuthObject `json:"auth,omitempty"`
}
// KieAppRegistry defines the registry that should be used for rhpam images
type KieAppRegistry struct {
Registry string `json:"registry,omitempty"` // Registry to use, can also be set w/ "REGISTRY" env variable
Insecure bool `json:"insecure"` // Specify whether registry is insecure, can also be set w/ "INSECURE" env variable
}
// KieAppStatus defines the observed state of KieApp
type KieAppStatus struct {
// INSERT ADDITIONAL STATUS FIELD - define observed state of cluster
// Important: Run "operator-sdk generate k8s" to regenerate code after modifying this file
Status string `json:"status,omitempty"`
Deployments []string `json:"deployments,omitempty"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// KieApp is the Schema for the kieapps API
// +k8s:openapi-gen=true
type KieApp struct {
metav1.TypeMeta `json:",inline"`
metav1.ObjectMeta `json:"metadata,omitempty"`
Spec KieAppSpec `json:"spec,omitempty"`
Status KieAppStatus `json:"status,omitempty"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// KieAppList contains a list of KieApp
type KieAppList struct {
metav1.TypeMeta `json:",inline"`
metav1.ListMeta `json:"metadata,omitempty"`
Items []KieApp `json:"items"`
}
type KieAppObjects struct {
// Business Central container configs
Console KieAppObject `json:"console,omitempty"`
// KIE Server container configs
Server KieAppObject `json:"server,omitempty"`
// Smartrouter container configs
Smartrouter KieAppObject `json:"smartrouter,omitempty"`
// S2I Build configuration
Builds []KieAppBuildObject `json:"builds,omitempty"`
}
type KieAppObject struct {
Env []corev1.EnvVar `json:"env,omitempty"`
Resources corev1.ResourceRequirements `json:"resources"`
}
type Environment struct {
Console CustomObject `json:"console,omitempty"`
Smartrouter CustomObject `json:"smartrouter,omitempty"`
Servers []CustomObject `json:"servers,omitempty"`
Others []CustomObject `json:"others,omitempty"`
}
type CustomObject struct {
Omit bool `json:"omit,omitempty"`
PersistentVolumeClaims []corev1.PersistentVolumeClaim `json:"persistentVolumeClaims,omitempty"`
ServiceAccounts []corev1.ServiceAccount `json:"serviceAccounts,omitempty"`
Secrets []corev1.Secret `json:"secrets,omitempty"`
Roles []rbacv1.Role `json:"roles,omitempty"`
RoleBindings []rbacv1.RoleBinding `json:"roleBindings,omitempty"`
DeploymentConfigs []appsv1.DeploymentConfig `json:"deploymentConfigs,omitempty"`
BuildConfigs []buildv1.BuildConfig `json:"buildConfigs,omitempty"`
ImageStreams []oimagev1.ImageStream `json:"imageStreams,omitempty"`
Services []corev1.Service `json:"services,omitempty"`
Routes []routev1.Route `json:"routes,omitempty"`
}
type KieAppBuildObject struct {
KieServerContainerDeployment string `json:"kieServerContainerDeployment,omitempty"`
GitSource GitSource `json:"gitSource,omitempty"`
Webhooks []WebhookSecret `json:"webhooks,omitempty"`
}
type GitSource struct {
URI string `json:"uri,omitempty"`
Reference string `json:"reference,omitempty"`
ContextDir string `json:"contextDir,omitempty"`
}
type WebhookType string
const (
GitHubWebhook WebhookType = "GitHub"
GenericWebhook WebhookType = "Generic"
)
type WebhookSecret struct {
Type WebhookType `json:"type,omitempty"`
Secret string `json:"secret,omitempty"`
}
type KieAppAuthObject struct {
SSO *SSOAuthConfig `json:"sso,omitempty"`
LDAP *LDAPAuthConfig `json:"ldap,omitempty"`
RoleMapper *RoleMapperAuthConfig `json:"roleMapper,omitempty"`
}
type SSOAuthConfig struct {
URL string `json:"url,omitempty"`
Realm string `json:"realm,omitempty"`
AdminUser string `json:"adminUser,omitempty"`
AdminPassword string `json:"adminPassword,omitempty"`
DisableSSLCertValidation bool `json:"disableSSLCertValication,omitempty"`
PrincipalAttribute string `json:"principalAttribute,omitempty"`
Clients SSOAuthClients `json:"clients,omitempty"`
}
type SSOAuthClients struct {
Console SSOAuthClient `json:"console,omitempty"`
Servers []SSOAuthClient `json:"servers,omitempty"`
}
type SSOAuthClient struct {
Name string `json:"name,omitempty"`
Secret string `json:"secret,omitempty"`
HostnameHTTP string `json:"hostnameHTTP,omitempty"`
HostnameHTTPS string `json:"hostnameHTTPS,omitempty"`
}
type LDAPAuthConfig struct {
URL string `json:"url,omitempty"`
BindDN string `json:"bindDN,omitempty"`
BindCredential string `json:"bindCredential,omitempty"`
JAASSecurityDomain string `json:"jaasSecurityDomain,omitempty"`
BaseCtxDN string `json:"baseCtxDN,omitempty"`
BaseFilter string `json:"baseFilter,omitempty"`
SearchScope SearchScopeType `json:"searchScope,omitempty"`
SearchTimeLimit int32 `json:"searchTimeLimit,omitempty"`
DistinguishedNameAttribute string `json:"distinguishedNameAttribute,omitempty"`
ParseUsername bool `json:"parseUsername,omitempty"`
UsernameBeginString string `json:"usernameBeginString,omitempty"`
UsernameEndString string `json:"usernameEndString,omitempty"`
RoleAttributeID string `json:"roleAttributeID,omitempty"`
RolesCtxDN string `json:"rolesCtxDN,omitempty"`
RoleFilter string `json:"roleFilter,omitempty"`
RoleRecursion int16 `json:"roleRecursion,omitempty"`
DefaultRole string `json:"defaultRole,omitempty"`
RoleNameAttributeID string `json:"roleNameAttributeID,omitempty"`
ParseRoleNameFromDN bool `json:"parseRoleNameFromDN,omitempty"`
RoleAttributeIsDN bool `json:"roleAttributeIsDN,omitempty"`
ReferralUserAttributeIDToCheck string `json:"referralUserAttributeIDToCheck,omitempty"`
}
type SearchScopeType string
const (
SubtreeSearchScope SearchScopeType = "SUBTREE_SCOPE"
ObjectSearchScope SearchScopeType = "OBJECT_SCOPE"
OneLevelSearchScope SearchScopeType = "ONELEVEL_SCOPE"
)
type RoleMapperAuthConfig struct {
RolesProperties string `json:"rolesProperties,omitempty"`
ReplaceRole string `json:"replaceRole,omitempty"`
}
type OpenShiftObject interface {
metav1.Object
runtime.Object
}
type EnvTemplate struct {
Template `json:",inline"`
ServerCount []Template `json:"serverCount,omitempty"`
}
type Template struct {
*CommonConfig
ApplicationName string `json:"applicationName,omitempty"`
GitSource GitSource `json:"gitSource,omitempty"`
GitHubWebhookSecret string `json:"githubWebhookSecret,omitempty"`
GenericWebhookSecret string `json:"genericWebhookSecret,omitempty"`
KieServerContainerDeployment string `json:"kieServerContainerDeployment,omitempty"`
Auth AuthTemplate `json:"auth,omitempty"`
}
type CommonConfig struct {
Version string `json:"version,omitempty"`
ImageTag string `json:"imageTag,omitempty"`
ConsoleName string `json:"consoleName,omitempty"`
ConsoleImage string `json:"consoleImage,omitempty"`
KeyStorePassword string `json:"keyStorePassword,omitempty"`
AdminPassword string `json:"adminPassword,omitempty"`
ControllerPassword string `json:"controllerPassword,omitempty"`
ServerPassword string `json:"serverPassword,omitempty"`
MavenPassword string `json:"mavenPassword,omitempty"`
}
type AuthTemplate struct {
SSO SSOAuthConfig `json:"sso,omitempty"`
LDAP LDAPAuthConfig `json:"ldap,omitempty"`
RoleMapper RoleMapperAuthConfig `json:"roleMapper,omitempty"`
}
type PlatformService interface {
Create(ctx context.Context, obj runtime.Object) error
Get(ctx context.Context, key client.ObjectKey, obj runtime.Object) error
List(ctx context.Context, opts *client.ListOptions, list runtime.Object) error
Update(ctx context.Context, obj runtime.Object) error
GetCached(ctx context.Context, key client.ObjectKey, obj runtime.Object) error
ImageStreamTags(namespace string) imagev1.ImageStreamTagInterface
GetScheme() *runtime.Scheme
IsMockService() bool
}
func init() {
SchemeBuilder.Register(&KieApp{}, &KieAppList{})
}
kieapp_types.go

imagev1 "github.com/openshift/client-go/image/clientset/versioned/typed/image/v1"
corev1 "k8s.io/api/core/v1"
rbacv1 "k8s.io/api/rbac/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
)
// EDIT THIS FILE! THIS IS SCAFFOLDING FOR YOU TO OWN!
// NOTE: json tags are required. Any new fields you add must have json tags for the fields to be serialized.
// KieAppSpec defines the desired state of KieApp
type KieAppSpec struct {
// INSERT ADDITIONAL SPEC FIELDS - desired state of cluster
// Important: Run "operator-sdk generate k8s" to regenerate code after modifying this file
// KIE environment type to deploy (prod, authoring, trial, etc)
Environment string `json:"environment,omitempty"`
KieDeployments int `json:"kieDeployments"` // Number of KieServer DeploymentConfigs (defaults to 1)
RhpamRegistry KieAppRegistry `json:"rhpamRegistry,omitempty"`
Objects KieAppObjects `json:"objects,omitempty"`
CommonConfig CommonConfig `json:"commonConfig,omitempty"`
Auth KieAppAuthObject `json:"auth,omitempty"`
}
// KieAppRegistry defines the registry that should be used for rhpam images
type KieAppRegistry struct {
Registry string `json:"registry,omitempty"` // Registry to use, can also be set w/ "REGISTRY" env variable
Insecure bool `json:"insecure"` // Specify whether registry is insecure, can also be set w/ "INSECURE" env variable
}
// KieAppStatus defines the observed state of KieApp
type KieAppStatus struct {
// INSERT ADDITIONAL STATUS FIELD - define observed state of cluster
// Important: Run "operator-sdk generate k8s" to regenerate code after modifying this file
Status string `json:"status,omitempty"`
Deployments []string `json:"deployments,omitempty"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// KieApp is the Schema for the kieapps API
// +k8s:openapi-gen=true
type KieApp struct {
metav1.TypeMeta `json:",inline"`
metav1.ObjectMeta `json:"metadata,omitempty"`
Spec KieAppSpec `json:"spec,omitempty"`
Status KieAppStatus `json:"status,omitempty"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// KieAppList contains a list of KieApp
type KieAppList struct {
metav1.TypeMeta `json:",inline"`
metav1.ListMeta `json:"metadata,omitempty"`
Items []KieApp `json:"items"`
}
type KieAppObjects struct {
// Business Central container configs
Console KieAppObject `json:"console,omitempty"`
// KIE Server container configs
Server KieAppObject `json:"server,omitempty"`
// Smartrouter container configs
Smartrouter KieAppObject `json:"smartrouter,omitempty"`
// S2I Build configuration
Builds []KieAppBuildObject `json:"builds,omitempty"`
}
type KieAppObject struct {
Env []corev1.EnvVar `json:"env,omitempty"`
Resources corev1.ResourceRequirements `json:"resources"`
}
type Environment struct {
Console CustomObject `json:"console,omitempty"`
Smartrouter CustomObject `json:"smartrouter,omitempty"`
Servers []CustomObject `json:"servers,omitempty"`
Others []CustomObject `json:"others,omitempty"`
}
type CustomObject struct {
Omit bool `json:"omit,omitempty"`
PersistentVolumeClaims []corev1.PersistentVolumeClaim `json:"persistentVolumeClaims,omitempty"`
ServiceAccounts []corev1.ServiceAccount `json:"serviceAccounts,omitempty"`
Secrets []corev1.Secret `json:"secrets,omitempty"`
Roles []rbacv1.Role `json:"roles,omitempty"`
RoleBindings []rbacv1.RoleBinding `json:"roleBindings,omitempty"`
BuildConfigs []buildv1.BuildConfig `json:"buildConfigs,omitempty"`
ImageStreams []oimagev1.ImageStream `json:"imageStreams,omitempty"`
Services []corev1.Service `json:"services,omitempty"`
Routes []routev1.Route `json:"routes,omitempty"`
}
type KieAppBuildObject struct {
KieServerContainerDeployment string `json:"kieServerContainerDeployment,omitempty"`
GitSource GitSource `json:"gitSource,omitempty"`
Webhooks []WebhookSecret `json:"webhooks,omitempty"`
}
type GitSource struct {
URI string `json:"uri,omitempty"`
Reference string `json:"reference,omitempty"`
ContextDir string `json:"contextDir,omitempty"`
}
type WebhookType string
const (
GitHubWebhook WebhookType = "GitHub"
GenericWebhook WebhookType = "Generic"
)
type WebhookSecret struct {
Type WebhookType `json:"type,omitempty"`
Secret string `json:"secret,omitempty"`
}
type KieAppAuthObject struct {
SSO *SSOAuthConfig `json:"sso,omitempty"`
LDAP *LDAPAuthConfig `json:"ldap,omitempty"`
RoleMapper *RoleMapperAuthConfig `json:"roleMapper,omitempty"`
}
type SSOAuthConfig struct {
URL string `json:"url,omitempty"`
Realm string `json:"realm,omitempty"`
AdminUser string `json:"adminUser,omitempty"`
AdminPassword string `json:"adminPassword,omitempty"`
DisableSSLCertValidation bool `json:"disableSSLCertValication,omitempty"`
PrincipalAttribute string `json:"principalAttribute,omitempty"`
Clients SSOAuthClients `json:"clients,omitempty"`
}
type SSOAuthClients struct {
Console SSOAuthClient `json:"console,omitempty"`
Servers []SSOAuthClient `json:"servers,omitempty"`
}
type SSOAuthClient struct {
Name string `json:"name,omitempty"`
Secret string `json:"secret,omitempty"`
HostnameHTTP string `json:"hostnameHTTP,omitempty"`
HostnameHTTPS string `json:"hostnameHTTPS,omitempty"`
}
type LDAPAuthConfig struct {
URL string `json:"url,omitempty"`
BindDN string `json:"bindDN,omitempty"`
BindCredential string `json:"bindCredential,omitempty"`
JAASSecurityDomain string `json:"jaasSecurityDomain,omitempty"`
BaseCtxDN string `json:"baseCtxDN,omitempty"`
BaseFilter string `json:"baseFilter,omitempty"`
SearchScope SearchScopeType `json:"searchScope,omitempty"`
SearchTimeLimit int32 `json:"searchTimeLimit,omitempty"`
DistinguishedNameAttribute string `json:"distinguishedNameAttribute,omitempty"`
ParseUsername bool `json:"parseUsername,omitempty"`
UsernameBeginString string `json:"usernameBeginString,omitempty"`
UsernameEndString string `json:"usernameEndString,omitempty"`
RoleAttributeID string `json:"roleAttributeID,omitempty"`
RolesCtxDN string `json:"rolesCtxDN,omitempty"`
RoleFilter string `json:"roleFilter,omitempty"`
RoleRecursion int16 `json:"roleRecursion,omitempty"`
DefaultRole string `json:"defaultRole,omitempty"`
RoleNameAttributeID string `json:"roleNameAttributeID,omitempty"`
ParseRoleNameFromDN bool `json:"parseRoleNameFromDN,omitempty"`
RoleAttributeIsDN bool `json:"roleAttributeIsDN,omitempty"`
ReferralUserAttributeIDToCheck string `json:"referralUserAttributeIDToCheck,omitempty"`
}
type SearchScopeType string
const (
SubtreeSearchScope SearchScopeType = "SUBTREE_SCOPE"
ObjectSearchScope SearchScopeType = "OBJECT_SCOPE"
OneLevelSearchScope SearchScopeType = "ONELEVEL_SCOPE"
)
type RoleMapperAuthConfig struct {
RolesProperties string `json:"rolesProperties,omitempty"`
ReplaceRole string `json:"replaceRole,omitempty"`
}
type OpenShiftObject interface {
metav1.Object
runtime.Object
}
type EnvTemplate struct {
Template `json:",inline"`
ServerCount []Template `json:"serverCount,omitempty"`
}
type Template struct {
*CommonConfig
ApplicationName string `json:"applicationName,omitempty"`
GitSource GitSource `json:"gitSource,omitempty"`
GitHubWebhookSecret string `json:"githubWebhookSecret,omitempty"`
GenericWebhookSecret string `json:"genericWebhookSecret,omitempty"`
KieServerContainerDeployment string `json:"kieServerContainerDeployment,omitempty"`
Auth AuthTemplate `json:"auth,omitempty"`
}
type CommonConfig struct {
Version string `json:"version,omitempty"`
ImageTag string `json:"imageTag,omitempty"`
ConsoleName string `json:"consoleName,omitempty"`
ConsoleImage string `json:"consoleImage,omitempty"`
KeyStorePassword string `json:"keyStorePassword,omitempty"`
AdminPassword string `json:"adminPassword,omitempty"`
ControllerPassword string `json:"controllerPassword,omitempty"`
ServerPassword string `json:"serverPassword,omitempty"`
MavenPassword string `json:"mavenPassword,omitempty"`
}
| PersistentVolumeClaims []corev1.PersistentVolumeClaim `json:"persistentVolumeClaims,omitempty"`
ServiceAccounts []corev1.ServiceAccount `json:"serviceAccounts,omitempty"`
Secrets []corev1.Secret `json:"secrets,omitempty"`
Roles []rbacv1.Role `json:"roles,omitempty"`
RoleBindings []rbacv1.RoleBinding `json:"roleBindings,omitempty"` | random_line_split |
main.go

t := time.NewTicker(time.Millisecond * 500)
defer t.Stop()
go func() {
for {
select {
case <-t.C:
writer.Flush()
}
}
}()
// main logic - now multi-threaded for a tonne of traffic, quickly. Hopefully it's still functional :D
for u := range urls {
wg.Add(1)
go func(site string) {
defer wg.Done()
finalUrls := []string{}
u, payloads, results := replaceParameters(u, -1, "unknown") // we pass -1 here so we replace all parameters
if u == "" {
return
}
if !quietMode {
fmt.Println("Generated URL:", u)
}
// If the identified URL has neither http or https infront of it. Create both and scan them.
if !strings.Contains(u, "http://") && !strings.Contains(u, "https://") {
finalUrls = append(finalUrls, "http://"+u)
finalUrls = append(finalUrls, "https://"+u)
} else if strings.Contains(u, "http://") {
finalUrls = append(finalUrls, "https://"+u)
} else if strings.Contains(u, "https://") {
finalUrls = append(finalUrls, "http://"+u)
} else {
// else, just scan the submitted one as it has either protocol
finalUrls = append(finalUrls, u)
}
// now loop the slice of finalUrls (either submitted OR 2 urls with http/https appended to them)
for _, uu := range finalUrls {
ssti, injectionPayloadElements := makeRequest(uu, results, quietMode)
if ssti {
// if we had a possible SSTI win, let the user know
workingPayloads := ""
for i, wp := range injectionPayloadElements {
workingPayloads += payloads[wp]
if i != len(injectionPayloadElements)-1 {
workingPayloads += "|"
}
}
fmt.Printf("URL:%s -> Parameter Payload: %s\n", uu, workingPayloads)
// now we have seen a possible win, try figure out the template based on the hardcoded knowledge we have
attemptToIdentifyEngine(uu, injectionPayloadElements[0], quietMode) // this injectionPayloadElements[0] allows us to just replace the first vulnerable URL param
if saveOutput {
line := uu + "|" + workingPayloads
outputToSave = append(outputToSave, line)
}
}
}
}(u)
}
wg.Wait()
// just in case anything is still in buffer
writer.Flush()
if saveOutput && len(outputToSave) > 0 {
file, err := os.OpenFile(outputFileFlag, os.O_CREATE|os.O_WRONLY, 0644)
if err != nil && !quietMode {
log.Fatalf("failed creating file: %s", err)
}
datawriter := bufio.NewWriter(file)
for _, data := range outputToSave {
_, _ = datawriter.WriteString(data + "\n")
}
datawriter.Flush()
file.Close()
}
}
func banner() {
fmt.Println("---------------------------------------------------")
fmt.Println("lazyssti -> Crawl3r")
fmt.Println("Generates SSTI URL's and highlights possible vulns")
fmt.Println("Run again with -q for cleaner output")
fmt.Println("---------------------------------------------------")
}
func readStdin() <-chan string {
lines := make(chan string)
go func() {
defer close(lines)
sc := bufio.NewScanner(os.Stdin)
for sc.Scan() {
url := strings.ToLower(sc.Text())
if url != "" {
lines <- url
}
}
}()
return lines
}
// TODO: Should we randomise this? Do we care? probably not.
// We should extend this to generate a payload PER parameter incase we get multiple injection points across a site. Store the payloads + results for loop them in the regex
func generatePayload(template string, paramNumber int) (string, string) {
payload := ""
injectionResult := ""
switch template {
case templateJinja2:
payload = "skid{{" + strconv.Itoa(paramNumber) + "*'" + strconv.Itoa(paramNumber) + "'}}life"
injectionResult = "skid" + strings.Repeat(strconv.Itoa(paramNumber), paramNumber) + "life"
case templateMako:
payload = "ski${'" + strconv.Itoa(paramNumber) + "'.join('dl')}ife"
injectionResult = "skid" + strconv.Itoa(paramNumber) + "life"
case templateSmarty:
payload = "skid" + strconv.Itoa(paramNumber) + "{*comment*}life"
injectionResult = "skid" + strconv.Itoa(paramNumber) + "life"
case templateTwig:
payload = "skid{{" + strconv.Itoa(paramNumber) + "*'" + strconv.Itoa(paramNumber) + "'}}life"
injectionResult = "skid" + strconv.Itoa(paramNumber*paramNumber) + "life"
case "unknown":
payload = "skid${" + strconv.Itoa(paramNumber) + "*" + strconv.Itoa(paramNumber) + "}life"
injectionResult = "skid" + strconv.Itoa(paramNumber*paramNumber) + "life"
}
return payload, injectionResult
}
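// Example (sketch): concrete values produced by generatePayload for parameter number 2.
//
//	generatePayload(templateJinja2, 2) -> payload "skid{{2*'2'}}life", expected reflection "skid22life"
//	generatePayload(templateTwig, 2)   -> payload "skid{{2*'2'}}life", expected reflection "skid4life"
//	generatePayload(templateSmarty, 2) -> payload "skid2{*comment*}life", expected reflection "skid2life"
//	generatePayload("unknown", 2)      -> payload "skid${2*2}life", expected reflection "skid4life"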
// returns: url, slice of payloads, slice of results
func replaceParameters(url string, paramToReplace int, template string) (string, []string, []string) {
urlParamSplit := strings.Split(url, "?")
if len(urlParamSplit) != 2 {
return "", nil, nil // ? was not identified in the URL. Skip it.
}
if len(urlParamSplit[1]) == 0 {
return "", nil, nil // Although we had a ? in the URL, no parameters were actually identified as the amount of chars after the ? appeared to be 0
}
parameterSplit := strings.Split(urlParamSplit[1], "&")
if len(parameterSplit) == 0 {
return "", nil, nil // Although we had a ? in the URL, no parameters were actually identified
}
generatedPayloadCount := 1 // start from 1 because we aren't CS students
generatedPayloads := []string{} // collect all payloads ready to return
generatedPayloadResults := []string{} // collect all payload results ready to return
injectedParams := []string{}
for i, ps := range parameterSplit {
// only replace the target parameter if specified in the function parameters
if paramToReplace != -1 {
if i != paramToReplace {
injectedParams = append(injectedParams, ps)
continue
}
}
paramAndVal := strings.Split(ps, "=")
if len(paramAndVal) == 1 {
// we didn't have a = in the parameter? Just add back to the URL
injectedParams = append(injectedParams, ps)
} else {
// we did manage to split. Let's inject the payload and rebuild the URL parameter
// create a generic payload for an unknown templating engine (should be a 'catch' all type of payload?)
injectionPayload, injectionResult := generatePayload(template, generatedPayloadCount)
newParamAndVal := paramAndVal[0] + "=" + injectionPayload
injectedParams = append(injectedParams, newParamAndVal)
generatedPayloads = append(generatedPayloads, injectionPayload)
generatedPayloadResults = append(generatedPayloadResults, injectionResult)
generatedPayloadCount += 1
}
}
finalUrl := urlParamSplit[0] + "?"
for _, ip := range injectedParams {
finalUrl += ip + "&"
}
finalUrl = removeLastRune(finalUrl)
return finalUrl, generatedPayloads, generatedPayloadResults
}
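// removeLastRune is defined elsewhere in this file (outside this excerpt); it strips the
// trailing "&" appended by the loop above. A minimal version might look like:
//
// func removeLastRune(s string) string {
// 	r := []rune(s)
// 	if len(r) == 0 {
// 		return s
// 	}
// 	return string(r[:len(r)-1])
// }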
func makeRequest(url string, injectionCriteria []string, quietMode bool) (bool, []int) {
resp, err := http.Get(url)
if err != nil {
if !quietMode {
fmt.Println("[error] performing the request to:", url)
}
return false, nil
}
defer resp.Body.Close()
if resp.StatusCode == http.StatusOK {
bodyBytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
if !quietMode {
fmt.Println("[error] reading response bytes from:", url)
}
return false, nil
}
bodyString := string(bodyBytes)
includesResult := false
workingPayloads := []int{}
for i, ic := range injectionCriteria {
if doesBodyIncludeInjectionResult(ic, bodyString, quietMode) {
includesResult = true
workingPayloads = append(workingPayloads, i)
break
}
}
return includesResult, workingPayloads
} else {
return false, nil
}
}
func doesBodyIncludeInjectionResult(criteria string, body string, quietMode bool) bool {
r, err := regexp.Compile(criteria)
if err != nil {
return false
}
return r.MatchString(body)
}
| func attemptToIdentifyEngine(url string, vulnParamElement int, quietMode bool) []string {
// this might be meh, but make a request to the same URL per template based on the payloads we have
// for this, we don't care about the number of parameters - we just want to try identify the template engine
| random_line_split |
|
main.go | )
ch := readStdin()
go func() {
for u := range ch {
urls <- u
}
close(urls)
}()
var outputFileFlag string
flag.StringVar(&outputFileFlag, "o", "", "Output file for possible SSTI vulnerable URLs")
quietModeFlag := flag.Bool("q", false, "Only output the URLs with possible SSTI vulnerabilities")
flag.Parse()
quietMode := *quietModeFlag
saveOutput := outputFileFlag != ""
outputToSave := []string{}
if !quietMode {
banner()
fmt.Println("")
}
writer := bufio.NewWriter(out)
var wg sync.WaitGroup
// flush to writer periodically
t := time.NewTicker(time.Millisecond * 500)
defer t.Stop()
go func() {
for {
select {
case <-t.C:
writer.Flush()
}
}
}()
// main logic - now multi-threaded for a tonne of traffic, quickly. Hopefully it's still functional :D
for u := range urls {
wg.Add(1)
go func(site string) {
defer wg.Done()
finalUrls := []string{}
u, payloads, results := replaceParameters(site, -1, "unknown") // we pass -1 here so we replace all parameters; use the goroutine's own copy of the URL to avoid racing on the loop variable
if u == "" {
return
}
if !quietMode {
fmt.Println("Generated URL:", u)
}
// If the identified URL has neither http:// nor https:// in front of it, create both and scan them.
if !strings.Contains(u, "http://") && !strings.Contains(u, "https://") {
finalUrls = append(finalUrls, "http://"+u)
finalUrls = append(finalUrls, "https://"+u)
} else if strings.Contains(u, "http://") {
// scan the submitted URL and its https:// variant
finalUrls = append(finalUrls, u)
finalUrls = append(finalUrls, strings.Replace(u, "http://", "https://", 1))
} else if strings.Contains(u, "https://") {
// scan the submitted URL and its http:// variant
finalUrls = append(finalUrls, u)
finalUrls = append(finalUrls, strings.Replace(u, "https://", "http://", 1))
} else {
// else, just scan the submitted one as it has either protocol
finalUrls = append(finalUrls, u)
}
// now loop the slice of finalUrls (either submitted OR 2 urls with http/https appended to them)
for _, uu := range finalUrls {
ssti, injectionPayloadElements := makeRequest(uu, results, quietMode)
if ssti {
// if we had a possible SSTI win, let the user know
workingPayloads := ""
for i, wp := range injectionPayloadElements {
workingPayloads += payloads[wp]
if i != len(injectionPayloadElements)-1 {
workingPayloads += "|"
}
}
fmt.Printf("URL:%s -> Parameter Payload: %s\n", uu, workingPayloads)
// now we have seen a possible win, try to figure out the template engine based on the hardcoded knowledge we have
attemptToIdentifyEngine(uu, injectionPayloadElements[0], quietMode) // this injectionPayloadElements[0] allows us to just replace the first vulnerable URL param
if saveOutput {
line := uu + "|" + workingPayloads
outputToSave = append(outputToSave, line)
}
}
}
}(u)
}
wg.Wait()
// just in case anything is still in buffer
writer.Flush()
if saveOutput && len(outputToSave) > 0 {
file, err := os.OpenFile(outputFileFlag, os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
log.Fatalf("failed creating file: %s", err)
}
datawriter := bufio.NewWriter(file)
for _, data := range outputToSave {
_, _ = datawriter.WriteString(data + "\n")
}
datawriter.Flush()
file.Close()
}
}
func banner() {
fmt.Println("---------------------------------------------------")
fmt.Println("lazyssti -> Crawl3r")
fmt.Println("Generates SSTI URL's and highlights possible vulns")
fmt.Println("Run again with -q for cleaner output")
fmt.Println("---------------------------------------------------")
}
func | () <-chan string {
lines := make(chan string)
go func() {
defer close(lines)
sc := bufio.NewScanner(os.Stdin)
for sc.Scan() {
url := strings.ToLower(sc.Text())
if url != "" {
lines <- url
}
}
}()
return lines
}
// TODO: Should we randomise this? Do we care? probably not.
// We should extend this to generate a payload PER parameter in case we get multiple injection points across a site. Store the payloads + results and loop over them in the regex check
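// generatePayload returns an engine-specific SSTI probe string together with the exact marker text we expect to see in the response if the template engine evaluated it.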
func generatePayload(template string, paramNumber int) (string, string) {
payload := ""
injectionResult := ""
switch template {
case templateJinja2:
payload = "skid{{" + strconv.Itoa(paramNumber) + "*'" + strconv.Itoa(paramNumber) + "'}}life"
injectionResult = "skid" + strings.Repeat(strconv.Itoa(paramNumber), paramNumber) + "life"
case templateMako:
payload = "ski${'" + strconv.Itoa(paramNumber) + "'.join('dl')}ife"
injectionResult = "skid" + strconv.Itoa(paramNumber) + "life"
case templateSmarty:
payload = "skid" + strconv.Itoa(paramNumber) + "{*comment*}life"
injectionResult = "skid" + strconv.Itoa(paramNumber) + "life"
case templateTwig:
payload = "skid{{" + strconv.Itoa(paramNumber) + "*'" + strconv.Itoa(paramNumber) + "'}}life"
injectionResult = "skid" + strconv.Itoa(paramNumber*paramNumber) + "life"
case "unknown":
payload = "skid${" + strconv.Itoa(paramNumber) + "*" + strconv.Itoa(paramNumber) + "}life"
injectionResult = "skid" + strconv.Itoa(paramNumber*paramNumber) + "life"
}
return payload, injectionResult
}
// returns: url, slice of payloads, slice of results
func replaceParameters(url string, paramToReplace int, template string) (string, []string, []string) {
urlParamSplit := strings.Split(url, "?")
if len(urlParamSplit) != 2 {
return "", nil, nil // ? was not identified in the URL. Skip it.
}
if len(urlParamSplit[1]) == 0 {
return "", nil, nil // Although we had a ? in the URL, no parameters were actually identified as the amount of chars after the ? appeared to be 0
}
parameterSplit := strings.Split(urlParamSplit[1], "&")
if len(parameterSplit) == 0 {
return "", nil, nil // Although we had a ? in the URL, no parameters were actually identified
}
generatedPayloadCount := 1 // start from 1 because we aren't CS students
generatedPayloads := []string{} // collect all payloads ready to return
generatedPayloadResults := []string{} // collect all payload results ready to return
injectedParams := []string{}
for i, ps := range parameterSplit {
// only replace the target parameter if specified in the function parameters
if paramToReplace != -1 {
if i != paramToReplace {
injectedParams = append(injectedParams, ps)
continue
}
}
paramAndVal := strings.Split(ps, "=")
if len(paramAndVal) == 1 {
// we didn't have a = in the parameter? Just add back to the URL
injectedParams = append(injectedParams, ps)
} else {
// we did manage to split. Let's inject the payload and rebuild the URL parameter
// create a generic payload for an unknown templating engine (should be a 'catch' all type of payload?)
injectionPayload, injectionResult := generatePayload(template, generatedPayloadCount)
newParamAndVal := paramAndVal[0] + "=" + injectionPayload
injectedParams = append(injectedParams, newParamAndVal)
generatedPayloads = append(generatedPayloads, injectionPayload)
generatedPayloadResults = append(generatedPayloadResults, injectionResult)
generatedPayloadCount += 1
}
}
finalUrl := urlParamSplit[0] + "?"
for _, ip := range injectedParams {
finalUrl += ip + "&"
}
finalUrl = removeLastRune(finalUrl)
return finalUrl, generatedPayloads, generatedPayloadResults
}
func makeRequest(url string, injectionCriteria []string, quietMode bool) (bool, []int) {
resp, err := http.Get(url)
if err != nil {
if !quietMode {
fmt.Println("[error] performing the request to:", url)
}
return false, nil
}
defer resp.Body.Close()
if resp.StatusCode == http.StatusOK {
bodyBytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
if !quietMode {
fmt.Println("[error] reading response bytes from:", url)
}
return false, nil
}
bodyString := string(bodyBytes)
includesResult := false
workingPayloads := []int{}
for i, ic := range injectionCriteria {
if doesBodyIncludeInjectionResult(ic, bodyString, quietMode) {
includesResult = true | readStdin | identifier_name |
main.go | )
ch := readStdin()
go func() {
for u := range ch {
urls <- u
}
close(urls)
}()
var outputFileFlag string
flag.StringVar(&outputFileFlag, "o", "", "Output file for possible SSTI vulnerable URLs")
quietModeFlag := flag.Bool("q", false, "Only output the URLs with possible SSTI vulnerabilities")
flag.Parse()
quietMode := *quietModeFlag
saveOutput := outputFileFlag != ""
outputToSave := []string{}
if !quietMode {
banner()
fmt.Println("")
}
writer := bufio.NewWriter(out)
var wg sync.WaitGroup
// flush to writer periodically
t := time.NewTicker(time.Millisecond * 500)
defer t.Stop()
go func() {
for {
select {
case <-t.C:
writer.Flush()
}
}
}()
// main logic - now multi-threaded for a tonne of traffic, quickly. Hopefully it's still functional :D
for u := range urls {
wg.Add(1)
go func(site string) {
defer wg.Done()
finalUrls := []string{}
u, payloads, results := replaceParameters(site, -1, "unknown") // we pass -1 here so we replace all parameters; use the goroutine's own copy of the URL to avoid racing on the loop variable
if u == "" {
return
}
if !quietMode {
fmt.Println("Generated URL:", u)
}
// If the identified URL has neither http:// nor https:// in front of it, create both and scan them.
if !strings.Contains(u, "http://") && !strings.Contains(u, "https://") {
finalUrls = append(finalUrls, "http://"+u)
finalUrls = append(finalUrls, "https://"+u)
} else if strings.Contains(u, "http://") {
// scan the submitted URL and its https:// variant
finalUrls = append(finalUrls, u)
finalUrls = append(finalUrls, strings.Replace(u, "http://", "https://", 1))
} else if strings.Contains(u, "https://") {
// scan the submitted URL and its http:// variant
finalUrls = append(finalUrls, u)
finalUrls = append(finalUrls, strings.Replace(u, "https://", "http://", 1))
} else {
// else, just scan the submitted one as it has either protocol
finalUrls = append(finalUrls, u)
}
// now loop the slice of finalUrls (either submitted OR 2 urls with http/https appended to them)
for _, uu := range finalUrls {
ssti, injectionPayloadElements := makeRequest(uu, results, quietMode)
if ssti {
// if we had a possible SSTI win, let the user know
workingPayloads := ""
for i, wp := range injectionPayloadElements {
workingPayloads += payloads[wp]
if i != len(injectionPayloadElements)-1 {
workingPayloads += "|"
}
}
fmt.Printf("URL:%s -> Parameter Payload: %s\n", uu, workingPayloads)
// now we have seen a possible win, try to figure out the template engine based on the hardcoded knowledge we have
attemptToIdentifyEngine(uu, injectionPayloadElements[0], quietMode) // this injectionPayloadElements[0] allows us to just replace the first vulnerable URL param
if saveOutput {
line := uu + "|" + workingPayloads
outputToSave = append(outputToSave, line)
}
}
}
}(u)
}
wg.Wait()
// just in case anything is still in buffer
writer.Flush()
if saveOutput && len(outputToSave) > 0 {
file, err := os.OpenFile(outputFileFlag, os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
log.Fatalf("failed creating file: %s", err)
}
datawriter := bufio.NewWriter(file)
for _, data := range outputToSave {
_, _ = datawriter.WriteString(data + "\n")
}
datawriter.Flush()
file.Close()
}
}
func banner() {
fmt.Println("---------------------------------------------------")
fmt.Println("lazyssti -> Crawl3r")
fmt.Println("Generates SSTI URL's and highlights possible vulns")
fmt.Println("Run again with -q for cleaner output")
fmt.Println("---------------------------------------------------")
}
func readStdin() <-chan string {
lines := make(chan string)
go func() {
defer close(lines)
sc := bufio.NewScanner(os.Stdin)
for sc.Scan() {
url := strings.ToLower(sc.Text())
if url != "" {
lines <- url
}
}
}()
return lines
}
// TODO: Should we randomise this? Do we care? probably not.
// We should extend this to generate a payload PER parameter in case we get multiple injection points across a site. Store the payloads + results and loop over them in the regex check
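// generatePayload returns an engine-specific SSTI probe string together with the exact marker text we expect to see in the response if the template engine evaluated it.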
func generatePayload(template string, paramNumber int) (string, string) {
payload := ""
injectionResult := ""
switch template {
case templateJinja2:
payload = "skid{{" + strconv.Itoa(paramNumber) + "*'" + strconv.Itoa(paramNumber) + "'}}life"
injectionResult = "skid" + strings.Repeat(strconv.Itoa(paramNumber), paramNumber) + "life"
case templateMako:
payload = "ski${'" + strconv.Itoa(paramNumber) + "'.join('dl')}ife"
injectionResult = "skid" + strconv.Itoa(paramNumber) + "life"
case templateSmarty:
payload = "skid" + strconv.Itoa(paramNumber) + "{*comment*}life"
injectionResult = "skid" + strconv.Itoa(paramNumber) + "life"
case templateTwig:
payload = "skid{{" + strconv.Itoa(paramNumber) + "*'" + strconv.Itoa(paramNumber) + "'}}life"
injectionResult = "skid" + strconv.Itoa(paramNumber*paramNumber) + "life"
case "unknown":
payload = "skid${" + strconv.Itoa(paramNumber) + "*" + strconv.Itoa(paramNumber) + "}life"
injectionResult = "skid" + strconv.Itoa(paramNumber*paramNumber) + "life"
}
return payload, injectionResult
}
// returns: url, slice of payloads, slice of results
func replaceParameters(url string, paramToReplace int, template string) (string, []string, []string) {
urlParamSplit := strings.Split(url, "?")
if len(urlParamSplit) != 2 {
return "", nil, nil // ? was not identified in the URL. Skip it.
}
if len(urlParamSplit[1]) == 0 |
parameterSplit := strings.Split(urlParamSplit[1], "&")
if len(parameterSplit) == 0 {
return "", nil, nil // Although we had a ? in the URL, no parameters were actually identified
}
generatedPayloadCount := 1 // start from 1 because we aren't CS students
generatedPayloads := []string{} // collect all payloads ready to return
generatedPayloadResults := []string{} // collect all payload results ready to return
injectedParams := []string{}
for i, ps := range parameterSplit {
// only replace the target parameter if specified in the function parameters
if paramToReplace != -1 {
if i != paramToReplace {
injectedParams = append(injectedParams, ps)
continue
}
}
paramAndVal := strings.Split(ps, "=")
if len(paramAndVal) == 1 {
// we didn't have a = in the parameter? Just add back to the URL
injectedParams = append(injectedParams, ps)
} else {
// we did manage to split. Let's inject the payload and rebuild the URL parameter
// create a generic payload for an unknown templating engine (should be a 'catch' all type of payload?)
injectionPayload, injectionResult := generatePayload(template, generatedPayloadCount)
newParamAndVal := paramAndVal[0] + "=" + injectionPayload
injectedParams = append(injectedParams, newParamAndVal)
generatedPayloads = append(generatedPayloads, injectionPayload)
generatedPayloadResults = append(generatedPayloadResults, injectionResult)
generatedPayloadCount += 1
}
}
finalUrl := urlParamSplit[0] + "?"
for _, ip := range injectedParams {
finalUrl += ip + "&"
}
finalUrl = removeLastRune(finalUrl)
return finalUrl, generatedPayloads, generatedPayloadResults
}
func makeRequest(url string, injectionCriteria []string, quietMode bool) (bool, []int) {
resp, err := http.Get(url)
if err != nil {
if !quietMode {
fmt.Println("[error] performing the request to:", url)
}
return false, nil
}
defer resp.Body.Close()
if resp.StatusCode == http.StatusOK {
bodyBytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
if !quietMode {
fmt.Println("[error] reading response bytes from:", url)
}
return false, nil
}
bodyString := string(bodyBytes)
includesResult := false
workingPayloads := []int{}
for i, ic := range injectionCriteria {
if doesBodyIncludeInjectionResult(ic, bodyString, quietMode) {
includesResult = | {
return "", nil, nil // Although we had a ? in the URL, no parameters were actually identified as the amount of chars after the ? appeared to be 0
} | conditional_block |
main.go | I vulnerabilities")
flag.Parse()
quietMode := *quietModeFlag
saveOutput := outputFileFlag != ""
outputToSave := []string{}
if !quietMode {
banner()
fmt.Println("")
}
writer := bufio.NewWriter(out)
var wg sync.WaitGroup
// flush to writer periodically
t := time.NewTicker(time.Millisecond * 500)
defer t.Stop()
go func() {
for {
select {
case <-t.C:
writer.Flush()
}
}
}()
// main logic - now multi-threaded for a tonne of traffic, quickly. Hopefully it's still functional :D
for u := range urls {
wg.Add(1)
go func(site string) {
defer wg.Done()
finalUrls := []string{}
u, payloads, results := replaceParameters(site, -1, "unknown") // we pass -1 here so we replace all parameters; use the goroutine's own copy of the URL to avoid racing on the loop variable
if u == "" {
return
}
if !quietMode {
fmt.Println("Generated URL:", u)
}
// If the identified URL has neither http:// nor https:// in front of it, create both and scan them.
if !strings.Contains(u, "http://") && !strings.Contains(u, "https://") {
finalUrls = append(finalUrls, "http://"+u)
finalUrls = append(finalUrls, "https://"+u)
} else if strings.Contains(u, "http://") {
// scan the submitted URL and its https:// variant
finalUrls = append(finalUrls, u)
finalUrls = append(finalUrls, strings.Replace(u, "http://", "https://", 1))
} else if strings.Contains(u, "https://") {
// scan the submitted URL and its http:// variant
finalUrls = append(finalUrls, u)
finalUrls = append(finalUrls, strings.Replace(u, "https://", "http://", 1))
} else {
// else, just scan the submitted one as it has either protocol
finalUrls = append(finalUrls, u)
}
// now loop the slice of finalUrls (either submitted OR 2 urls with http/https appended to them)
for _, uu := range finalUrls {
ssti, injectionPayloadElements := makeRequest(uu, results, quietMode)
if ssti {
// if we had a possible SSTI win, let the user know
workingPayloads := ""
for i, wp := range injectionPayloadElements {
workingPayloads += payloads[wp]
if i != len(injectionPayloadElements)-1 {
workingPayloads += "|"
}
}
fmt.Printf("URL:%s -> Parameter Payload: %s\n", uu, workingPayloads)
// now we have seen a possible win, try to figure out the template engine based on the hardcoded knowledge we have
attemptToIdentifyEngine(uu, injectionPayloadElements[0], quietMode) // this injectionPayloadElements[0] allows us to just replace the first vulnerable URL param
if saveOutput {
line := uu + "|" + workingPayloads
outputToSave = append(outputToSave, line)
}
}
}
}(u)
}
wg.Wait()
// just in case anything is still in buffer
writer.Flush()
if saveOutput && len(outputToSave) > 0 {
file, err := os.OpenFile(outputFileFlag, os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
log.Fatalf("failed creating file: %s", err)
}
datawriter := bufio.NewWriter(file)
for _, data := range outputToSave {
_, _ = datawriter.WriteString(data + "\n")
}
datawriter.Flush()
file.Close()
}
}
func banner() {
fmt.Println("---------------------------------------------------")
fmt.Println("lazyssti -> Crawl3r")
fmt.Println("Generates SSTI URL's and highlights possible vulns")
fmt.Println("Run again with -q for cleaner output")
fmt.Println("---------------------------------------------------")
}
func readStdin() <-chan string {
lines := make(chan string)
go func() {
defer close(lines)
sc := bufio.NewScanner(os.Stdin)
for sc.Scan() {
url := strings.ToLower(sc.Text())
if url != "" {
lines <- url
}
}
}()
return lines
}
// TODO: Should we randomise this? Do we care? probably not.
// We should extend this to generate a payload PER parameter in case we get multiple injection points across a site. Store the payloads + results and loop over them in the regex check
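// generatePayload returns an engine-specific SSTI probe string together with the exact marker text we expect to see in the response if the template engine evaluated it.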
func generatePayload(template string, paramNumber int) (string, string) {
payload := ""
injectionResult := ""
switch template {
case templateJinja2:
payload = "skid{{" + strconv.Itoa(paramNumber) + "*'" + strconv.Itoa(paramNumber) + "'}}life"
injectionResult = "skid" + strings.Repeat(strconv.Itoa(paramNumber), paramNumber) + "life"
case templateMako:
payload = "ski${'" + strconv.Itoa(paramNumber) + "'.join('dl')}ife"
injectionResult = "skid" + strconv.Itoa(paramNumber) + "life"
case templateSmarty:
payload = "skid" + strconv.Itoa(paramNumber) + "{*comment*}life"
injectionResult = "skid" + strconv.Itoa(paramNumber) + "life"
case templateTwig:
payload = "skid{{" + strconv.Itoa(paramNumber) + "*'" + strconv.Itoa(paramNumber) + "'}}life"
injectionResult = "skid" + strconv.Itoa(paramNumber*paramNumber) + "life"
case "unknown":
payload = "skid${" + strconv.Itoa(paramNumber) + "*" + strconv.Itoa(paramNumber) + "}life"
injectionResult = "skid" + strconv.Itoa(paramNumber*paramNumber) + "life"
}
return payload, injectionResult
}
// returns: url, slice of payloads, slice of results
func replaceParameters(url string, paramToReplace int, template string) (string, []string, []string) {
urlParamSplit := strings.Split(url, "?")
if len(urlParamSplit) != 2 {
return "", nil, nil // ? was not identified in the URL. Skip it.
}
if len(urlParamSplit[1]) == 0 {
return "", nil, nil // Although we had a ? in the URL, no parameters were actually identified as the amount of chars after the ? appeared to be 0
}
parameterSplit := strings.Split(urlParamSplit[1], "&")
if len(parameterSplit) == 0 {
return "", nil, nil // Although we had a ? in the URL, no parameters were actually identified
}
generatedPayloadCount := 1 // start from 1 because we aren't CS students
generatedPayloads := []string{} // collect all payloads ready to return
generatedPayloadResults := []string{} // collect all payload results ready to return
injectedParams := []string{}
for i, ps := range parameterSplit {
// only replace the target parameter if specified in the function parameters
if paramToReplace != -1 {
if i != paramToReplace {
injectedParams = append(injectedParams, ps)
continue
}
}
paramAndVal := strings.Split(ps, "=")
if len(paramAndVal) == 1 {
// we didn't have a = in the parameter? Just add back to the URL
injectedParams = append(injectedParams, ps)
} else {
// we did manage to split. Let's inject the payload and rebuild the URL parameter
// create a generic payload for an unknown templating engine (should be a 'catch' all type of payload?)
injectionPayload, injectionResult := generatePayload(template, generatedPayloadCount)
newParamAndVal := paramAndVal[0] + "=" + injectionPayload
injectedParams = append(injectedParams, newParamAndVal)
generatedPayloads = append(generatedPayloads, injectionPayload)
generatedPayloadResults = append(generatedPayloadResults, injectionResult)
generatedPayloadCount += 1
}
}
finalUrl := urlParamSplit[0] + "?"
for _, ip := range injectedParams {
finalUrl += ip + "&"
}
finalUrl = removeLastRune(finalUrl)
return finalUrl, generatedPayloads, generatedPayloadResults
}
func makeRequest(url string, injectionCriteria []string, quietMode bool) (bool, []int) {
resp, err := http.Get(url)
if err != nil {
if !quietMode {
fmt.Println("[error] performing the request to:", url)
}
return false, nil
}
defer resp.Body.Close()
if resp.StatusCode == http.StatusOK {
bodyBytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
if !quietMode {
fmt.Println("[error] reading response bytes from:", url)
}
return false, nil
}
bodyString := string(bodyBytes)
includesResult := false
workingPayloads := []int{}
for i, ic := range injectionCriteria {
if doesBodyIncludeInjectionResult(ic, bodyString, quietMode) {
includesResult = true
workingPayloads = append(workingPayloads, i)
break
}
}
return includesResult, workingPayloads
} else {
return false, nil
}
}
func doesBodyIncludeInjectionResult(criteria string, body string, quietMode bool) bool | {
r, err := regexp.Compile(criteria)
if err != nil {
return false
}
return r.MatchString(body)
} | identifier_body |
|
Generator.py | (0, num_elems), 2)]
def calculate_action_list(num_agents):
all_actions = []
for agent_idx in range(num_agents):
all_actions.append(CorridorConstants.move_action(agent_idx))
for agent_idx_pair in calculate_pairs(num_agents):
all_actions.append(CorridorConstants.click_action(*agent_idx_pair))
all_actions.append(CorridorConstants.idle_action())
return all_actions
def calculate_tile_index(w, tile):
return (tile[0]) * w + (tile[1])
def calculate_tile_from_idx(w, h, tile_idx):
return tile_idx // w, tile_idx % w  # inverse of calculate_tile_index: row-major with width w
def create_combination_template(length, *args):
res = [RockSamplingConstants.WILDCARD for _ in range(length)]
for i in args:
res[i] = PLACE_HOLDER_SYMBOL
return ' '.join(res)
def calculate_direction(src_tile, dst_tile):
x1, y1 = src_tile[0], src_tile[1]
x2, y2 = dst_tile[0], dst_tile[1]
if y2 - y1 > 0:
return RockSamplingConstants.right()
elif y1 - y2 > 0:
return RockSamplingConstants.left()
elif x2 - x1 > 0:
return RockSamplingConstants.down()
else:
return RockSamplingConstants.up()
def cast_to_template(template, cast):
return template.replace(PLACE_HOLDER_SYMBOL, cast)
def calculate_all_tiles(w, h):
return [(i, j) for j in range(0, w) for i in range(0, h)]
def calculate_neighbors_dict(w, h):
res = {}
tiles = calculate_all_tiles(w, h)
for tile in tiles:
cur_neighbors = []
i, j = tile[0], tile[1]
# tiles are 0-indexed (see calculate_all_tiles), so clamp at 0 and at h - 1 / w - 1
if i > 0:
cur_neighbors.append((i - 1, j))
if i < h - 1:
cur_neighbors.append((i + 1, j))
if j > 0:
cur_neighbors.append((i, j - 1))
if j < w - 1:
cur_neighbors.append((i, j + 1))
res[tile] = cur_neighbors
return res
def matrix_to_string(mat):
return '\n'.join('\t'.join('%0.2f' % x for x in y) for y in mat)
def calculate_move_matrix(corridor_length, succ_prob):
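# Builds the row-stochastic transition matrix for a forward move: from tile i the agent reaches tile i+1 with probability succ_prob and stays put otherwise; the last corridor tile is absorbing.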
num_tiles = corridor_length
res = np.zeros(shape=(num_tiles, num_tiles)) # can use sparse matrix instead
for src_tile_idx in range(num_tiles):
dst_tile_idx = min(src_tile_idx + 1, corridor_length - 1)
if dst_tile_idx == src_tile_idx:
|
else:
res[src_tile_idx, src_tile_idx] = 1.0 - succ_prob
res[src_tile_idx, dst_tile_idx] = succ_prob
return res
def calculate_euclidian_distance_in_grid(p1, p2):
return np.sqrt((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2)
def calculate_good_sense_matrices(w, h, rock_positions, sense_decay_const):
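# Sensor model: the chance of reading the rock's true quality is 0.5 + 0.5 / sense_decay_const**distance, so observations decay towards a coin flip the further the agent is from the rock.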
all_tiles = [calculate_tile_from_idx(w, h, i) for i in range(w * h)]
distances = {(i, j): calculate_euclidian_distance_in_grid(all_tiles[i], all_tiles[j]) for i, j in
combinations(range(w * h), 2)}
res = {}
for rock_idx, rock_tile_idx in enumerate(rock_positions):
curr_mat = np.zeros(shape=(w * h, 3))
for curr_pos in range(w * h):
try:
distance_to_rock = distances[rock_tile_idx, curr_pos]
except KeyError:
try:
distance_to_rock = distances[curr_pos, rock_tile_idx]
except KeyError:
distance_to_rock = 0
succ_prob = 0.5 + 0.5 / (sense_decay_const ** distance_to_rock)
curr_mat[curr_pos][RockSamplingConstants.good_idx] = succ_prob
curr_mat[curr_pos][RockSamplingConstants.bad_idx] = 1 - succ_prob
curr_mat[curr_pos][RockSamplingConstants.null_idx] = 0
res[RockSamplingConstants.rock_symbol(rock_idx)] = curr_mat
return res
def calculate_bad_sense_matrices(w, h, rock_positions, sense_decay_const):
res = calculate_good_sense_matrices(w, h, rock_positions, sense_decay_const)
good, bad = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx
for rock, sense_matrix in res.items():
sense_matrix[:, [good, bad]] = sense_matrix[:, [bad, good]]
return res
def calculate_sense_martices(w, h, rock_positions, sense_decay_const):
res = {}
good_matrices = calculate_good_sense_matrices(w, h, rock_positions, sense_decay_const)
bad_matrices = calculate_bad_sense_matrices(w, h, rock_positions, sense_decay_const)
for rock_idx in range(len(rock_positions)):
rock_symbol = RockSamplingConstants.rock_symbol(rock_idx)
res[rock_symbol] = {RockSamplingConstants.good_quality(): good_matrices[rock_symbol],
RockSamplingConstants.bad_quality(): bad_matrices[rock_symbol]}
return res
def get_rock_sample_reward_matrix(good_sample_reward, bad_sample_penalty):
res = np.zeros(shape=(2, 2))
good, bad = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx
res[bad][bad] = -bad_sample_penalty
res[bad][good] = 0 # can't happen
res[good][bad] = good_sample_reward
res[good][good] = 0 # no effect
return res
def generate_random_tile(w, h):
return randint(a=0, b=h - 1), randint(a=0, b=w - 1)
def project_direction_matrix_to_control_area(direction_matrix, control_area):
num_rows, num_cols = direction_matrix.shape
res = direction_matrix.copy()
for i in range(num_rows):
if i not in control_area:
res[i] = np.zeros(num_cols)
res[i][i] = 1.0
return res
def project_sense_matrix_to_control_area(sense_matrix, control_area):
num_rows, num_obs = sense_matrix.shape
res = sense_matrix.copy()
good, bad, null = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx, RockSamplingConstants.null_idx
for i in range(num_rows):
if i not in control_area:
res[i][good] = 0.5
res[i][bad] = 0.5
res[i][null] = 0
return res
def calculate_sample_matrix(sample_prob):
res = np.zeros(shape=(2, 2))
good, bad = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx
res[bad][bad] = 1.0
res[bad][good] = 0.0
res[good][bad] = sample_prob
res[good][good] = 1 - sample_prob
return res
def generate_template(template_file_name, parameters):
"""
:param template_file_name: file should be in cwd/templates/template_file_name, in jinja2 format
:param parameters: dictionary containing parameters for rendering
:return: the rendered template in string format
"""
env = Environment(loader=FileSystemLoader('templates'), lstrip_blocks=True, trim_blocks=True,
extensions=['jinja2.ext.do'])
env.globals.update(calculate_action_list=calculate_action_list,
calculate_tile_index=calculate_tile_index,
create_combination_template=create_combination_template,
calculate_pairs=calculate_pairs,
cast_to_template=cast_to_template,
calculate_move_matrix=calculate_move_matrix,
corridor_constants=CorridorConstants,
pomdpx_constants=POMDPXConstants,
time_zero=POMDPXConstants.time_zero,
time_one=POMDPXConstants.time_one,
matrix_to_string=matrix_to_string)
template = env.get_template(template_file_name)
return template.render(parameters)
def generate_domain_config(corridor_length, num_agents,
good_click_reward,
bad_click_penalty,
move_cost,
click_cost,
move_prob,
agents_positions,
bias_constant,
discount=0.95,
domain_name_suffix=""):
agents_positions = agents_positions
agents_positions = [{calculate_tile_index(corridor_length, tile): prob for tile, prob in dist.items()} for dist in
agents_positions] if agents_positions is not None else [
{calculate_tile_index(corridor_length, (0, 0)): 1.0} for _ in
range(num_agents)]
res = {"DOMAIN_NAME": "COR-%d_%dA%s" % (
corridor_length, num_agents, domain_name_suffix),
"NUM_AGENTS": num_agents,
"CORRIDOR_LENGTH": corridor_length,
"DISCOUNT": discount,
# None yields a uniform distribution
# Positions are indices not tiles!
"AGENTS_POSITIONS": agents_positions,
"PROB_MOVE": move_prob,
"MOVE_COST": -move_cost,
"CLICK_COST": -click_cost,
"GOOD_CLICK_REWARD": good | res[src_tile_idx, dst_tile_idx] = 1.0 | conditional_block |
Generator.py | (0, num_elems), 2)]
def calculate_action_list(num_agents):
all_actions = []
for agent_idx in range(num_agents):
all_actions.append(CorridorConstants.move_action(agent_idx))
for agent_idx_pair in calculate_pairs(num_agents):
all_actions.append(CorridorConstants.click_action(*agent_idx_pair))
all_actions.append(CorridorConstants.idle_action())
return all_actions
def calculate_tile_index(w, tile):
return (tile[0]) * w + (tile[1])
def calculate_tile_from_idx(w, h, tile_idx):
return tile_idx // w, tile_idx % w  # inverse of calculate_tile_index: row-major with width w
def create_combination_template(length, *args):
res = [RockSamplingConstants.WILDCARD for _ in range(length)]
for i in args:
res[i] = PLACE_HOLDER_SYMBOL
return ' '.join(res)
def calculate_direction(src_tile, dst_tile):
x1, y1 = src_tile[0], src_tile[1]
x2, y2 = dst_tile[0], dst_tile[1]
if y2 - y1 > 0:
return RockSamplingConstants.right()
elif y1 - y2 > 0:
return RockSamplingConstants.left()
elif x2 - x1 > 0:
return RockSamplingConstants.down()
else:
return RockSamplingConstants.up()
def cast_to_template(template, cast):
return template.replace(PLACE_HOLDER_SYMBOL, cast)
def calculate_all_tiles(w, h):
return [(i, j) for j in range(0, w) for i in range(0, h)]
def calculate_neighbors_dict(w, h):
res = {}
tiles = calculate_all_tiles(w, h)
for tile in tiles:
cur_neighbors = []
i, j = tile[0], tile[1]
# tiles are 0-indexed (see calculate_all_tiles), so clamp at 0 and at h - 1 / w - 1
if i > 0:
cur_neighbors.append((i - 1, j))
if i < h - 1:
cur_neighbors.append((i + 1, j))
if j > 0:
cur_neighbors.append((i, j - 1))
if j < w - 1:
cur_neighbors.append((i, j + 1))
res[tile] = cur_neighbors
return res
def matrix_to_string(mat):
return '\n'.join('\t'.join('%0.2f' % x for x in y) for y in mat)
def calculate_move_matrix(corridor_length, succ_prob):
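# Builds the row-stochastic transition matrix for a forward move: from tile i the agent reaches tile i+1 with probability succ_prob and stays put otherwise; the last corridor tile is absorbing.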
num_tiles = corridor_length
res = np.zeros(shape=(num_tiles, num_tiles)) # can use sparse matrix instead
for src_tile_idx in range(num_tiles):
dst_tile_idx = min(src_tile_idx + 1, corridor_length - 1)
if dst_tile_idx == src_tile_idx:
res[src_tile_idx, dst_tile_idx] = 1.0
else:
res[src_tile_idx, src_tile_idx] = 1.0 - succ_prob
res[src_tile_idx, dst_tile_idx] = succ_prob
return res
def calculate_euclidian_distance_in_grid(p1, p2):
return np.sqrt((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2)
def calculate_good_sense_matrices(w, h, rock_positions, sense_decay_const):
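# Sensor model: the chance of reading the rock's true quality is 0.5 + 0.5 / sense_decay_const**distance, so observations decay towards a coin flip the further the agent is from the rock.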
all_tiles = [calculate_tile_from_idx(w, h, i) for i in range(w * h)]
distances = {(i, j): calculate_euclidian_distance_in_grid(all_tiles[i], all_tiles[j]) for i, j in
combinations(range(w * h), 2)}
res = {}
for rock_idx, rock_tile_idx in enumerate(rock_positions):
curr_mat = np.zeros(shape=(w * h, 3))
for curr_pos in range(w * h):
try:
distance_to_rock = distances[rock_tile_idx, curr_pos]
except KeyError:
try:
distance_to_rock = distances[curr_pos, rock_tile_idx]
except KeyError:
distance_to_rock = 0
succ_prob = 0.5 + 0.5 / (sense_decay_const ** distance_to_rock)
curr_mat[curr_pos][RockSamplingConstants.good_idx] = succ_prob
curr_mat[curr_pos][RockSamplingConstants.bad_idx] = 1 - succ_prob
curr_mat[curr_pos][RockSamplingConstants.null_idx] = 0
res[RockSamplingConstants.rock_symbol(rock_idx)] = curr_mat
return res
def calculate_bad_sense_matrices(w, h, rock_positions, sense_decay_const):
res = calculate_good_sense_matrices(w, h, rock_positions, sense_decay_const)
good, bad = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx
for rock, sense_matrix in res.items():
sense_matrix[:, [good, bad]] = sense_matrix[:, [bad, good]]
return res
def calculate_sense_martices(w, h, rock_positions, sense_decay_const):
res = {}
good_matrices = calculate_good_sense_matrices(w, h, rock_positions, sense_decay_const)
bad_matrices = calculate_bad_sense_matrices(w, h, rock_positions, sense_decay_const)
for rock_idx in range(len(rock_positions)):
rock_symbol = RockSamplingConstants.rock_symbol(rock_idx)
res[rock_symbol] = {RockSamplingConstants.good_quality(): good_matrices[rock_symbol],
RockSamplingConstants.bad_quality(): bad_matrices[rock_symbol]}
return res
def get_rock_sample_reward_matrix(good_sample_reward, bad_sample_penalty): | res = np.zeros(shape=(2, 2))
good, bad = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx
res[bad][bad] = -bad_sample_penalty
res[bad][good] = 0 # can't happen
res[good][bad] = good_sample_reward
res[good][good] = 0 # no effect
return res
def generate_random_tile(w, h):
return randint(a=0, b=h - 1), randint(a=0, b=w - 1)
def project_direction_matrix_to_control_area(direction_matrix, control_area):
num_rows, num_cols = direction_matrix.shape
res = direction_matrix.copy()
for i in range(num_rows):
if i not in control_area:
res[i] = np.zeros(num_cols)
res[i][i] = 1.0
return res
def project_sense_matrix_to_control_area(sense_matrix, control_area):
num_rows, num_obs = sense_matrix.shape
res = sense_matrix.copy()
good, bad, null = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx, RockSamplingConstants.null_idx
for i in range(num_rows):
if i not in control_area:
res[i][good] = 0.5
res[i][bad] = 0.5
res[i][null] = 0
return res
def calculate_sample_matrix(sample_prob):
res = np.zeros(shape=(2, 2))
good, bad = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx
res[bad][bad] = 1.0
res[bad][good] = 0.0
res[good][bad] = sample_prob
res[good][good] = 1 - sample_prob
return res
def generate_template(template_file_name, parameters):
"""
:param template_file_name: file should be in cwd/templates/template_file_name, in jinja2 format
:param parameters: dictionary containing parameters for rendering
:return: the rendered template in string format
"""
env = Environment(loader=FileSystemLoader('templates'), lstrip_blocks=True, trim_blocks=True,
extensions=['jinja2.ext.do'])
env.globals.update(calculate_action_list=calculate_action_list,
calculate_tile_index=calculate_tile_index,
create_combination_template=create_combination_template,
calculate_pairs=calculate_pairs,
cast_to_template=cast_to_template,
calculate_move_matrix=calculate_move_matrix,
corridor_constants=CorridorConstants,
pomdpx_constants=POMDPXConstants,
time_zero=POMDPXConstants.time_zero,
time_one=POMDPXConstants.time_one,
matrix_to_string=matrix_to_string)
template = env.get_template(template_file_name)
return template.render(parameters)
def generate_domain_config(corridor_length, num_agents,
good_click_reward,
bad_click_penalty,
move_cost,
click_cost,
move_prob,
agents_positions,
bias_constant,
discount=0.95,
domain_name_suffix=""):
agents_positions = agents_positions
agents_positions = [{calculate_tile_index(corridor_length, tile): prob for tile, prob in dist.items()} for dist in
agents_positions] if agents_positions is not None else [
{calculate_tile_index(corridor_length, (0, 0)): 1.0} for _ in
range(num_agents)]
res = {"DOMAIN_NAME": "COR-%d_%dA%s" % (
corridor_length, num_agents, domain_name_suffix),
"NUM_AGENTS": num_agents,
"CORRIDOR_LENGTH": corridor_length,
"DISCOUNT": discount,
# None yields a uniform distribution
# Positions are indices not tiles!
"AGENTS_POSITIONS": agents_positions,
"PROB_MOVE": move_prob,
"MOVE_COST": -move_cost,
"CLICK_COST": -click_cost,
"GOOD_CLICK_REWARD": good | random_line_split |
|
Generator.py | (0, num_elems), 2)]
def calculate_action_list(num_agents):
all_actions = []
for agent_idx in range(num_agents):
all_actions.append(CorridorConstants.move_action(agent_idx))
for agent_idx_pair in calculate_pairs(num_agents):
all_actions.append(CorridorConstants.click_action(*agent_idx_pair))
all_actions.append(CorridorConstants.idle_action())
return all_actions
def calculate_tile_index(w, tile):
return (tile[0]) * w + (tile[1])
def calculate_tile_from_idx(w, h, tile_idx):
return tile_idx // w, tile_idx % w  # inverse of calculate_tile_index: row-major with width w
def create_combination_template(length, *args):
res = [RockSamplingConstants.WILDCARD for _ in range(length)]
for i in args:
res[i] = PLACE_HOLDER_SYMBOL
return ' '.join(res)
def calculate_direction(src_tile, dst_tile):
x1, y1 = src_tile[0], src_tile[1]
x2, y2 = dst_tile[0], dst_tile[1]
if y2 - y1 > 0:
return RockSamplingConstants.right()
elif y1 - y2 > 0:
return RockSamplingConstants.left()
elif x2 - x1 > 0:
return RockSamplingConstants.down()
else:
return RockSamplingConstants.up()
def cast_to_template(template, cast):
return template.replace(PLACE_HOLDER_SYMBOL, cast)
def calculate_all_tiles(w, h):
return [(i, j) for j in range(0, w) for i in range(0, h)]
def calculate_neighbors_dict(w, h):
res = {}
tiles = calculate_all_tiles(w, h)
for tile in tiles:
cur_neighbors = []
i, j = tile[0], tile[1]
# tiles are 0-indexed (see calculate_all_tiles), so clamp at 0 and at h - 1 / w - 1
if i > 0:
cur_neighbors.append((i - 1, j))
if i < h - 1:
cur_neighbors.append((i + 1, j))
if j > 0:
cur_neighbors.append((i, j - 1))
if j < w - 1:
cur_neighbors.append((i, j + 1))
res[tile] = cur_neighbors
return res
def matrix_to_string(mat):
return '\n'.join('\t'.join('%0.2f' % x for x in y) for y in mat)
def calculate_move_matrix(corridor_length, succ_prob):
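# Builds the row-stochastic transition matrix for a forward move: from tile i the agent reaches tile i+1 with probability succ_prob and stays put otherwise; the last corridor tile is absorbing.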
num_tiles = corridor_length
res = np.zeros(shape=(num_tiles, num_tiles)) # can use sparse matrix instead
for src_tile_idx in range(num_tiles):
dst_tile_idx = min(src_tile_idx + 1, corridor_length - 1)
if dst_tile_idx == src_tile_idx:
res[src_tile_idx, dst_tile_idx] = 1.0
else:
res[src_tile_idx, src_tile_idx] = 1.0 - succ_prob
res[src_tile_idx, dst_tile_idx] = succ_prob
return res
def calculate_euclidian_distance_in_grid(p1, p2):
return np.sqrt((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2)
def calculate_good_sense_matrices(w, h, rock_positions, sense_decay_const):
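# Sensor model: the chance of reading the rock's true quality is 0.5 + 0.5 / sense_decay_const**distance, so observations decay towards a coin flip the further the agent is from the rock.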
all_tiles = [calculate_tile_from_idx(w, h, i) for i in range(w * h)]
distances = {(i, j): calculate_euclidian_distance_in_grid(all_tiles[i], all_tiles[j]) for i, j in
combinations(range(w * h), 2)}
res = {}
for rock_idx, rock_tile_idx in enumerate(rock_positions):
curr_mat = np.zeros(shape=(w * h, 3))
for curr_pos in range(w * h):
try:
distance_to_rock = distances[rock_tile_idx, curr_pos]
except KeyError:
try:
distance_to_rock = distances[curr_pos, rock_tile_idx]
except KeyError:
distance_to_rock = 0
succ_prob = 0.5 + 0.5 / (sense_decay_const ** distance_to_rock)
curr_mat[curr_pos][RockSamplingConstants.good_idx] = succ_prob
curr_mat[curr_pos][RockSamplingConstants.bad_idx] = 1 - succ_prob
curr_mat[curr_pos][RockSamplingConstants.null_idx] = 0
res[RockSamplingConstants.rock_symbol(rock_idx)] = curr_mat
return res
def calculate_bad_sense_matrices(w, h, rock_positions, sense_decay_const):
res = calculate_good_sense_matrices(w, h, rock_positions, sense_decay_const)
good, bad = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx
for rock, sense_matrix in res.items():
sense_matrix[:, [good, bad]] = sense_matrix[:, [bad, good]]
return res
def calculate_sense_martices(w, h, rock_positions, sense_decay_const):
|
def get_rock_sample_reward_matrix(good_sample_reward, bad_sample_penalty):
res = np.zeros(shape=(2, 2))
good, bad = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx
res[bad][bad] = -bad_sample_penalty
res[bad][good] = 0 # can't happen
res[good][bad] = good_sample_reward
res[good][good] = 0 # no effect
return res
def generate_random_tile(w, h):
return randint(a=0, b=h - 1), randint(a=0, b=w - 1)
def project_direction_matrix_to_control_area(direction_matrix, control_area):
num_rows, num_cols = direction_matrix.shape
res = direction_matrix.copy()
for i in range(num_rows):
if i not in control_area:
res[i] = np.zeros(num_cols)
res[i][i] = 1.0
return res
def project_sense_matrix_to_control_area(sense_matrix, control_area):
num_rows, num_obs = sense_matrix.shape
res = sense_matrix.copy()
good, bad, null = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx, RockSamplingConstants.null_idx
for i in range(num_rows):
if i not in control_area:
res[i][good] = 0.5
res[i][bad] = 0.5
res[i][null] = 0
return res
def calculate_sample_matrix(sample_prob):
res = np.zeros(shape=(2, 2))
good, bad = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx
res[bad][bad] = 1.0
res[bad][good] = 0.0
res[good][bad] = sample_prob
res[good][good] = 1 - sample_prob
return res
def generate_template(template_file_name, parameters):
"""
:param template_file_name: file should be in cwd/templates/template_file_name, in jinja2 format
:param parameters: dictionary containing parameters for rendering
:return: the rendered template in string format
"""
env = Environment(loader=FileSystemLoader('templates'), lstrip_blocks=True, trim_blocks=True,
extensions=['jinja2.ext.do'])
env.globals.update(calculate_action_list=calculate_action_list,
calculate_tile_index=calculate_tile_index,
create_combination_template=create_combination_template,
calculate_pairs=calculate_pairs,
cast_to_template=cast_to_template,
calculate_move_matrix=calculate_move_matrix,
corridor_constants=CorridorConstants,
pomdpx_constants=POMDPXConstants,
time_zero=POMDPXConstants.time_zero,
time_one=POMDPXConstants.time_one,
matrix_to_string=matrix_to_string)
template = env.get_template(template_file_name)
return template.render(parameters)
def generate_domain_config(corridor_length, num_agents,
good_click_reward,
bad_click_penalty,
move_cost,
click_cost,
move_prob,
agents_positions,
bias_constant,
discount=0.95,
domain_name_suffix=""):
agents_positions = agents_positions
agents_positions = [{calculate_tile_index(corridor_length, tile): prob for tile, prob in dist.items()} for dist in
agents_positions] if agents_positions is not None else [
{calculate_tile_index(corridor_length, (0, 0)): 1.0} for _ in
range(num_agents)]
res = {"DOMAIN_NAME": "COR-%d_%dA%s" % (
corridor_length, num_agents, domain_name_suffix),
"NUM_AGENTS": num_agents,
"CORRIDOR_LENGTH": corridor_length,
"DISCOUNT": discount,
# None yields a uniform distribution
# Positions are indices not tiles!
"AGENTS_POSITIONS": agents_positions,
"PROB_MOVE": move_prob,
"MOVE_COST": -move_cost,
"CLICK_COST": -click_cost,
"GOOD_CLICK_REWARD": good | res = {}
good_matrices = calculate_good_sense_matrices(w, h, rock_positions, sense_decay_const)
bad_matrices = calculate_bad_sense_matrices(w, h, rock_positions, sense_decay_const)
for rock_idx in range(len(rock_positions)):
rock_symbol = RockSamplingConstants.rock_symbol(rock_idx)
res[rock_symbol] = {RockSamplingConstants.good_quality(): good_matrices[rock_symbol],
RockSamplingConstants.bad_quality(): bad_matrices[rock_symbol]}
return res | identifier_body |
Generator.py | (0, num_elems), 2)]
def calculate_action_list(num_agents):
all_actions = []
for agent_idx in range(num_agents):
all_actions.append(CorridorConstants.move_action(agent_idx))
for agent_idx_pair in calculate_pairs(num_agents):
all_actions.append(CorridorConstants.click_action(*agent_idx_pair))
all_actions.append(CorridorConstants.idle_action())
return all_actions
def calculate_tile_index(w, tile):
return (tile[0]) * w + (tile[1])
def calculate_tile_from_idx(w, h, tile_idx):
return tile_idx // w, tile_idx % w  # inverse of calculate_tile_index: row-major with width w
def | (length, *args):
res = [RockSamplingConstants.WILDCARD for _ in range(length)]
for i in args:
res[i] = PLACE_HOLDER_SYMBOL
return ' '.join(res)
def calculate_direction(src_tile, dst_tile):
x1, y1 = src_tile[0], src_tile[1]
x2, y2 = dst_tile[0], dst_tile[1]
if y2 - y1 > 0:
return RockSamplingConstants.right()
elif y1 - y2 > 0:
return RockSamplingConstants.left()
elif x2 - x1 > 0:
return RockSamplingConstants.down()
else:
return RockSamplingConstants.up()
def cast_to_template(template, cast):
return template.replace(PLACE_HOLDER_SYMBOL, cast)
def calculate_all_tiles(w, h):
return [(i, j) for j in range(0, w) for i in range(0, h)]
def calculate_neighbors_dict(w, h):
res = {}
tiles = calculate_all_tiles(w, h)
for tile in tiles:
cur_neighbors = []
i, j = tile[0], tile[1]
# tiles are 0-indexed (see calculate_all_tiles), so clamp at 0 and at h - 1 / w - 1
if i > 0:
cur_neighbors.append((i - 1, j))
if i < h - 1:
cur_neighbors.append((i + 1, j))
if j > 0:
cur_neighbors.append((i, j - 1))
if j < w - 1:
cur_neighbors.append((i, j + 1))
res[tile] = cur_neighbors
return res
def matrix_to_string(mat):
return '\n'.join('\t'.join('%0.2f' % x for x in y) for y in mat)
def calculate_move_matrix(corridor_length, succ_prob):
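# Builds the row-stochastic transition matrix for a forward move: from tile i the agent reaches tile i+1 with probability succ_prob and stays put otherwise; the last corridor tile is absorbing.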
num_tiles = corridor_length
res = np.zeros(shape=(num_tiles, num_tiles)) # can use sparse matrix instead
for src_tile_idx in range(num_tiles):
dst_tile_idx = min(src_tile_idx + 1, corridor_length - 1)
if dst_tile_idx == src_tile_idx:
res[src_tile_idx, dst_tile_idx] = 1.0
else:
res[src_tile_idx, src_tile_idx] = 1.0 - succ_prob
res[src_tile_idx, dst_tile_idx] = succ_prob
return res
def calculate_euclidian_distance_in_grid(p1, p2):
return np.sqrt((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2)
def calculate_good_sense_matrices(w, h, rock_positions, sense_decay_const):
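# Sensor model: the chance of reading the rock's true quality is 0.5 + 0.5 / sense_decay_const**distance, so observations decay towards a coin flip the further the agent is from the rock.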
all_tiles = [calculate_tile_from_idx(w, h, i) for i in range(w * h)]
distances = {(i, j): calculate_euclidian_distance_in_grid(all_tiles[i], all_tiles[j]) for i, j in
combinations(range(w * h), 2)}
res = {}
for rock_idx, rock_tile_idx in enumerate(rock_positions):
curr_mat = np.zeros(shape=(w * h, 3))
for curr_pos in range(w * h):
try:
distance_to_rock = distances[rock_tile_idx, curr_pos]
except KeyError:
try:
distance_to_rock = distances[curr_pos, rock_tile_idx]
except KeyError:
distance_to_rock = 0
succ_prob = 0.5 + 0.5 / (sense_decay_const ** distance_to_rock)
curr_mat[curr_pos][RockSamplingConstants.good_idx] = succ_prob
curr_mat[curr_pos][RockSamplingConstants.bad_idx] = 1 - succ_prob
curr_mat[curr_pos][RockSamplingConstants.null_idx] = 0
res[RockSamplingConstants.rock_symbol(rock_idx)] = curr_mat
return res
def calculate_bad_sense_matrices(w, h, rock_positions, sense_decay_const):
res = calculate_good_sense_matrices(w, h, rock_positions, sense_decay_const)
good, bad = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx
for rock, sense_matrix in res.items():
sense_matrix[:, [good, bad]] = sense_matrix[:, [bad, good]]
return res
def calculate_sense_martices(w, h, rock_positions, sense_decay_const):
res = {}
good_matrices = calculate_good_sense_matrices(w, h, rock_positions, sense_decay_const)
bad_matrices = calculate_bad_sense_matrices(w, h, rock_positions, sense_decay_const)
for rock_idx in range(len(rock_positions)):
rock_symbol = RockSamplingConstants.rock_symbol(rock_idx)
res[rock_symbol] = {RockSamplingConstants.good_quality(): good_matrices[rock_symbol],
RockSamplingConstants.bad_quality(): bad_matrices[rock_symbol]}
return res
def get_rock_sample_reward_matrix(good_sample_reward, bad_sample_penalty):
res = np.zeros(shape=(2, 2))
good, bad = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx
res[bad][bad] = -bad_sample_penalty
res[bad][good] = 0 # can't happen
res[good][bad] = good_sample_reward
res[good][good] = 0 # no effect
return res
def generate_random_tile(w, h):
return randint(a=0, b=h - 1), randint(a=0, b=w - 1)
def project_direction_matrix_to_control_area(direction_matrix, control_area):
num_rows, num_cols = direction_matrix.shape
res = direction_matrix.copy()
for i in range(num_rows):
if i not in control_area:
res[i] = np.zeros(num_cols)
res[i][i] = 1.0
return res
def project_sense_matrix_to_control_area(sense_matrix, control_area):
num_rows, num_obs = sense_matrix.shape
res = sense_matrix.copy()
good, bad, null = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx, RockSamplingConstants.null_idx
for i in range(num_rows):
if i not in control_area:
res[i][good] = 0.5
res[i][bad] = 0.5
res[i][null] = 0
return res
def calculate_sample_matrix(sample_prob):
res = np.zeros(shape=(2, 2))
good, bad = RockSamplingConstants.good_idx, RockSamplingConstants.bad_idx
res[bad][bad] = 1.0
res[bad][good] = 0.0
res[good][bad] = sample_prob
res[good][good] = 1 - sample_prob
return res
def generate_template(template_file_name, parameters):
"""
:param template_file_name: file should be in cwd/templates/template_file_name, in jinja2 format
:param parameters: dictionary containing parameters for rendering
:return: the rendered template in string format
"""
env = Environment(loader=FileSystemLoader('templates'), lstrip_blocks=True, trim_blocks=True,
extensions=['jinja2.ext.do'])
env.globals.update(calculate_action_list=calculate_action_list,
calculate_tile_index=calculate_tile_index,
create_combination_template=create_combination_template,
calculate_pairs=calculate_pairs,
cast_to_template=cast_to_template,
calculate_move_matrix=calculate_move_matrix,
corridor_constants=CorridorConstants,
pomdpx_constants=POMDPXConstants,
time_zero=POMDPXConstants.time_zero,
time_one=POMDPXConstants.time_one,
matrix_to_string=matrix_to_string)
template = env.get_template(template_file_name)
return template.render(parameters)
def generate_domain_config(corridor_length, num_agents,
good_click_reward,
bad_click_penalty,
move_cost,
click_cost,
move_prob,
agents_positions,
bias_constant,
discount=0.95,
domain_name_suffix=""):
agents_positions = agents_positions
agents_positions = [{calculate_tile_index(corridor_length, tile): prob for tile, prob in dist.items()} for dist in
agents_positions] if agents_positions is not None else [
{calculate_tile_index(corridor_length, (0, 0)): 1.0} for _ in
range(num_agents)]
res = {"DOMAIN_NAME": "COR-%d_%dA%s" % (
corridor_length, num_agents, domain_name_suffix),
"NUM_AGENTS": num_agents,
"CORRIDOR_LENGTH": corridor_length,
"DISCOUNT": discount,
# None yields a uniform distribution
# Positions are indices not tiles!
"AGENTS_POSITIONS": agents_positions,
"PROB_MOVE": move_prob,
"MOVE_COST": -move_cost,
"CLICK_COST": -click_cost,
"GOOD_CLICK_REWARD": | create_combination_template | identifier_name |
main.rs | let world_height = world_width * height / width;
let world_left = center.x - world_width / 2.0;
let world_top = center.y + world_height / 2.0;
let world_bottom = center.y - world_height / 2.0;
(world_width, world_height, world_left, world_top, world_bottom)
}
fn pixel_to_world(pixel_coord: &Point<f64>, zoom: f64, pixels: &Point<i32>, center: &Point<f64>) -> Point<f64> {
let (world_width, world_height, world_left, world_top, _world_bottom) = get_screen_in_world(zoom, &pixels, ¢er);
Point {
x: pixel_coord.x / (pixels.x as f64) * world_width + world_left,
y: -pixel_coord.y / (pixels.y as f64) * world_height + world_top,
}
}
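// CPU-side fractal render: each scanline is iterated on its own thread, the per-line results are collected over a channel, and everything is flattened into parallel position/color arrays for point rendering.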
fn calc_mandelbrot(pixels: &Point<i32>, center: &Point<f64>, zoom: f64) -> (Vec<GLfloat>, Vec<GLfloat>) {
let start = time::precise_time_ns();
let mut colors : Vec<GLfloat> = vec![];
let mut positions : Vec<GLfloat> = vec![];
let width = pixels.x as f64;
let height = pixels.y as f64;
let (world_width, world_height, world_left, world_top, _world_bottom) = get_screen_in_world(zoom, &pixels, ¢er);
let (tx, rx) = channel();
for y_pixel in 0..pixels.y {
let tx = tx.clone();
let x_pixels = pixels.x;
spawn(move || {
let mut line = vec![];
for x_pixel in 0..x_pixels {
let x = (x_pixel as f64) / width * world_width + world_left;
let y = -(y_pixel as f64) / height * world_height + world_top;
let iterations = mandel::calc(x, y);
line.push(iterations);
}
tx.send(Line { y: y_pixel, values: line }).unwrap();
});
}
for _y_pixel in 0..pixels.y {
let line = rx.recv().unwrap();
let mut x_pixel = 0;
for value in line.values {
let y_pixel = line.y;
positions.push(( (x_pixel as f64) / width * world_width + world_left) as f32);
positions.push((-(y_pixel as f64) / height * world_height + world_top ) as f32);
let color = value as GLfloat / mandel::DETAIL as GLfloat;
colors.push(color);
colors.push(color);
colors.push(color);
// increment after use so the vertex position matches the 0-based pixel index used to compute the iterations
x_pixel += 1;
}
}
let end = time::precise_time_ns();
println!("Calculated fractal in {}", HumanTimeDuration { nanoseconds: end - start });
(positions, colors)
}
fn draw_fractal(positions : &Vec<GLfloat>, colors : &Vec<GLfloat>, vertex_buffer : GLuint, color_buffer : GLuint, window: &mut Window) {
let points = colors.len() / 3;
unsafe {
load_vector_in_buffer(vertex_buffer, &positions);
load_vector_in_buffer(color_buffer, &colors);
gl::DrawArrays(gl::POINTS, 0, points as i32);
window.swap_buffers();
}
}
fn main() {
let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
glfw.window_hint(WindowHint::ContextVersion(3, 2));
glfw.window_hint(WindowHint::OpenGlForwardCompat(true));
glfw.window_hint(WindowHint::OpenGlProfile(OpenGlProfileHint::Core));
let x_initial_points = 500;
let y_initial_points = 300;
// since mouse button events don't send mouse positions, we need to store them
let mut mouse = Point::new(0f64, 0f64);
let mut mouse_start_pan = Point::new(0f64, 0f64);
let mut mouse_button_1_pressed = false;
let mut zoom = 2.0;
let mut center = Point::new(-0.7, 0.0);
let (mut window, events) = glfw.create_window(x_initial_points, y_initial_points, "Mandelbrot", WindowMode::Windowed)
.expect("Failed to create GLFW window.");
let mut pixels = {
let (x_pixels, y_pixels) = window.get_framebuffer_size();
Point::new(x_pixels, y_pixels)
};
// on "retina displays" there are two pixels per point, otherwise, it is one
let pixel_size = pixels.x / (x_initial_points as i32);
window.set_key_polling(true);
window.set_framebuffer_size_polling(true);
window.set_scroll_polling(true);
window.set_cursor_pos_polling(true);
window.set_mouse_button_polling(true);
window.make_current();
gl::load_with(|s| window.get_proc_address(s));
let vertex_shader = gl_util::compile_shader(&load_shader("mandel.v.glsl"), gl::VERTEX_SHADER);
let fragment_shader = gl_util::compile_shader(&load_shader("mandel.f.glsl"), gl::FRAGMENT_SHADER);
let program = gl_util::link_program(vertex_shader, fragment_shader);
unsafe {
gl::ClearColor(0.0, 0.0, 0.0, 1.0);
gl::Clear(gl::COLOR_BUFFER_BIT);
}
let mut vertex_array = 0;
let vertex_buffer = create_buffer();
let color_buffer = create_buffer();
unsafe {
gl::GenVertexArrays(1, &mut vertex_array);
gl::BindVertexArray(vertex_array);
gl::UseProgram(program);
gl::BindFragDataLocation(program, 0, CString::new("out_color").unwrap().as_ptr());
bind_attribute_to_buffer(program, "position", vertex_buffer, 2);
bind_attribute_to_buffer(program, "color", color_buffer, 3);
}
let mut current_tile : Option<Tile> = None;
let (tx_incoming_order, rx_incoming_order ) = channel();
let (tx_completed_order, rx_completed_order) = channel();
spawn(move || {
loop {
let tile_spec : TileSpecification = rx_incoming_order.recv().unwrap();
let (positions, colors) = calc_mandelbrot(&tile_spec.pixels, &tile_spec.center, tile_spec.zoom);
tx_completed_order.send(Tile { specification: tile_spec, positions: positions, colors: colors }).unwrap();
}
});
let mut tile_queue_empty = true;
while !window.should_close() {
let mut needs_redraw = false;
glfw.poll_events();
for (_, event) in glfw::flush_messages(&events) {
match event {
glfw::WindowEvent::Key(Key::Escape, _, _, _) => {
window.set_should_close(true)
}
glfw::WindowEvent::FramebufferSize(width, height) => {
pixels.x = width;
pixels.y = height;
needs_redraw = true;
}
glfw::WindowEvent::Scroll(_x, y) => {
let old_world = pixel_to_world(&mouse, zoom, &pixels, ¢er);
zoom += y;
let new_world = pixel_to_world(&mouse, zoom, &pixels, ¢er);
center = center + old_world - new_world;
needs_redraw = true;
}
glfw::WindowEvent::MouseButton(glfw::MouseButton::Button1, glfw::Action::Press, _) => {
mouse_button_1_pressed = true;
mouse_start_pan = mouse;
}
glfw::WindowEvent::MouseButton(glfw::MouseButton::Button1, glfw::Action::Release, _) => {
mouse_button_1_pressed = false;
}
glfw::WindowEvent::CursorPos(x, y) => {
mouse.x = x;
mouse.y = y;
if mouse_button_1_pressed {
let world_per_pixel = world_width_from_zoom(zoom) / (pixels.x as f64);
let world_per_point = world_per_pixel * (pixel_size as f64);
let mut mouse_movement = mouse - mouse_start_pan;
mouse_movement.y = -mouse_movement.y;
center = center - mouse_movement * world_per_point;
mouse_start_pan = mouse;
needs_redraw = true;
}
}
e => { println!("Unhandled event: {:?}", e); }
}
}
match rx_completed_order.try_recv() {
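// non-blocking poll for a freshly rendered tile; the event loop keeps running while the worker is busy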
Ok(tile) => {
current_tile = Some(tile);
tile_queue_empty = true;
needs_redraw = true;
},
_ => {
// TODO: Handle disconnect
}
}
if needs_redraw { |
unsafe {
gl::ClearColor(0.2, 0.1, 0.05, 1.0);
gl::Clear(gl::COLOR_BUFFER_BIT);
}
unsafe { set_viewport(program, zoom, &pixels, ¢er) };
match current_tile {
Some(ref tile) => {
draw_fractal(&tile.positions, &tile.colors, vertex_buffer, color_buffer, &mut window);
}
None => { /* no tile ready yet */ }
}
}
| conditional_block |
|
main.rs | >, center: &Point<f64>) -> (f64, f64, f64, f64, f64) {
let width = pixels.x as f64;
let height = pixels.y as f64;
let world_width = world_width_from_zoom(zoom);
let world_height = world_width * height / width;
let world_left = center.x - world_width / 2.0;
let world_top = center.y + world_height / 2.0;
let world_bottom = center.y - world_height / 2.0;
(world_width, world_height, world_left, world_top, world_bottom)
}
fn pixel_to_world(pixel_coord: &Point<f64>, zoom: f64, pixels: &Point<i32>, center: &Point<f64>) -> Point<f64> {
let (world_width, world_height, world_left, world_top, _world_bottom) = get_screen_in_world(zoom, &pixels, ¢er);
Point {
x: pixel_coord.x / (pixels.x as f64) * world_width + world_left,
y: -pixel_coord.y / (pixels.y as f64) * world_height + world_top,
}
}
fn calc_mandelbrot(pixels: &Point<i32>, center: &Point<f64>, zoom: f64) -> (Vec<GLfloat>, Vec<GLfloat>) {
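// one thread per scanline: each worker sends back a row of iteration counts, which are flattened into point positions and grayscale colors for a single glDrawArrays call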
let start = time::precise_time_ns();
let mut colors : Vec<GLfloat> = vec![];
let mut positions : Vec<GLfloat> = vec![];
let width = pixels.x as f64;
let height = pixels.y as f64;
let (world_width, world_height, world_left, world_top, _world_bottom) = get_screen_in_world(zoom, &pixels, ¢er);
let (tx, rx) = channel();
for y_pixel in 0..pixels.y {
let tx = tx.clone();
let x_pixels = pixels.x;
spawn(move || {
let mut line = vec![];
for x_pixel in 0..x_pixels {
let x = (x_pixel as f64) / width * world_width + world_left;
let y = -(y_pixel as f64) / height * world_height + world_top;
let iterations = mandel::calc(x, y);
line.push(iterations);
}
tx.send(Line { y: y_pixel, values: line }).unwrap();
});
}
for _y_pixel in 0..pixels.y {
let line = rx.recv().unwrap();
// enumerate keeps x_pixel zero-based so positions line up with the x values the worker used
for (x_pixel, value) in line.values.into_iter().enumerate() {
let y_pixel = line.y;
positions.push(( (x_pixel as f64) / width * world_width + world_left) as f32);
positions.push((-(y_pixel as f64) / height * world_height + world_top ) as f32);
let color = value as GLfloat / mandel::DETAIL as GLfloat;
colors.push(color);
colors.push(color);
colors.push(color);
}
}
let end = time::precise_time_ns();
println!("Calculated fractal in {}", HumanTimeDuration { nanoseconds: end - start });
(positions, colors)
}
fn draw_fractal(positions : &Vec<GLfloat>, colors : &Vec<GLfloat>, vertex_buffer : GLuint, color_buffer : GLuint, window: &mut Window) {
let points = colors.len() / 3;
unsafe {
load_vector_in_buffer(vertex_buffer, &positions);
load_vector_in_buffer(color_buffer, &colors);
gl::DrawArrays(gl::POINTS, 0, points as i32);
window.swap_buffers();
}
}
fn main() {
let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
glfw.window_hint(WindowHint::ContextVersion(3, 2));
glfw.window_hint(WindowHint::OpenGlForwardCompat(true));
glfw.window_hint(WindowHint::OpenGlProfile(OpenGlProfileHint::Core));
let x_initial_points = 500;
let y_initial_points = 300;
// since mouse button events don't send mouse positions, we need to store them
let mut mouse = Point::new(0f64, 0f64);
let mut mouse_start_pan = Point::new(0f64, 0f64);
let mut mouse_button_1_pressed = false;
let mut zoom = 2.0;
let mut center = Point::new(-0.7, 0.0);
let (mut window, events) = glfw.create_window(x_initial_points, y_initial_points, "Mandelbrot", WindowMode::Windowed)
.expect("Failed to create GLFW window.");
let mut pixels = {
let (x_pixels, y_pixels) = window.get_framebuffer_size();
Point::new(x_pixels, y_pixels)
};
// on "retina displays" there are two pixels per point, otherwise, it is one
let pixel_size = pixels.x / (x_initial_points as i32);
window.set_key_polling(true);
window.set_framebuffer_size_polling(true);
window.set_scroll_polling(true);
window.set_cursor_pos_polling(true);
window.set_mouse_button_polling(true);
window.make_current();
gl::load_with(|s| window.get_proc_address(s));
let vertex_shader = gl_util::compile_shader(&load_shader("mandel.v.glsl"), gl::VERTEX_SHADER);
let fragment_shader = gl_util::compile_shader(&load_shader("mandel.f.glsl"), gl::FRAGMENT_SHADER);
let program = gl_util::link_program(vertex_shader, fragment_shader);
unsafe {
gl::ClearColor(0.0, 0.0, 0.0, 1.0);
gl::Clear(gl::COLOR_BUFFER_BIT);
}
let mut vertex_array = 0;
let vertex_buffer = create_buffer();
let color_buffer = create_buffer();
unsafe {
gl::GenVertexArrays(1, &mut vertex_array);
gl::BindVertexArray(vertex_array);
gl::UseProgram(program);
gl::BindFragDataLocation(program, 0, CString::new("out_color").unwrap().as_ptr());
bind_attribute_to_buffer(program, "position", vertex_buffer, 2);
bind_attribute_to_buffer(program, "color", color_buffer, 3);
}
let mut current_tile : Option<Tile> = None;
let (tx_incoming_order, rx_incoming_order ) = channel();
let (tx_completed_order, rx_completed_order) = channel();
spawn(move || {
loop {
let tile_spec : TileSpecification = rx_incoming_order.recv().unwrap();
let (positions, colors) = calc_mandelbrot(&tile_spec.pixels, &tile_spec.center, tile_spec.zoom);
tx_completed_order.send(Tile { specification: tile_spec, positions: positions, colors: colors }).unwrap();
}
});
let mut tile_queue_empty = true;
while !window.should_close() {
let mut needs_redraw = false;
glfw.poll_events();
for (_, event) in glfw::flush_messages(&events) {
match event {
glfw::WindowEvent::Key(Key::Escape, _, _, _) => {
window.set_should_close(true)
}
glfw::WindowEvent::FramebufferSize(width, height) => {
pixels.x = width;
pixels.y = height;
needs_redraw = true;
}
glfw::WindowEvent::Scroll(_x, y) => {
let old_world = pixel_to_world(&mouse, zoom, &pixels, ¢er);
zoom += y;
let new_world = pixel_to_world(&mouse, zoom, &pixels, ¢er);
center = center + old_world - new_world;
needs_redraw = true;
}
glfw::WindowEvent::MouseButton(glfw::MouseButton::Button1, glfw::Action::Press, _) => {
mouse_button_1_pressed = true;
mouse_start_pan = mouse;
}
glfw::WindowEvent::MouseButton(glfw::MouseButton::Button1, glfw::Action::Release, _) => {
mouse_button_1_pressed = false;
}
glfw::WindowEvent::CursorPos(x, y) => {
mouse.x = x;
mouse.y = y;
if mouse_button_1_pressed {
let world_per_pixel = world_width_from_zoom(zoom) / (pixels.x as f64);
let world_per_point = world_per_pixel * (pixel_size as f64);
let mut mouse_movement = mouse - mouse_start_pan;
mouse_movement.y = -mouse_movement.y;
center = center - mouse_movement * world_per_point;
mouse_start_pan = mouse;
needs_redraw = true;
}
}
e => { println!("Unhandled event: {:?}", e); }
}
}
match rx_completed_order.try_recv() {
Ok(tile) => {
current_tile = Some(tile);
tile_queue_empty = true;
needs_redraw = true;
},
_ => {
// TODO: Handle disconnect
}
}
if needs_redraw {
unsafe {
gl::ClearColor(0.2, 0.1, 0.05, 1.0); | gl::Clear(gl::COLOR_BUFFER_BIT);
} | random_line_split |
|
main.rs | let ns = self.nanoseconds;
match ns {
0 ... 1_000 => fmt.write_fmt(format_args!("{} ns", ns)),
1_000 ... 1_000_000 => fmt.write_fmt(format_args!("{:.*} µs", 2, (ns as f64) / 1_000f64)),
1_000_000 ... 1_000_000_000 => fmt.write_fmt(format_args!("{:.*} ms", 2, (ns as f64) / 1_000_000f64)),
_ => fmt.write_fmt(format_args!("{:.*} s" , 2, (ns as f64) / 1_000_000_000f64)),
}
}
}
// TODO: return result with a useful error type
fn load_shader(filename: &str) -> String {
let mut file = File::open(filename)
.ok().unwrap_or_else(|| File::open("src/".to_string()+filename)
.ok().expect(&format!("Could not open shader file {}", filename)));
let mut bytes = Vec::new();
file.read_to_end(&mut bytes).ok().expect(&format!("Failed to read from shader file {}", filename));
String::from_utf8(bytes).ok().expect(&format!("Shader file not UTF-8: {}", filename))
}
fn create_buffer() -> GLuint {
unsafe {
let mut buffer = 0;
gl::GenBuffers(1, &mut buffer);
buffer
}
}
unsafe fn load_vector_in_buffer(buffer: u32, values: &Vec<GLfloat>) {
gl::BindBuffer(gl::ARRAY_BUFFER, buffer);
gl::BufferData(gl::ARRAY_BUFFER,
(values.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
mem::transmute(&values[0]),
gl::STATIC_DRAW);
}
unsafe fn bind_attribute_to_buffer(program: u32, attribute_name: &str, buffer: u32, components: i32) {
gl::BindBuffer(gl::ARRAY_BUFFER, buffer);
let attribute = gl::GetAttribLocation(program, CString::new(attribute_name).unwrap().as_ptr()) as GLuint;
gl::EnableVertexAttribArray(attribute);
gl::VertexAttribPointer(attribute, components, gl::FLOAT, gl::FALSE as GLboolean, 0, ptr::null());
}
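// each unit of zoom doubles the visible world width (2^zoom)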
fn world_width_from_zoom(zoom: f64) -> f64 {
2f64.powf(zoom)
}
unsafe fn set_viewport(program: GLuint, zoom: f64, pixels: &Point<i32>, center: &Point<f64>) {
let (world_width, world_height, world_left, _world_top, world_bottom) = get_screen_in_world(zoom, &pixels, ¢er);
gl::Uniform2f(gl::GetUniformLocation(program, CString::new("world_bottom_left").unwrap().as_ptr()), world_left as f32, world_bottom as f32);
gl::Uniform2f(gl::GetUniformLocation(program, CString::new("world_dimensions" ).unwrap().as_ptr()), world_width as f32, world_height as f32);
}
fn get_screen_in_world(zoom: f64, pixels: &Point<i32>, center: &Point<f64>) -> (f64, f64, f64, f64, f64) {
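// world-space rectangle currently on screen: width comes from the zoom level, height follows the window's aspect ratio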
let width = pixels.x as f64;
let height = pixels.y as f64;
let world_width = world_width_from_zoom(zoom);
let world_height = world_width * height / width;
let world_left = center.x - world_width / 2.0;
let world_top = center.y + world_height / 2.0;
let world_bottom = center.y - world_height / 2.0;
(world_width, world_height, world_left, world_top, world_bottom)
}
fn pixel_to_world(pixel_coord: &Point<f64>, zoom: f64, pixels: &Point<i32>, center: &Point<f64>) -> Point<f64> {
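// map a window coordinate (origin top-left, y down) into world coordinates (y up)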
let (world_width, world_height, world_left, world_top, _world_bottom) = get_screen_in_world(zoom, &pixels, ¢er);
Point {
x: pixel_coord.x / (pixels.x as f64) * world_width + world_left,
y: -pixel_coord.y / (pixels.y as f64) * world_height + world_top,
}
}
fn calc_mandelbrot(pixels: &Point<i32>, center: &Point<f64>, zoom: f64) -> (Vec<GLfloat>, Vec<GLfloat>) {
let start = time::precise_time_ns();
let mut colors : Vec<GLfloat> = vec![];
let mut positions : Vec<GLfloat> = vec![];
let width = pixels.x as f64;
let height = pixels.y as f64;
let (world_width, world_height, world_left, world_top, _world_bottom) = get_screen_in_world(zoom, &pixels, ¢er);
let (tx, rx) = channel();
for y_pixel in 0..pixels.y {
let tx = tx.clone();
let x_pixels = pixels.x;
spawn(move || {
let mut line = vec![];
for x_pixel in 0..x_pixels {
let x = (x_pixel as f64) / width * world_width + world_left;
let y = -(y_pixel as f64) / height * world_height + world_top;
let iterations = mandel::calc(x, y);
line.push(iterations);
}
tx.send(Line { y: y_pixel, values: line }).unwrap();
});
}
for _y_pixel in 0..pixels.y {
let line = rx.recv().unwrap();
// enumerate keeps x_pixel zero-based so positions line up with the x values the worker used
for (x_pixel, value) in line.values.into_iter().enumerate() {
let y_pixel = line.y;
positions.push(( (x_pixel as f64) / width * world_width + world_left) as f32);
positions.push((-(y_pixel as f64) / height * world_height + world_top ) as f32);
let color = value as GLfloat / mandel::DETAIL as GLfloat;
colors.push(color);
colors.push(color);
colors.push(color);
}
}
let end = time::precise_time_ns();
println!("Calculated fractal in {}", HumanTimeDuration { nanoseconds: end - start });
(positions, colors)
}
fn draw_fractal(positions : &Vec<GLfloat>, colors : &Vec<GLfloat>, vertex_buffer : GLuint, color_buffer : GLuint, window: &mut Window) { |
fn main() {
let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
glfw.window_hint(WindowHint::ContextVersion(3, 2));
glfw.window_hint(WindowHint::OpenGlForwardCompat(true));
glfw.window_hint(WindowHint::OpenGlProfile(OpenGlProfileHint::Core));
let x_initial_points = 500;
let y_initial_points = 300;
// since mouse button events don't send mouse positions, we need to store them
let mut mouse = Point::new(0f64, 0f64);
let mut mouse_start_pan = Point::new(0f64, 0f64);
let mut mouse_button_1_pressed = false;
let mut zoom = 2.0;
let mut center = Point::new(-0.7, 0.0);
let (mut window, events) = glfw.create_window(x_initial_points, y_initial_points, "Mandelbrot", WindowMode::Windowed)
.expect("Failed to create GLFW window.");
let mut pixels = {
let (x_pixels, y_pixels) = window.get_framebuffer_size();
Point::new(x_pixels, y_pixels)
};
// on "retina displays" there are two pixels per point, otherwise, it is one
let pixel_size = pixels.x / (x_initial_points as i32);
window.set_key_polling(true);
window.set_framebuffer_size_polling(true);
window.set_scroll_polling(true);
window.set_cursor_pos_polling(true);
window.set_mouse_button_polling(true);
window.make_current();
gl::load_with(|s| window.get_proc_address(s));
let vertex_shader = gl_util::compile_shader(&load_shader("mandel.v.glsl"), gl::VERTEX_SHADER);
let fragment_shader = gl_util::compile_shader(&load_shader("mandel.f.glsl"), gl::FRAGMENT_SHADER);
let program = gl_util::link_program(vertex_shader, fragment_shader);
unsafe {
gl::ClearColor(0.0, 0.0, 0.0, 1.0);
gl::Clear(gl::COLOR_BUFFER_BIT);
}
let |
let points = colors.len() / 3;
unsafe {
load_vector_in_buffer(vertex_buffer, &positions);
load_vector_in_buffer(color_buffer, &colors);
gl::DrawArrays(gl::POINTS, 0, points as i32);
window.swap_buffers();
}
}
| identifier_body |
main.rs | let ns = self.nanoseconds;
match ns {
0 ... 1_000 => fmt.write_fmt(format_args!("{} ns", ns)),
1_000 ... 1_000_000 => fmt.write_fmt(format_args!("{:.*} µs", 2, (ns as f64) / 1_000f64)),
1_000_000 ... 1_000_000_000 => fmt.write_fmt(format_args!("{:.*} ms", 2, (ns as f64) / 1_000_000f64)),
_ => fmt.write_fmt(format_args!("{:.*} s" , 2, (ns as f64) / 1_000_000_000f64)),
}
}
}
// TODO: return result with a useful error type
fn load_shader(filename: &str) -> String {
let mut file = File::open(filename)
.ok().unwrap_or_else(|| File::open("src/".to_string()+filename)
.ok().expect(&format!("Could not open shader file {}", filename)));
let mut bytes = Vec::new();
file.read_to_end(&mut bytes).ok().expect(&format!("Failed to read from shader file {}", filename));
String::from_utf8(bytes).ok().expect(&format!("Shader file not UTF-8: {}", filename))
}
fn create_buffer() -> GLuint {
unsafe {
let mut buffer = 0;
gl::GenBuffers(1, &mut buffer);
buffer
}
}
unsafe fn l | buffer: u32, values: &Vec<GLfloat>) {
gl::BindBuffer(gl::ARRAY_BUFFER, buffer);
gl::BufferData(gl::ARRAY_BUFFER,
(values.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
mem::transmute(&values[0]),
gl::STATIC_DRAW);
}
unsafe fn bind_attribute_to_buffer(program: u32, attribute_name: &str, buffer: u32, components: i32) {
gl::BindBuffer(gl::ARRAY_BUFFER, buffer);
let attribute = gl::GetAttribLocation(program, CString::new(attribute_name).unwrap().as_ptr()) as GLuint;
gl::EnableVertexAttribArray(attribute);
gl::VertexAttribPointer(attribute, components, gl::FLOAT, gl::FALSE as GLboolean, 0, ptr::null());
}
fn world_width_from_zoom(zoom: f64) -> f64 {
2f64.powf(zoom)
}
unsafe fn set_viewport(program: GLuint, zoom: f64, pixels: &Point<i32>, center: &Point<f64>) {
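// pass the visible world rectangle to the shaders as two vec2 uniforms: bottom-left corner and width/height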
let (world_width, world_height, world_left, _world_top, world_bottom) = get_screen_in_world(zoom, &pixels, ¢er);
gl::Uniform2f(gl::GetUniformLocation(program, CString::new("world_bottom_left").unwrap().as_ptr()), world_left as f32, world_bottom as f32);
gl::Uniform2f(gl::GetUniformLocation(program, CString::new("world_dimensions" ).unwrap().as_ptr()), world_width as f32, world_height as f32);
}
fn get_screen_in_world(zoom: f64, pixels: &Point<i32>, center: &Point<f64>) -> (f64, f64, f64, f64, f64) {
let width = pixels.x as f64;
let height = pixels.y as f64;
let world_width = world_width_from_zoom(zoom);
let world_height = world_width * height / width;
let world_left = center.x - world_width / 2.0;
let world_top = center.y + world_height / 2.0;
let world_bottom = center.y - world_height / 2.0;
(world_width, world_height, world_left, world_top, world_bottom)
}
fn pixel_to_world(pixel_coord: &Point<f64>, zoom: f64, pixels: &Point<i32>, center: &Point<f64>) -> Point<f64> {
let (world_width, world_height, world_left, world_top, _world_bottom) = get_screen_in_world(zoom, &pixels, ¢er);
Point {
x: pixel_coord.x / (pixels.x as f64) * world_width + world_left,
y: -pixel_coord.y / (pixels.y as f64) * world_height + world_top,
}
}
fn calc_mandelbrot(pixels: &Point<i32>, center: &Point<f64>, zoom: f64) -> (Vec<GLfloat>, Vec<GLfloat>) {
let start = time::precise_time_ns();
let mut colors : Vec<GLfloat> = vec![];
let mut positions : Vec<GLfloat> = vec![];
let width = pixels.x as f64;
let height = pixels.y as f64;
let (world_width, world_height, world_left, world_top, _world_bottom) = get_screen_in_world(zoom, &pixels, ¢er);
let (tx, rx) = channel();
for y_pixel in 0..pixels.y {
let tx = tx.clone();
let x_pixels = pixels.x;
spawn(move || {
let mut line = vec![];
for x_pixel in 0..x_pixels {
let x = (x_pixel as f64) / width * world_width + world_left;
let y = -(y_pixel as f64) / height * world_height + world_top;
let iterations = mandel::calc(x, y);
line.push(iterations);
}
tx.send(Line { y: y_pixel, values: line }).unwrap();
});
}
for _y_pixel in 0..pixels.y {
let line = rx.recv().unwrap();
// enumerate keeps x_pixel zero-based so positions line up with the x values the worker used
for (x_pixel, value) in line.values.into_iter().enumerate() {
let y_pixel = line.y;
positions.push(( (x_pixel as f64) / width * world_width + world_left) as f32);
positions.push((-(y_pixel as f64) / height * world_height + world_top ) as f32);
let color = value as GLfloat / mandel::DETAIL as GLfloat;
colors.push(color);
colors.push(color);
colors.push(color);
}
}
let end = time::precise_time_ns();
println!("Calculated fractal in {}", HumanTimeDuration { nanoseconds: end - start });
(positions, colors)
}
fn draw_fractal(positions : &Vec<GLfloat>, colors : &Vec<GLfloat>, vertex_buffer : GLuint, color_buffer : GLuint, window: &mut Window) {
let points = colors.len() / 3;
unsafe {
load_vector_in_buffer(vertex_buffer, &positions);
load_vector_in_buffer(color_buffer, &colors);
gl::DrawArrays(gl::POINTS, 0, points as i32);
window.swap_buffers();
}
}
fn main() {
let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
glfw.window_hint(WindowHint::ContextVersion(3, 2));
glfw.window_hint(WindowHint::OpenGlForwardCompat(true));
glfw.window_hint(WindowHint::OpenGlProfile(OpenGlProfileHint::Core));
let x_initial_points = 500;
let y_initial_points = 300;
// since mouse button events don't send mouse positions, we need to store them
let mut mouse = Point::new(0f64, 0f64);
let mut mouse_start_pan = Point::new(0f64, 0f64);
let mut mouse_button_1_pressed = false;
let mut zoom = 2.0;
let mut center = Point::new(-0.7, 0.0);
let (mut window, events) = glfw.create_window(x_initial_points, y_initial_points, "Mandelbrot", WindowMode::Windowed)
.expect("Failed to create GLFW window.");
let mut pixels = {
let (x_pixels, y_pixels) = window.get_framebuffer_size();
Point::new(x_pixels, y_pixels)
};
// on "retina displays" there are two pixels per point, otherwise, it is one
let pixel_size = pixels.x / (x_initial_points as i32);
window.set_key_polling(true);
window.set_framebuffer_size_polling(true);
window.set_scroll_polling(true);
window.set_cursor_pos_polling(true);
window.set_mouse_button_polling(true);
window.make_current();
gl::load_with(|s| window.get_proc_address(s));
let vertex_shader = gl_util::compile_shader(&load_shader("mandel.v.glsl"), gl::VERTEX_SHADER);
let fragment_shader = gl_util::compile_shader(&load_shader("mandel.f.glsl"), gl::FRAGMENT_SHADER);
let program = gl_util::link_program(vertex_shader, fragment_shader);
unsafe {
gl::ClearColor(0.0, 0.0, 0.0, 1.0);
gl::Clear(gl::COLOR_BUFFER_BIT);
}
let | oad_vector_in_buffer( | identifier_name |
model.rs | Task(_) = self {
writeln!(out, "Deleted task {}", self.task_id())?;
} else {
let task = model.get_task(self.task_id()).unwrap(); // TODO
match self {
DeleteTask(_) => unreachable!(),
AddTask(_) => writeln!(out, "Added Task {}", task.short_id())?,
ChangeTaskTags {
ref added,
ref removed,
..
} => {
if !added.is_empty() {
writeln!(out, "Added tags {:?}", added)?;
}
if !removed.is_empty() {
writeln!(out, "Removed tags {:?}", removed)?;
}
}
ChangeTaskState(_uuid, ref state) => match *state {
TaskState::Done(_) => writeln!(out, "Marking task '{}' as done", task.description)?,
TaskState::Open => writeln!(out, "Marking task '{}' as open", task.description)?,
TaskState::Canceled(_) => {
writeln!(out, "Marking task '{}' as canceled", task.description)?
}
},
ChangeTaskPriority(_uuid, ref priority) => {
writeln!(
out,
"Changed priority of task '{}' to {}",
task.description, priority
)?;
}
};
}
Ok(())
}
}
pub type ScopeName = String;
pub type NumericalIds = HashMap<ScopeName, BTreeMap<u64, Uuid>>;
pub struct Model {
// TODO: hide `tasks` and add `archived_tasks`
pub tasks: HashMap<Uuid, Task>,
pub applied_effects: Vec<Effect>,
pub numerical_ids: NumericalIds,
is_dirty: bool,
}
impl Model {
pub fn new() -> Self {
Model {
tasks: HashMap::new(),
applied_effects: Vec::new(),
numerical_ids: NumericalIds::new(),
is_dirty: false,
}
}
pub fn from_effects(effects: &[Effect]) -> Self {
let mut model = Self::new();
for effect in effects {
model.apply_effect(&effect)
}
model.is_dirty = false;
model
}
pub fn apply_effect(&mut self, effect: &Effect) -> () {
use Effect::*;
match effect.clone() {
AddTask(task) => {
self.add_task(task);
}
ChangeTaskTags {
uuid,
added,
removed,
} => {
self.change_task_tags(&uuid, added, removed);
}
ChangeTaskState(uuid, state) => {
self.change_task_state(&uuid, state);
}
ChangeTaskPriority(uuid, p) => {
self.change_task_priority(&uuid, p);
}
DeleteTask(uuid) => {
self.delete_task(&uuid);
}
}
self.applied_effects.push(effect.clone());
self.is_dirty = true;
}
fn add_task(&mut self, t: Task) -> () {
if self.tasks.insert(t.uuid, t).is_some() {
panic!("UUID collision in Model::add_task");
}
}
fn delete_task(&mut self, u: &Uuid) -> Option<Task> {
self.tasks.remove(&u)
}
fn change_task_state(&mut self, u: &Uuid, state: TaskState) {
self.tasks.get_mut(u).expect("failed to get task").status = state;
}
fn change_task_priority(&mut self, u: &Uuid, priority: Priority) {
self.tasks.get_mut(u).expect("failed to get task").priority = priority;
}
fn change_task_tags(&mut self, u: &Uuid, added: Tags, removed: Tags) {
let ref mut tags = self.tasks.get_mut(u).expect("failed to get task").tags;
for t in removed {
tags.remove(&t);
}
for t in added {
tags.insert(t);
}
}
}
// Numerical-ID Handling
impl Model {
pub fn short_task_id(&self, scope_name: &str, task_id: &Uuid) -> Option<u64> {
self.numerical_ids
.get(scope_name)
.and_then(|ids| ids.iter().find(|&(_, uuid)| uuid == task_id))
.map(|(n, _)| *n)
}
pub fn recalculate_numerical_ids(&mut self, scope: &str, task_ids: &[Uuid]) {
info!("Recalculating numerical-ids for scope {}", scope);
self.is_dirty = true;
let ids = task_ids
.iter()
.enumerate()
.map(|(n, uuid)| ((n as u64) + 1, uuid.clone()))
.collect();
self.numerical_ids.insert(scope.into(), ids);
}
pub fn incremental_numerical_id(&mut self, scope: &str, task: &Uuid) -> u64 {
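// reuse the task's existing numeric id in this scope if it has one; otherwise allocate the next free id (max + 1) and remember it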
debug!(
"Calculating incremental numerical-id for {} in scope {}",
task, scope
);
assert!(self.get_task(task).is_some());
self.short_task_id(scope, task).unwrap_or_else(|| {
self.is_dirty = true;
let numerical_ids = self.numerical_ids
.entry(scope.into())
.or_insert(BTreeMap::new());
let n = numerical_ids.iter().map(|(id, _)| *id).max().unwrap_or(0) + 1;
numerical_ids.insert(n, task.clone());
n
})
}
}
#[derive(Debug, PartialEq, Eq, Fail)]
pub enum FindTaskError {
#[fail(display = "Couldn't find task")]
TaskNotFound,
#[fail(display = "Found multiple tasks")]
MultipleResults,
}
pub struct TaskIter<'a> {
tasks: Vec<&'a Task>,
pos: usize,
}
impl<'a> Iterator for TaskIter<'a> {
type Item = &'a Task;
fn next(&mut self) -> Option<Self::Item> {
let v = self.tasks.get(self.pos);
self.pos += 1;
v.map(|x| *x)
}
}
impl Model {
pub fn all_tasks<'a>(&'a self) -> TaskIter<'a> {
let mut v: Vec<&Task> = self.tasks.values().collect();
v.sort_by(|a, b| b.cmp(a));
TaskIter { tasks: v, pos: 0 }
}
pub fn get_task<'a>(&'a self, uuid: &Uuid) -> Option<&'a Task> {
self.tasks.get(uuid)
}
pub fn find_task<'a>(
&'a self,
scope_name: &str,
task_ref: &TaskRef,
) -> Result<&'a Task, FindTaskError> {
let uuids: Vec<&Uuid> = match *task_ref {
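// a short uuid prefix may match several tasks, so all candidates are collected and ambiguity is reported below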
TaskRef::FullUUID(ref u) => vec![u],
TaskRef::ShortUUID(ref s) => self.tasks
.keys()
.filter(|uuid| uuid.simple().to_string().starts_with(s))
.collect(),
TaskRef::Numerical(ref n) => {
match self.numerical_ids.get(scope_name).and_then(|x| x.get(n)) {
Some(uuid) => vec![uuid],
None => vec![],
}
}
};
use self::FindTaskError::*;
match uuids.len() {
0 => Err(TaskNotFound),
1 => self.get_task(uuids[0])
.map_or(Err(FindTaskError::TaskNotFound), Ok),
_ => Err(MultipleResults),
}
}
pub fn is_dirty(&self) -> bool {
self.is_dirty
}
}
#[cfg(test)]
mod tests {
use super::*;
use chrono;
use std::str::FromStr;
use uuid::Uuid;
use {Priority, Task, TaskState};
#[test]
fn test_add_delete_task() {
let mut m = Model::new();
let t = Task::new("foo");
m.add_task(t.clone());
assert_eq!(m.get_task(&t.uuid), Some(&t));
assert_eq!(m.delete_task(&t.uuid), Some(t.clone()));
assert_eq!(m.get_task(&t.uuid), None);
}
#[test]
fn test_change_task_state() {
let mut m = Model::new();
let t = Task::new("foo");
let uuid = t.uuid.clone();
m.add_task(t.clone());
assert_eq!(m.tasks[&uuid].status, TaskState::Open);
let s = TaskState::Done(chrono::Utc::now());
m.change_task_state(&uuid, s);
assert_eq!(m.tasks[&uuid].status, s);
}
#[test]
fn test_change_task_priority() {
let mut m = Model::new();
let t = Task::new("foo");
let uuid = t.uuid.clone();
m.add_task(t.clone());
assert_eq!(m.tasks[&uuid].priority, Priority::Default);
m.change_task_priority(&uuid, Priority::High);
assert_eq!(m.tasks[&uuid].priority, Priority::High);
}
#[test]
fn test_numerical_ref() {
assert_eq!(TaskRef::from_str("42"), Ok(TaskRef::Numerical(42)));
assert_eq!(TaskRef::from_str("0"), Ok(TaskRef::Numerical(0)));
assert!(TaskRef::from_str("-0").is_err());
}
| fn test_short_uuid_ref() {
| #[test] | random_line_split |
model.rs | (_) = self {
writeln!(out, "Deleted task {}", self.task_id())?;
} else {
let task = model.get_task(self.task_id()).unwrap(); // TODO
match self {
DeleteTask(_) => unreachable!(),
AddTask(_) => writeln!(out, "Added Task {}", task.short_id())?,
ChangeTaskTags {
ref added,
ref removed,
..
} => {
if !added.is_empty() {
writeln!(out, "Added tags {:?}", added)?;
}
if !removed.is_empty() {
writeln!(out, "Removed tags {:?}", removed)?;
}
}
ChangeTaskState(_uuid, ref state) => match *state {
TaskState::Done(_) => writeln!(out, "Marking task '{}' as done", task.description)?,
TaskState::Open => writeln!(out, "Marking task '{}' as open", task.description)?,
TaskState::Canceled(_) => {
writeln!(out, "Marking task '{}' as canceled", task.description)?
}
},
ChangeTaskPriority(_uuid, ref priority) => {
writeln!(
out,
"Changed priority of task '{}' to {}",
task.description, priority
)?;
}
};
}
Ok(())
}
}
pub type ScopeName = String;
pub type NumericalIds = HashMap<ScopeName, BTreeMap<u64, Uuid>>;
pub struct Model {
// TODO: hide `tasks` and add `archived_tasks`
pub tasks: HashMap<Uuid, Task>,
pub applied_effects: Vec<Effect>,
pub numerical_ids: NumericalIds,
is_dirty: bool,
}
impl Model {
pub fn new() -> Self {
Model {
tasks: HashMap::new(),
applied_effects: Vec::new(),
numerical_ids: NumericalIds::new(),
is_dirty: false,
}
}
pub fn from_effects(effects: &[Effect]) -> Self |
pub fn apply_effect(&mut self, effect: &Effect) -> () {
use Effect::*;
match effect.clone() {
AddTask(task) => {
self.add_task(task);
}
ChangeTaskTags {
uuid,
added,
removed,
} => {
self.change_task_tags(&uuid, added, removed);
}
ChangeTaskState(uuid, state) => {
self.change_task_state(&uuid, state);
}
ChangeTaskPriority(uuid, p) => {
self.change_task_priority(&uuid, p);
}
DeleteTask(uuid) => {
self.delete_task(&uuid);
}
}
self.applied_effects.push(effect.clone());
self.is_dirty = true;
}
fn add_task(&mut self, t: Task) -> () {
if self.tasks.insert(t.uuid, t).is_some() {
panic!("UUID collision in Model::add_task");
}
}
fn delete_task(&mut self, u: &Uuid) -> Option<Task> {
self.tasks.remove(&u)
}
fn change_task_state(&mut self, u: &Uuid, state: TaskState) {
self.tasks.get_mut(u).expect("failed to get task").status = state;
}
fn change_task_priority(&mut self, u: &Uuid, priority: Priority) {
self.tasks.get_mut(u).expect("failed to get task").priority = priority;
}
fn change_task_tags(&mut self, u: &Uuid, added: Tags, removed: Tags) {
let ref mut tags = self.tasks.get_mut(u).expect("failed to get task").tags;
for t in removed {
tags.remove(&t);
}
for t in added {
tags.insert(t);
}
}
}
// Numerical-ID Handling
impl Model {
pub fn short_task_id(&self, scope_name: &str, task_id: &Uuid) -> Option<u64> {
self.numerical_ids
.get(scope_name)
.and_then(|ids| ids.iter().find(|&(_, uuid)| uuid == task_id))
.map(|(n, _)| *n)
}
pub fn recalculate_numerical_ids(&mut self, scope: &str, task_ids: &[Uuid]) {
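// drop the scope's previous numbering and assign ids 1..=n in the order the task uuids are passed in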
info!("Recalculating numerical-ids for scope {}", scope);
self.is_dirty = true;
let ids = task_ids
.iter()
.enumerate()
.map(|(n, uuid)| ((n as u64) + 1, uuid.clone()))
.collect();
self.numerical_ids.insert(scope.into(), ids);
}
pub fn incremental_numerical_id(&mut self, scope: &str, task: &Uuid) -> u64 {
debug!(
"Calculating incremental numerical-id for {} in scope {}",
task, scope
);
assert!(self.get_task(task).is_some());
self.short_task_id(scope, task).unwrap_or_else(|| {
self.is_dirty = true;
let numerical_ids = self.numerical_ids
.entry(scope.into())
.or_insert(BTreeMap::new());
let n = numerical_ids.iter().map(|(id, _)| *id).max().unwrap_or(0) + 1;
numerical_ids.insert(n, task.clone());
n
})
}
}
#[derive(Debug, PartialEq, Eq, Fail)]
pub enum FindTaskError {
#[fail(display = "Couldn't find task")]
TaskNotFound,
#[fail(display = "Found multiple tasks")]
MultipleResults,
}
pub struct TaskIter<'a> {
tasks: Vec<&'a Task>,
pos: usize,
}
impl<'a> Iterator for TaskIter<'a> {
type Item = &'a Task;
fn next(&mut self) -> Option<Self::Item> {
let v = self.tasks.get(self.pos);
self.pos += 1;
v.map(|x| *x)
}
}
impl Model {
pub fn all_tasks<'a>(&'a self) -> TaskIter<'a> {
let mut v: Vec<&Task> = self.tasks.values().collect();
v.sort_by(|a, b| b.cmp(a));
TaskIter { tasks: v, pos: 0 }
}
pub fn get_task<'a>(&'a self, uuid: &Uuid) -> Option<&'a Task> {
self.tasks.get(uuid)
}
pub fn find_task<'a>(
&'a self,
scope_name: &str,
task_ref: &TaskRef,
) -> Result<&'a Task, FindTaskError> {
let uuids: Vec<&Uuid> = match *task_ref {
TaskRef::FullUUID(ref u) => vec![u],
TaskRef::ShortUUID(ref s) => self.tasks
.keys()
.filter(|uuid| uuid.simple().to_string().starts_with(s))
.collect(),
TaskRef::Numerical(ref n) => {
match self.numerical_ids.get(scope_name).and_then(|x| x.get(n)) {
Some(uuid) => vec![uuid],
None => vec![],
}
}
};
use self::FindTaskError::*;
match uuids.len() {
0 => Err(TaskNotFound),
1 => self.get_task(uuids[0])
.map_or(Err(FindTaskError::TaskNotFound), Ok),
_ => Err(MultipleResults),
}
}
pub fn is_dirty(&self) -> bool {
self.is_dirty
}
}
#[cfg(test)]
mod tests {
use super::*;
use chrono;
use std::str::FromStr;
use uuid::Uuid;
use {Priority, Task, TaskState};
#[test]
fn test_add_delete_task() {
let mut m = Model::new();
let t = Task::new("foo");
m.add_task(t.clone());
assert_eq!(m.get_task(&t.uuid), Some(&t));
assert_eq!(m.delete_task(&t.uuid), Some(t.clone()));
assert_eq!(m.get_task(&t.uuid), None);
}
#[test]
fn test_change_task_state() {
let mut m = Model::new();
let t = Task::new("foo");
let uuid = t.uuid.clone();
m.add_task(t.clone());
assert_eq!(m.tasks[&uuid].status, TaskState::Open);
let s = TaskState::Done(chrono::Utc::now());
m.change_task_state(&uuid, s);
assert_eq!(m.tasks[&uuid].status, s);
}
#[test]
fn test_change_task_priority() {
let mut m = Model::new();
let t = Task::new("foo");
let uuid = t.uuid.clone();
m.add_task(t.clone());
assert_eq!(m.tasks[&uuid].priority, Priority::Default);
m.change_task_priority(&uuid, Priority::High);
assert_eq!(m.tasks[&uuid].priority, Priority::High);
}
#[test]
fn test_numerical_ref() {
assert_eq!(TaskRef::from_str("42"), Ok(TaskRef::Numerical(42)));
assert_eq!(TaskRef::from_str("0"), Ok(TaskRef::Numerical(0)));
assert!(TaskRef::from_str("-0").is_err());
}
#[test]
fn test_short_uuid_ref() | {
let mut model = Self::new();
for effect in effects {
model.apply_effect(&effect)
}
model.is_dirty = false;
model
} | identifier_body |
model.rs | (_) = self {
writeln!(out, "Deleted task {}", self.task_id())?;
} else {
let task = model.get_task(self.task_id()).unwrap(); // TODO
match self {
DeleteTask(_) => unreachable!(),
AddTask(_) => writeln!(out, "Added Task {}", task.short_id())?,
ChangeTaskTags {
ref added,
ref removed,
..
} => {
if !added.is_empty() {
writeln!(out, "Added tags {:?}", added)?;
}
if !removed.is_empty() {
writeln!(out, "Removed tags {:?}", removed)?;
}
}
ChangeTaskState(_uuid, ref state) => match *state {
TaskState::Done(_) => writeln!(out, "Marking task '{}' as done", task.description)?,
TaskState::Open => writeln!(out, "Marking task '{}' as open", task.description)?,
TaskState::Canceled(_) => {
writeln!(out, "Marking task '{}' as canceled", task.description)?
}
},
ChangeTaskPriority(_uuid, ref priority) => {
writeln!(
out,
"Changed priority of task '{}' to {}",
task.description, priority
)?;
}
};
}
Ok(())
}
}
pub type ScopeName = String;
pub type NumericalIds = HashMap<ScopeName, BTreeMap<u64, Uuid>>;
pub struct Model {
// TODO: hide `tasks` and add `archived_tasks`
pub tasks: HashMap<Uuid, Task>,
pub applied_effects: Vec<Effect>,
pub numerical_ids: NumericalIds,
is_dirty: bool,
}
impl Model {
pub fn new() -> Self {
Model {
tasks: HashMap::new(),
applied_effects: Vec::new(),
numerical_ids: NumericalIds::new(),
is_dirty: false,
}
}
pub fn from_effects(effects: &[Effect]) -> Self {
let mut model = Self::new();
for effect in effects {
model.apply_effect(&effect)
}
model.is_dirty = false;
model
}
pub fn apply_effect(&mut self, effect: &Effect) -> () {
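// every effect is applied to the in-memory state and appended to applied_effects, so a Model can be rebuilt from the effect log (see from_effects)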
use Effect::*;
match effect.clone() {
AddTask(task) => |
ChangeTaskTags {
uuid,
added,
removed,
} => {
self.change_task_tags(&uuid, added, removed);
}
ChangeTaskState(uuid, state) => {
self.change_task_state(&uuid, state);
}
ChangeTaskPriority(uuid, p) => {
self.change_task_priority(&uuid, p);
}
DeleteTask(uuid) => {
self.delete_task(&uuid);
}
}
self.applied_effects.push(effect.clone());
self.is_dirty = true;
}
fn add_task(&mut self, t: Task) -> () {
if self.tasks.insert(t.uuid, t).is_some() {
panic!("UUID collision in Model::add_task");
}
}
fn delete_task(&mut self, u: &Uuid) -> Option<Task> {
self.tasks.remove(&u)
}
fn change_task_state(&mut self, u: &Uuid, state: TaskState) {
self.tasks.get_mut(u).expect("failed to get task").status = state;
}
fn change_task_priority(&mut self, u: &Uuid, priority: Priority) {
self.tasks.get_mut(u).expect("failed to get task").priority = priority;
}
fn change_task_tags(&mut self, u: &Uuid, added: Tags, removed: Tags) {
let ref mut tags = self.tasks.get_mut(u).expect("failed to get task").tags;
for t in removed {
tags.remove(&t);
}
for t in added {
tags.insert(t);
}
}
}
// Numerical-ID Handling
impl Model {
pub fn short_task_id(&self, scope_name: &str, task_id: &Uuid) -> Option<u64> {
self.numerical_ids
.get(scope_name)
.and_then(|ids| ids.iter().find(|&(_, uuid)| uuid == task_id))
.map(|(n, _)| *n)
}
pub fn recalculate_numerical_ids(&mut self, scope: &str, task_ids: &[Uuid]) {
info!("Recalculating numerical-ids for scope {}", scope);
self.is_dirty = true;
let ids = task_ids
.iter()
.enumerate()
.map(|(n, uuid)| ((n as u64) + 1, uuid.clone()))
.collect();
self.numerical_ids.insert(scope.into(), ids);
}
pub fn incremental_numerical_id(&mut self, scope: &str, task: &Uuid) -> u64 {
debug!(
"Calculating incremental numerical-id for {} in scope {}",
task, scope
);
assert!(self.get_task(task).is_some());
self.short_task_id(scope, task).unwrap_or_else(|| {
self.is_dirty = true;
let numerical_ids = self.numerical_ids
.entry(scope.into())
.or_insert(BTreeMap::new());
let n = numerical_ids.iter().map(|(id, _)| *id).max().unwrap_or(0) + 1;
numerical_ids.insert(n, task.clone());
n
})
}
}
#[derive(Debug, PartialEq, Eq, Fail)]
pub enum FindTaskError {
#[fail(display = "Couldn't find task")]
TaskNotFound,
#[fail(display = "Found multiple tasks")]
MultipleResults,
}
pub struct TaskIter<'a> {
tasks: Vec<&'a Task>,
pos: usize,
}
impl<'a> Iterator for TaskIter<'a> {
type Item = &'a Task;
fn next(&mut self) -> Option<Self::Item> {
let v = self.tasks.get(self.pos);
self.pos += 1;
v.map(|x| *x)
}
}
impl Model {
pub fn all_tasks<'a>(&'a self) -> TaskIter<'a> {
let mut v: Vec<&Task> = self.tasks.values().collect();
v.sort_by(|a, b| b.cmp(a));
TaskIter { tasks: v, pos: 0 }
}
pub fn get_task<'a>(&'a self, uuid: &Uuid) -> Option<&'a Task> {
self.tasks.get(uuid)
}
pub fn find_task<'a>(
&'a self,
scope_name: &str,
task_ref: &TaskRef,
) -> Result<&'a Task, FindTaskError> {
let uuids: Vec<&Uuid> = match *task_ref {
TaskRef::FullUUID(ref u) => vec![u],
TaskRef::ShortUUID(ref s) => self.tasks
.keys()
.filter(|uuid| uuid.simple().to_string().starts_with(s))
.collect(),
TaskRef::Numerical(ref n) => {
match self.numerical_ids.get(scope_name).and_then(|x| x.get(n)) {
Some(uuid) => vec![uuid],
None => vec![],
}
}
};
use self::FindTaskError::*;
match uuids.len() {
0 => Err(TaskNotFound),
1 => self.get_task(uuids[0])
.map_or(Err(FindTaskError::TaskNotFound), Ok),
_ => Err(MultipleResults),
}
}
pub fn is_dirty(&self) -> bool {
self.is_dirty
}
}
#[cfg(test)]
mod tests {
use super::*;
use chrono;
use std::str::FromStr;
use uuid::Uuid;
use {Priority, Task, TaskState};
#[test]
fn test_add_delete_task() {
let mut m = Model::new();
let t = Task::new("foo");
m.add_task(t.clone());
assert_eq!(m.get_task(&t.uuid), Some(&t));
assert_eq!(m.delete_task(&t.uuid), Some(t.clone()));
assert_eq!(m.get_task(&t.uuid), None);
}
#[test]
fn test_change_task_state() {
let mut m = Model::new();
let t = Task::new("foo");
let uuid = t.uuid.clone();
m.add_task(t.clone());
assert_eq!(m.tasks[&uuid].status, TaskState::Open);
let s = TaskState::Done(chrono::Utc::now());
m.change_task_state(&uuid, s);
assert_eq!(m.tasks[&uuid].status, s);
}
#[test]
fn test_change_task_priority() {
let mut m = Model::new();
let t = Task::new("foo");
let uuid = t.uuid.clone();
m.add_task(t.clone());
assert_eq!(m.tasks[&uuid].priority, Priority::Default);
m.change_task_priority(&uuid, Priority::High);
assert_eq!(m.tasks[&uuid].priority, Priority::High);
}
#[test]
fn test_numerical_ref() {
assert_eq!(TaskRef::from_str("42"), Ok(TaskRef::Numerical(42)));
assert_eq!(TaskRef::from_str("0"), Ok(TaskRef::Numerical(0)));
assert!(TaskRef::from_str("-0").is_err());
}
#[test]
fn test_short_uuid_ref() | {
self.add_task(task);
} | conditional_block |
model.rs | (_) = self {
writeln!(out, "Deleted task {}", self.task_id())?;
} else {
let task = model.get_task(self.task_id()).unwrap(); // TODO
match self {
DeleteTask(_) => unreachable!(),
AddTask(_) => writeln!(out, "Added Task {}", task.short_id())?,
ChangeTaskTags {
ref added,
ref removed,
..
} => {
if !added.is_empty() {
writeln!(out, "Added tags {:?}", added)?;
}
if !removed.is_empty() {
writeln!(out, "Removed tags {:?}", removed)?;
}
}
ChangeTaskState(_uuid, ref state) => match *state {
TaskState::Done(_) => writeln!(out, "Marking task '{}' as done", task.description)?,
TaskState::Open => writeln!(out, "Marking task '{}' as open", task.description)?,
TaskState::Canceled(_) => {
writeln!(out, "Marking task '{}' as canceled", task.description)?
}
},
ChangeTaskPriority(_uuid, ref priority) => {
writeln!(
out,
"Changed priority of task '{}' to {}",
task.description, priority
)?;
}
};
}
Ok(())
}
}
pub type ScopeName = String;
pub type NumericalIds = HashMap<ScopeName, BTreeMap<u64, Uuid>>;
pub struct Model {
// TODO: hide `tasks` and add `archived_tasks`
pub tasks: HashMap<Uuid, Task>,
pub applied_effects: Vec<Effect>,
pub numerical_ids: NumericalIds,
is_dirty: bool,
}
impl Model {
pub fn new() -> Self {
Model {
tasks: HashMap::new(),
applied_effects: Vec::new(),
numerical_ids: NumericalIds::new(),
is_dirty: false,
}
}
pub fn from_effects(effects: &[Effect]) -> Self {
let mut model = Self::new();
for effect in effects {
model.apply_effect(&effect)
}
model.is_dirty = false;
model
}
pub fn apply_effect(&mut self, effect: &Effect) -> () {
use Effect::*;
match effect.clone() {
AddTask(task) => {
self.add_task(task);
}
ChangeTaskTags {
uuid,
added,
removed,
} => {
self.change_task_tags(&uuid, added, removed);
}
ChangeTaskState(uuid, state) => {
self.change_task_state(&uuid, state);
}
ChangeTaskPriority(uuid, p) => {
self.change_task_priority(&uuid, p);
}
DeleteTask(uuid) => {
self.delete_task(&uuid);
}
}
self.applied_effects.push(effect.clone());
self.is_dirty = true;
}
fn add_task(&mut self, t: Task) -> () {
if self.tasks.insert(t.uuid, t).is_some() {
panic!("UUID collision in Model::add_task");
}
}
fn delete_task(&mut self, u: &Uuid) -> Option<Task> {
self.tasks.remove(&u)
}
fn change_task_state(&mut self, u: &Uuid, state: TaskState) {
self.tasks.get_mut(u).expect("failed to get task").status = state;
}
fn change_task_priority(&mut self, u: &Uuid, priority: Priority) {
self.tasks.get_mut(u).expect("failed to get task").priority = priority;
}
fn change_task_tags(&mut self, u: &Uuid, added: Tags, removed: Tags) {
let ref mut tags = self.tasks.get_mut(u).expect("failed to get task").tags;
for t in removed {
tags.remove(&t);
}
for t in added {
tags.insert(t);
}
}
}
// Numerical-ID Handling
impl Model {
pub fn short_task_id(&self, scope_name: &str, task_id: &Uuid) -> Option<u64> {
self.numerical_ids
.get(scope_name)
.and_then(|ids| ids.iter().find(|&(_, uuid)| uuid == task_id))
.map(|(n, _)| *n)
}
pub fn recalculate_numerical_ids(&mut self, scope: &str, task_ids: &[Uuid]) {
info!("Recalculating numerical-ids for scope {}", scope);
self.is_dirty = true;
let ids = task_ids
.iter()
.enumerate()
.map(|(n, uuid)| ((n as u64) + 1, uuid.clone()))
.collect();
self.numerical_ids.insert(scope.into(), ids);
}
pub fn incremental_numerical_id(&mut self, scope: &str, task: &Uuid) -> u64 {
debug!(
"Calculating incremental numerical-id for {} in scope {}",
task, scope
);
assert!(self.get_task(task).is_some());
self.short_task_id(scope, task).unwrap_or_else(|| {
self.is_dirty = true;
let numerical_ids = self.numerical_ids
.entry(scope.into())
.or_insert(BTreeMap::new());
let n = numerical_ids.iter().map(|(id, _)| *id).max().unwrap_or(0) + 1;
numerical_ids.insert(n, task.clone());
n
})
}
}
#[derive(Debug, PartialEq, Eq, Fail)]
pub enum FindTaskError {
#[fail(display = "Couldn't find task")]
TaskNotFound,
#[fail(display = "Found multiple tasks")]
MultipleResults,
}
pub struct TaskIter<'a> {
tasks: Vec<&'a Task>,
pos: usize,
}
impl<'a> Iterator for TaskIter<'a> {
type Item = &'a Task;
fn next(&mut self) -> Option<Self::Item> {
let v = self.tasks.get(self.pos);
self.pos += 1;
v.map(|x| *x)
}
}
impl Model {
pub fn all_tasks<'a>(&'a self) -> TaskIter<'a> {
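// snapshot the task references and sort them in descending order (b.cmp(a)) before handing out an iterator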
let mut v: Vec<&Task> = self.tasks.values().collect();
v.sort_by(|a, b| b.cmp(a));
TaskIter { tasks: v, pos: 0 }
}
pub fn get_task<'a>(&'a self, uuid: &Uuid) -> Option<&'a Task> {
self.tasks.get(uuid)
}
pub fn find_task<'a>(
&'a self,
scope_name: &str,
task_ref: &TaskRef,
) -> Result<&'a Task, FindTaskError> {
let uuids: Vec<&Uuid> = match *task_ref {
TaskRef::FullUUID(ref u) => vec![u],
TaskRef::ShortUUID(ref s) => self.tasks
.keys()
.filter(|uuid| uuid.simple().to_string().starts_with(s))
.collect(),
TaskRef::Numerical(ref n) => {
match self.numerical_ids.get(scope_name).and_then(|x| x.get(n)) {
Some(uuid) => vec![uuid],
None => vec![],
}
}
};
use self::FindTaskError::*;
match uuids.len() {
0 => Err(TaskNotFound),
1 => self.get_task(uuids[0])
.map_or(Err(FindTaskError::TaskNotFound), Ok),
_ => Err(MultipleResults),
}
}
pub fn | (&self) -> bool {
self.is_dirty
}
}
#[cfg(test)]
mod tests {
use super::*;
use chrono;
use std::str::FromStr;
use uuid::Uuid;
use {Priority, Task, TaskState};
#[test]
fn test_add_delete_task() {
let mut m = Model::new();
let t = Task::new("foo");
m.add_task(t.clone());
assert_eq!(m.get_task(&t.uuid), Some(&t));
assert_eq!(m.delete_task(&t.uuid), Some(t.clone()));
assert_eq!(m.get_task(&t.uuid), None);
}
#[test]
fn test_change_task_state() {
let mut m = Model::new();
let t = Task::new("foo");
let uuid = t.uuid.clone();
m.add_task(t.clone());
assert_eq!(m.tasks[&uuid].status, TaskState::Open);
let s = TaskState::Done(chrono::Utc::now());
m.change_task_state(&uuid, s);
assert_eq!(m.tasks[&uuid].status, s);
}
#[test]
fn test_change_task_priority() {
let mut m = Model::new();
let t = Task::new("foo");
let uuid = t.uuid.clone();
m.add_task(t.clone());
assert_eq!(m.tasks[&uuid].priority, Priority::Default);
m.change_task_priority(&uuid, Priority::High);
assert_eq!(m.tasks[&uuid].priority, Priority::High);
}
#[test]
fn test_numerical_ref() {
assert_eq!(TaskRef::from_str("42"), Ok(TaskRef::Numerical(42)));
assert_eq!(TaskRef::from_str("0"), Ok(TaskRef::Numerical(0)));
assert!(TaskRef::from_str("-0").is_err());
}
#[test]
fn test_short_uuid_ref() {
| is_dirty | identifier_name |
lib.rs | use quest_hook::inline_hook::hook;
/// use quest_hook::libil2cpp::Il2CppObject;
/// use log::info;
///
/// #[hook("", "MainSettingsModelSO", "OnEnable")]
/// fn on_enable(this: &Il2CppObject) {
/// info!("MainSettingsModelSO.OnEnable was called!");
///
/// on_enable.original(this); // Call the original C# method
/// }
///
/// #[no_mangle]
/// pub extern "C" fn load() {
/// info!("Installing hooks!");
///
/// on_enable.install(); // Install the hook
///
/// info!("Installed hooks!");
/// }
/// ```
#[proc_macro_attribute]
pub fn | (attr: TokenStream, item: TokenStream) -> TokenStream {
let punctuated_args =
parse_macro_input!(attr with Punctuated<LitStr, Token![,]>::parse_separated_nonempty);
let input = parse_macro_input!(item as ItemFn);
match create_hook(punctuated_args, input) {
Ok(ts) => ts,
Err(err) => err.to_compile_error().into(),
}
}
fn create_hook(
punctuated_args: Punctuated<LitStr, Token![,]>,
input: ItemFn,
) -> Result<TokenStream, Error> {
let args: Vec<String> = punctuated_args.iter().map(LitStr::value).collect();
let (namespace, class, method) = match args.as_slice() {
[n, c, m] => (n, c, m),
_ => {
let msg = format!("Expected 3 arguments, found {}", args.len());
return Err(Error::new_spanned(punctuated_args, msg));
}
};
let ItemFn { sig, block, .. } = input;
let name = sig.ident;
let return_type = sig.output;
let typecheck_return_type = match &return_type {
ReturnType::Default => quote! { () },
ReturnType::Type(_, ty) => quote! { #ty },
};
let hook_name = format_ident!("{}_hook", name);
let hook_args = sig.inputs;
let mut this_arg_type = None;
let mut num_hook_args: usize = 0;
for hook_arg in &hook_args {
let arg_type = match hook_arg {
FnArg::Typed(arg_type) => arg_type,
FnArg::Receiver(_) => {
let msg = "Hook argument cannot be `self`";
return Err(Error::new_spanned(hook_arg, msg));
}
};
match &*arg_type.pat {
// `il2cpp_class_get_method_from_name` does not count `this` in its argument count
Pat::Ident(pat_ident) if pat_ident.ident == "this" => {
if this_arg_type.is_some() {
let msg = "There cannot be more than one `this` argument.";
return Err(Error::new_spanned(hook_arg, msg));
}
if num_hook_args > 0 {
let msg = "`this` must be the first argument.";
return Err(Error::new_spanned(hook_arg, msg));
}
this_arg_type = Some(arg_type.ty.clone());
}
_ => num_hook_args += 1,
}
}
let hook_struct_name = format_ident!("{}_Struct", name);
let mut hook_args_untyped: Punctuated<Pat, Token![,]> = Punctuated::new();
let mut typecheck_arg_types: Punctuated<Type, Token![,]> = Punctuated::new();
for arg in &hook_args {
if let FnArg::Typed(arg) = arg {
hook_args_untyped.push((*arg.pat).clone());
match &*arg.pat {
Pat::Ident(pat_ident) if pat_ident.ident == "this" => continue,
_ => typecheck_arg_types.push((*arg.ty).clone()),
}
}
}
let typecheck_this_type = match &this_arg_type {
None => quote! { () },
Some(ty) => quote! { #ty },
};
let tokens = quote! {
pub extern "C" fn #hook_name ( #hook_args ) #return_type #block
#[allow(non_camel_case_types)]
struct #hook_struct_name {
original: ::std::sync::atomic::AtomicPtr<()>,
namespace: &'static str,
class_name: &'static str,
method_name: &'static str,
parameters_count: usize,
}
impl #hook_struct_name {
fn install(&self) {
use ::quest_hook::libil2cpp::WrapRaw;
let class = ::quest_hook::libil2cpp::Il2CppClass::find(self.namespace, self.class_name).expect("Class not found");
let method = class.find_method_callee::<
#typecheck_this_type,
( #typecheck_arg_types ),
#typecheck_return_type,
#num_hook_args
>(self.method_name).expect("Method not found");
let mut temp = ::std::ptr::null_mut();
unsafe {
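// A64HookFunction overwrites the start of the target method and stores a trampoline pointer in temp, which is kept so original() can still reach the unhooked code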
::quest_hook::inline_hook::A64HookFunction(
::std::mem::transmute::<unsafe extern "C" fn(), *mut ::std::ffi::c_void>(method.raw().methodPointer.unwrap()),
::std::mem::transmute::<extern "C" fn( #hook_args ) #return_type, *mut ::std::ffi::c_void>( #hook_name ),
&mut temp,
);
self.original.store(
::std::mem::transmute::<*mut ::std::ffi::c_void, *mut ()>(temp),
::std::sync::atomic::Ordering::Relaxed
);
}
}
fn original(&self, #hook_args ) #return_type {
let ptr = self.original.load(::std::sync::atomic::Ordering::Relaxed);
let original = unsafe {
::std::mem::transmute::<*const (), Option<extern "C" fn( #hook_args ) #return_type >>(ptr)
};
(original.expect("Hook is not installed"))( #hook_args_untyped )
}
fn hook(&self, #hook_args ) #return_type {
#hook_name( #hook_args_untyped )
}
}
impl ::quest_hook::Hook for #hook_struct_name {
fn install(&self) {
self.install()
}
fn namespace(&self) -> &'static str {
self.namespace
}
fn class_name(&self) -> &'static str {
self.class_name
}
fn method_name(&self) -> &'static str {
self.method_name
}
fn parameters_count(&self) -> usize {
self.parameters_count
}
fn hook(&self) -> *mut () {
unsafe {
::std::mem::transmute::<extern "C" fn( #hook_args ) #return_type, *mut ()>( #hook_name )
}
}
fn original(&self) -> *mut () {
self.original.load(::std::sync::atomic::Ordering::Relaxed)
}
}
#[allow(non_upper_case_globals)]
static #name: #hook_struct_name = #hook_struct_name {
original: ::std::sync::atomic::AtomicPtr::new(::std::ptr::null_mut()),
namespace: #namespace,
class_name: #class,
method_name: #method,
parameters_count: #num_hook_args as usize
};
};
Ok(tokens.into())
}
#[doc(hidden)]
#[proc_macro]
pub fn impl_arguments_parameters(input: TokenStream) -> TokenStream {
let range = parse_macro_input!(input as ExprRange);
match create_impl_arguments_parameters(range) {
Ok(ts) => ts,
Err(err) => err.to_compile_error().into(),
}
}
fn create_impl_arguments_parameters(range: ExprRange) -> Result<TokenStream, Error> {
let span = range.span();
let start = range
.from
.ok_or_else(|| Error::new(span, "Tuple length range must have a lower bound"))?;
let start = parse_range_bound(*start)?;
let end = range
.to
.ok_or_else(|| Error::new(span, "Tuple length range must have an upper bound"))?;
let end = parse_range_bound(*end)?;
let range = match range.limits {
RangeLimits::HalfOpen(_) if end <= start => {
return Err(Error::new(span, "Tuple length range must be valid"))
}
RangeLimits::HalfOpen(_) => start..end,
RangeLimits::Closed(_) if end < start => {
return Err(Error::new(span, "Tuple length range must be valid"))
}
RangeLimits::Closed(_) => start..(end + 1),
};
let mut ts = TokenStream::new();
for n in range {
let generic_params_argument = (1..=n).map(|n| format_ident!("A{}", n));
let matches_argument = generic_params_argument
.clone()
.enumerate()
.map(|(n, gp)| quote!(<#gp>::matches(args[#n].ty())));
let invokables = (0..n).map(Index::from).map(|n| quote!(self.#n.invokable()));
let generic_params_parameter = (1..=n).map(|n| format_ident!("P{}", n));
let matches_parameter = generic_params_parameter
.clone()
.enumerate()
.map(|(n, gp)| quote | hook | identifier_name |
lib.rs | use quest_hook::inline_hook::hook;
/// use quest_hook::libil2cpp::Il2CppObject;
/// use log::info;
///
/// #[hook("", "MainSettingsModelSO", "OnEnable")]
/// fn on_enable(this: &Il2CppObject) {
/// info!("MainSettingsModelSO.OnEnable was called!");
///
/// on_enable.original(this); // Call the original C# method
/// }
///
/// #[no_mangle]
/// pub extern "C" fn load() {
/// info!("Installing hooks!");
///
/// on_enable.install(); // Install the hook
///
/// info!("Installed hooks!");
/// }
/// ```
#[proc_macro_attribute]
pub fn hook(attr: TokenStream, item: TokenStream) -> TokenStream {
let punctuated_args =
parse_macro_input!(attr with Punctuated<LitStr, Token![,]>::parse_separated_nonempty);
let input = parse_macro_input!(item as ItemFn);
match create_hook(punctuated_args, input) {
Ok(ts) => ts,
Err(err) => err.to_compile_error().into(),
}
}
fn create_hook(
punctuated_args: Punctuated<LitStr, Token![,]>,
input: ItemFn,
) -> Result<TokenStream, Error> {
let args: Vec<String> = punctuated_args.iter().map(LitStr::value).collect();
let (namespace, class, method) = match args.as_slice() {
[n, c, m] => (n, c, m),
_ => {
let msg = format!("Expected 3 arguments, found {}", args.len());
return Err(Error::new_spanned(punctuated_args, msg));
}
};
let ItemFn { sig, block, .. } = input;
let name = sig.ident;
let return_type = sig.output;
let typecheck_return_type = match &return_type {
ReturnType::Default => quote! { () },
ReturnType::Type(_, ty) => quote! { #ty },
};
let hook_name = format_ident!("{}_hook", name);
let hook_args = sig.inputs;
let mut this_arg_type = None;
let mut num_hook_args: usize = 0;
for hook_arg in &hook_args {
let arg_type = match hook_arg {
FnArg::Typed(arg_type) => arg_type,
FnArg::Receiver(_) => {
let msg = "Hook argument cannot be `self`";
return Err(Error::new_spanned(hook_arg, msg));
}
};
match &*arg_type.pat {
// `il2cpp_class_get_method_from_name` does not count `this` in its argument count
Pat::Ident(pat_ident) if pat_ident.ident == "this" => {
if this_arg_type.is_some() {
let msg = "There cannot be more than one `this` argument.";
return Err(Error::new_spanned(hook_arg, msg));
}
if num_hook_args > 0 {
let msg = "`this` must be the first argument.";
return Err(Error::new_spanned(hook_arg, msg));
}
this_arg_type = Some(arg_type.ty.clone());
}
_ => num_hook_args += 1,
}
}
let hook_struct_name = format_ident!("{}_Struct", name);
let mut hook_args_untyped: Punctuated<Pat, Token![,]> = Punctuated::new();
let mut typecheck_arg_types: Punctuated<Type, Token![,]> = Punctuated::new();
for arg in &hook_args {
if let FnArg::Typed(arg) = arg {
hook_args_untyped.push((*arg.pat).clone());
match &*arg.pat {
Pat::Ident(pat_ident) if pat_ident.ident == "this" => continue,
_ => typecheck_arg_types.push((*arg.ty).clone()),
}
}
}
let typecheck_this_type = match &this_arg_type {
None => quote! { () },
Some(ty) => quote! { #ty },
};
let tokens = quote! {
pub extern "C" fn #hook_name ( #hook_args ) #return_type #block
#[allow(non_camel_case_types)]
struct #hook_struct_name {
original: ::std::sync::atomic::AtomicPtr<()>,
namespace: &'static str,
class_name: &'static str,
method_name: &'static str,
parameters_count: usize,
}
impl #hook_struct_name {
fn install(&self) {
use ::quest_hook::libil2cpp::WrapRaw;
let class = ::quest_hook::libil2cpp::Il2CppClass::find(self.namespace, self.class_name).expect("Class not found");
let method = class.find_method_callee::<
#typecheck_this_type,
( #typecheck_arg_types ),
#typecheck_return_type,
#num_hook_args
>(self.method_name).expect("Method not found");
let mut temp = ::std::ptr::null_mut();
unsafe {
::quest_hook::inline_hook::A64HookFunction(
::std::mem::transmute::<unsafe extern "C" fn(), *mut ::std::ffi::c_void>(method.raw().methodPointer.unwrap()),
::std::mem::transmute::<extern "C" fn( #hook_args ) #return_type, *mut ::std::ffi::c_void>( #hook_name ),
&mut temp,
);
self.original.store(
::std::mem::transmute::<*mut ::std::ffi::c_void, *mut ()>(temp),
::std::sync::atomic::Ordering::Relaxed
);
}
}
fn original(&self, #hook_args ) #return_type {
let ptr = self.original.load(::std::sync::atomic::Ordering::Relaxed);
let original = unsafe {
::std::mem::transmute::<*const (), Option<extern "C" fn( #hook_args ) #return_type >>(ptr)
};
(original.expect("Hook is not installed"))( #hook_args_untyped )
}
fn hook(&self, #hook_args ) #return_type {
#hook_name( #hook_args_untyped )
}
}
impl ::quest_hook::Hook for #hook_struct_name {
fn install(&self) {
self.install()
}
fn namespace(&self) -> &'static str {
self.namespace
}
fn class_name(&self) -> &'static str {
self.class_name
}
fn method_name(&self) -> &'static str {
self.method_name
}
fn parameters_count(&self) -> usize {
self.parameters_count
}
fn hook(&self) -> *mut () {
unsafe {
::std::mem::transmute::<extern "C" fn( #hook_args ) #return_type, *mut ()>( #hook_name )
}
}
fn original(&self) -> *mut () {
self.original.load(::std::sync::atomic::Ordering::Relaxed)
}
}
#[allow(non_upper_case_globals)]
static #name: #hook_struct_name = #hook_struct_name {
original: ::std::sync::atomic::AtomicPtr::new(::std::ptr::null_mut()),
namespace: #namespace,
class_name: #class,
method_name: #method,
parameters_count: #num_hook_args as usize
};
};
Ok(tokens.into())
}
#[doc(hidden)]
#[proc_macro]
pub fn impl_arguments_parameters(input: TokenStream) -> TokenStream {
    let range = parse_macro_input!(input as ExprRange);
    match create_impl_arguments_parameters(range) {
        Ok(ts) => ts,
        Err(err) => err.to_compile_error().into(),
    }
}
fn create_impl_arguments_parameters(range: ExprRange) -> Result<TokenStream, Error> {
let span = range.span();
let start = range
.from
.ok_or_else(|| Error::new(span, "Tuple length range must have a lower bound"))?;
let start = parse_range_bound(*start)?;
let end = range
.to
.ok_or_else(|| Error::new(span, "Tuple length range must have an upper bound"))?;
let end = parse_range_bound(*end)?;
let range = match range.limits {
RangeLimits::HalfOpen(_) if end <= start => {
return Err(Error::new(span, "Tuple length range must be valid"))
}
RangeLimits::HalfOpen(_) => start..end,
RangeLimits::Closed(_) if end < start => {
return Err(Error::new(span, "Tuple length range must be valid"))
}
RangeLimits::Closed(_) => start..(end + 1),
};
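    // Normalized range: `a..b` keeps its exclusive upper bound, `a..=b` becomes `a..(b + 1)`,
    // and empty or inverted ranges were rejected above.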
let mut ts = TokenStream::new();
for n in range {
let generic_params_argument = (1..=n).map(|n| format_ident!("A{}", n));
let matches_argument = generic_params_argument
.clone()
.enumerate()
.map(|(n, gp)| quote!(<#gp>::matches(args[#n].ty())));
let invokables = (0..n).map(Index::from).map(|n| quote!(self.#n.invokable()));
let generic_params_parameter = (1..=n).map(|n| format_ident!("P{}", n));
let matches_parameter = generic_params_parameter
.clone()
.enumerate()
            .map(|(n, gp)| quote!(<#gp>::matches(params[#n].ty())));
/// use quest_hook::inline_hook::hook;
/// use quest_hook::libil2cpp::Il2CppObject;
/// use log::info;
///
/// #[hook("", "MainSettingsModelSO", "OnEnable")]
/// fn on_enable(this: &Il2CppObject) {
/// info!("MainSettingsModelSO.OnEnable was called!");
///
/// on_enable.original(this); // Call the original C# method
/// }
///
/// #[no_mangle]
/// pub extern "C" fn load() {
/// info!("Installing hooks!");
///
/// on_enable.install(); // Install the hook
///
/// info!("Installed hooks!");
/// }
/// ```
#[proc_macro_attribute]
pub fn hook(attr: TokenStream, item: TokenStream) -> TokenStream {
let punctuated_args =
parse_macro_input!(attr with Punctuated<LitStr, Token![,]>::parse_separated_nonempty);
let input = parse_macro_input!(item as ItemFn);
match create_hook(punctuated_args, input) {
Ok(ts) => ts,
Err(err) => err.to_compile_error().into(),
}
}
fn create_hook(
punctuated_args: Punctuated<LitStr, Token![,]>,
input: ItemFn,
) -> Result<TokenStream, Error> {
let args: Vec<String> = punctuated_args.iter().map(LitStr::value).collect();
let (namespace, class, method) = match args.as_slice() {
[n, c, m] => (n, c, m),
_ => {
let msg = format!("Expected 3 arguments, found {}", args.len());
return Err(Error::new_spanned(punctuated_args, msg));
}
};
    let ItemFn { sig, block, .. } = input;
    let name = sig.ident;
let return_type = sig.output;
let typecheck_return_type = match &return_type {
ReturnType::Default => quote! { () },
ReturnType::Type(_, ty) => quote! { #ty },
};
let hook_name = format_ident!("{}_hook", name);
let hook_args = sig.inputs;
let mut this_arg_type = None;
let mut num_hook_args: usize = 0;
for hook_arg in &hook_args {
let arg_type = match hook_arg {
FnArg::Typed(arg_type) => arg_type,
FnArg::Receiver(_) => {
let msg = "Hook argument cannot be `self`";
return Err(Error::new_spanned(hook_arg, msg));
}
};
match &*arg_type.pat {
// `il2cpp_class_get_method_from_name` does not count `this` in its argument count
Pat::Ident(pat_ident) if pat_ident.ident == "this" => {
if this_arg_type.is_some() {
let msg = "There cannot be more than one `this` argument.";
return Err(Error::new_spanned(hook_arg, msg));
}
if num_hook_args > 0 {
let msg = "`this` must be the first argument.";
return Err(Error::new_spanned(hook_arg, msg));
}
this_arg_type = Some(arg_type.ty.clone());
}
_ => num_hook_args += 1,
}
}
let hook_struct_name = format_ident!("{}_Struct", name);
let mut hook_args_untyped: Punctuated<Pat, Token![,]> = Punctuated::new();
let mut typecheck_arg_types: Punctuated<Type, Token![,]> = Punctuated::new();
for arg in &hook_args {
if let FnArg::Typed(arg) = arg {
hook_args_untyped.push((*arg.pat).clone());
match &*arg.pat {
Pat::Ident(pat_ident) if pat_ident.ident == "this" => continue,
_ => typecheck_arg_types.push((*arg.ty).clone()),
}
}
}
let typecheck_this_type = match &this_arg_type {
None => quote! { () },
Some(ty) => quote! { #ty },
};
let tokens = quote! {
pub extern "C" fn #hook_name ( #hook_args ) #return_type #block
#[allow(non_camel_case_types)]
struct #hook_struct_name {
original: ::std::sync::atomic::AtomicPtr<()>,
namespace: &'static str,
class_name: &'static str,
method_name: &'static str,
parameters_count: usize,
}
impl #hook_struct_name {
fn install(&self) {
use ::quest_hook::libil2cpp::WrapRaw;
let class = ::quest_hook::libil2cpp::Il2CppClass::find(self.namespace, self.class_name).expect("Class not found");
let method = class.find_method_callee::<
#typecheck_this_type,
( #typecheck_arg_types ),
#typecheck_return_type,
#num_hook_args
>(self.method_name).expect("Method not found");
let mut temp = ::std::ptr::null_mut();
unsafe {
::quest_hook::inline_hook::A64HookFunction(
::std::mem::transmute::<unsafe extern "C" fn(), *mut ::std::ffi::c_void>(method.raw().methodPointer.unwrap()),
::std::mem::transmute::<extern "C" fn( #hook_args ) #return_type, *mut ::std::ffi::c_void>( #hook_name ),
&mut temp,
);
self.original.store(
::std::mem::transmute::<*mut ::std::ffi::c_void, *mut ()>(temp),
::std::sync::atomic::Ordering::Relaxed
);
}
}
fn original(&self, #hook_args ) #return_type {
let ptr = self.original.load(::std::sync::atomic::Ordering::Relaxed);
let original = unsafe {
::std::mem::transmute::<*const (), Option<extern "C" fn( #hook_args ) #return_type >>(ptr)
};
(original.expect("Hook is not installed"))( #hook_args_untyped )
}
fn hook(&self, #hook_args ) #return_type {
#hook_name( #hook_args_untyped )
}
}
impl ::quest_hook::Hook for #hook_struct_name {
fn install(&self) {
self.install()
}
fn namespace(&self) -> &'static str {
self.namespace
}
fn class_name(&self) -> &'static str {
self.class_name
}
fn method_name(&self) -> &'static str {
self.method_name
}
fn parameters_count(&self) -> usize {
self.parameters_count
}
fn hook(&self) -> *mut () {
unsafe {
::std::mem::transmute::<extern "C" fn( #hook_args ) #return_type, *mut ()>( #hook_name )
}
}
fn original(&self) -> *mut () {
self.original.load(::std::sync::atomic::Ordering::Relaxed)
}
}
#[allow(non_upper_case_globals)]
static #name: #hook_struct_name = #hook_struct_name {
original: ::std::sync::atomic::AtomicPtr::new(::std::ptr::null_mut()),
namespace: #namespace,
class_name: #class,
method_name: #method,
parameters_count: #num_hook_args as usize
};
};
Ok(tokens.into())
}
#[doc(hidden)]
#[proc_macro]
pub fn impl_arguments_parameters(input: TokenStream) -> TokenStream {
let range = parse_macro_input!(input as ExprRange);
match create_impl_arguments_parameters(range) {
Ok(ts) => ts,
Err(err) => err.to_compile_error().into(),
}
}
fn create_impl_arguments_parameters(range: ExprRange) -> Result<TokenStream, Error> {
let span = range.span();
let start = range
.from
.ok_or_else(|| Error::new(span, "Tuple length range must have a lower bound"))?;
let start = parse_range_bound(*start)?;
let end = range
.to
.ok_or_else(|| Error::new(span, "Tuple length range must have an upper bound"))?;
let end = parse_range_bound(*end)?;
let range = match range.limits {
RangeLimits::HalfOpen(_) if end <= start => {
return Err(Error::new(span, "Tuple length range must be valid"))
}
RangeLimits::HalfOpen(_) => start..end,
RangeLimits::Closed(_) if end < start => {
return Err(Error::new(span, "Tuple length range must be valid"))
}
RangeLimits::Closed(_) => start..(end + 1),
};
let mut ts = TokenStream::new();
for n in range {
let generic_params_argument = (1..=n).map(|n| format_ident!("A{}", n));
let matches_argument = generic_params_argument
.clone()
.enumerate()
.map(|(n, gp)| quote!(<#gp>::matches(args[#n].ty())));
let invokables = (0..n).map(Index::from).map(|n| quote!(self.#n.invokable()));
let generic_params_parameter = (1..=n).map(|n| format_ident!("P{}", n));
let matches_parameter = generic_params_parameter
.clone()
.enumerate()
            .map(|(n, gp)| quote!(<#gp>::matches(params[#n].ty())));
|
train_arch9.py |
parser.add_argument('--use_cropped_img', action='store_true')
parser.add_argument('--experiment_name', default=datetime.datetime.now().strftime("%Y.%m.%d-%H%M%S"))
parser.add_argument('--num_ckpt', type=int, default=10)
parser.add_argument('--clear', default=False, action='store_true')
args = parser.parse_args()
num_gpu = 1
n_att = len(args.atts)
pylib.mkdir('./output/%s' % args.experiment_name)
with open('./output/%s/setting.txt' % args.experiment_name, 'w') as f:
f.write(json.dumps(vars(args), indent=4, separators=(',', ':')))
# ==============================================================================
# = graphs =
# ==============================================================================
# data
if args.threads >= 0:
cpu_config = tf.ConfigProto(intra_op_parallelism_threads=args.threads // 2,
inter_op_parallelism_threads=args.threads // 2,
device_count={'CPU': args.threads})
sess = tf.Session(config=cpu_config)
else:
sess = tl.session()
crop_ = not args.use_cropped_img
if args.dataset == 'celeba':
tr_data = data.Celeba(args.dataroot, att_default, args.img_size, args.batch_size, part='train', sess=sess, crop=crop_, is_tfrecord = True)
val_data = data.Celeba(args.dataroot, args.atts, args.img_size, args.n_sample, part='val', shuffle=False, sess=sess, crop=crop_)
else:
tr_data = data.x2y(args.dataset, args.dataroot, args.img_size, args.batch_size, part='train', sess=sess, is_tfrecord = True)
val_data = data.x2y(args.dataset, args.dataroot, args.img_size, args.n_sample, part='val', shuffle=False, sess=sess, is_tfrecord = False)
# models
Generator = partial(models.Generator2_wRelu, n_downblks = args.n_downblks_gen, n_intermedblks = args.n_intermedblks_gen, n_upblks = args.n_upblks_gen, ch = args.ch_gen)
MappingNet = partial(models.MappingNet_multiStream_deconv_wIN_wRelu_concat, n_mlp = args.n_mlp_map, n_layers = args.n_layers_map, fc_dim = args.fc_dim_map, ch = args.ch_enc*2)
Encoder = partial(models.Encoder4_wIN_wRelu_concat,n_downblks = args.n_downblks_enc, n_intermedblks = args.n_intermedblks_enc, n_mlp = args.n_mlp_enc, ch = args.ch_enc)
# Discriminator = partial(models.PatchDiscriminator, n_att=n_att, n_resblks = args.n_resblks_dis, ch = args.ch_dis)
# Discriminator = partial(models.Discriminator_multiTask, n_att=n_att, n_resblks = args.n_resblks_dis, ch = args.ch_dis)
Discriminator = partial(models.Discriminator, n_att=n_att, n_resblks = args.n_resblks_dis, ch = args.ch_dis)
# inputs
r_M = tf.get_variable(name='G_R', shape=[182000, args.dim_noise], dtype=tf.float32, initializer=tf.random_normal_initializer(mean=0.0, stddev=1.0))
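# r_M is a persistent bank of latent codes, one row of size dim_noise per training image (182000 rows);
# rows are looked up by record index below and refined via explicit assign ops during training.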
lr = tf.placeholder(dtype=tf.float32, shape=[])
xs_s = tr_data.batch_op[0]
ls_s = tf.to_float(tr_data.batch_op[1])
rs_s = tr_data.batch_op[2]
if args.dataset == 'celeba':
# b_s = tf.random_shuffle(a_s)
permuted_index = tf.random_shuffle(tf.range(args.batch_size))
xt_s = tf.gather(xs_s, permuted_index)
lt_s = tf.gather(ls_s, permuted_index)
rt_i = tf.gather(rs_s, permuted_index)
rt_s = tf.reshape(tf.gather(r_M, rt_i), [rt_i.shape[0], args.dim_noise])
else:
|
d_opt = tf.train.AdamOptimizer(lr, beta1=0.5)
g_opt = tf.train.AdamOptimizer(lr, beta1=0.5)
tower_d_grads = []
tower_g_grads = []
tower_d_loss_gan = []
tower_gp = []
tower_d_loss_cls = []
tower_g_loss_sim = []
tower_g_loss_r = []
tower_g_loss_r0 = []
tower_g_loss_gan = []
tower_g_loss_cls = []
tower_g_loss_cyc = []
tower_g_loss_reg = []
tower_g_loss_rec = []
tower_g_loss_interp = []
xs_sample = tf.placeholder(tf.float32, shape=[None, args.img_size, args.img_size, 3])
lt_sample = tf.placeholder(tf.float32, shape=[None, n_att])
ls_sample = tf.placeholder(tf.float32, shape=[None, n_att])
with tf.variable_scope(tf.get_variable_scope()):
for i in range(1):
with tf.name_scope("tower_%d" % i):
xs = xs_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
xt = xt_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
rt = rt_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
ls = ls_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
lt = lt_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
rs = rs_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
# generate
r_1 = tf.random_normal((args.batch_size // num_gpu, args.dim_noise))
s_rand_t1 = MappingNet(r_1, lt - ls)
r_0 = rt
s_rand_t = MappingNet(r_0, lt - ls)
s_rand_s, _ = Encoder(xs, ls - lt)
s_rand1 = s_rand_s + s_rand_t1
xg_rand1 = Generator(xs, s_targ=s_rand1)
s_targ_t, _ = Encoder(xt, lt - ls)
loss_latent = tf.losses.mean_squared_error(s_targ_t, s_rand_t)
r_grads = tf.gradients(loss_latent, r_0)[0]
grads_l2 = tf.reshape(tf.sqrt(tf.reduce_mean(tf.square(r_grads), 1)), [args.batch_size, 1])
normalized_grads = (0.9 / grads_l2 + 5.0) * r_grads
r = tf.stop_gradient(tf.clip_by_value((r_0 + normalized_grads), -1.0, 1.0)) #
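# single gradient-based update of the stored noise: r_0 is shifted by a normalized gradient of the
# latent-matching loss and clipped back into [-1, 1]; stop_gradient keeps this update out of backprop.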
tower_assign = []
for index_i in range(args.batch_size):
tower_assign.append(tf.assign(r_M[tf.reshape(rt_i[index_i], [])], r[index_i]))
s_rand_t = MappingNet(r, lt - ls)
s_rand = s_rand_s + s_rand_t
xg_rand = Generator(xs, s_targ=s_rand)
s_targ = s_targ_t + s_rand_s
xg_targ = Generator(xs, s_targ=s_targ)
alpha = tf.random_uniform((args.batch_size // num_gpu, 1, 1, 1), maxval=1)
s_interp = alpha * s_rand + (1 - alpha) * s_targ
xg_interp = Generator(xs, s_targ=s_interp)
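# decode a style interpolated between the noise-derived style and the reference-derived style;
# the xgi_* discriminator outputs computed below operate on this interpolated image.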
s_targ_t_rand, _ = Encoder(tf.stop_gradient(xg_rand), lt - ls)
s_targ_rand = s_targ_t_rand + s_rand_s
s_targ_t_targ, _ = Encoder(tf.stop_gradient(xg_targ), lt - ls)
s_targ_targ = s_targ_t_targ + s_rand_s
s_rand_rec_t = MappingNet(r, ls - ls)
s_rand_rec_s,_ = Encoder(xs, ls - ls)
s_rand_rec = s_rand_rec_t + s_rand_rec_s
xg_rand_rec = Generator(xs, s_targ=s_rand_rec)
s_targ_t_rec, _ = Encoder(xt, ls - ls)
s_targ_rec = s_targ_t_rec + s_rand_rec_s
xg_targ_rec = Generator(xs, s_targ=s_targ_rec)
l_1 = tf.ones_like(lt)
s_targ_t_targ_oth, _ = Encoder(xg_rand, (l_1-tf.abs(lt - ls))*(l_1-2.0*ls))
s_rand_t_oth, _ = Encoder(xs, (l_1-tf.abs(lt - ls))*(l_1-2.0*ls))
# discriminate
xs_logit_gan, xs_logit_att,_= Discriminator(xs)
xgr_logit_gan, xgr_logit_att,xgr_logit_att_feature = Discriminator(xg_rand)
xgt_logit_gan, xgt_logit_att,xgt_logit_att_feature= Discriminator(xg_targ)
xgi_logit_gan, xgi_logit_att, xgi_logit_att_feature = Discriminator(xg_interp)
# discriminator losses
if args.mode ==
train_arch9.py | (x1, x2, y1, y2, margin):
return tf.reduce_mean(tf.nn.relu(models.inner_product(x1, x2) - models.inner_product(y1, y2) + margin))
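# hinge-style ranking loss: zero once the (y1, y2) inner product exceeds the (x1, x2) inner product by at least `margin`.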
parser = argparse.ArgumentParser()
# settings
dataroot_default = './data/CelebA'
parser.add_argument('--dataroot', type=str, default=dataroot_default)
parser.add_argument('--dataset', type=str, default='celeba')
parser.add_argument('--gpu', type=str, default='0,1',
help='Specify which gpu to use by `CUDA_VISIBLE_DEVICES=num python train.py **kwargs`\
or `python train.py --gpu num` if you\'re running on a multi-gpu environment.\
You need to do nothing if you\'re running on a single-gpu environment or\
the gpu is assigned by a resource manager program.')
parser.add_argument('--threads', type=int, default=-1,
help='Control parallel computation threads,\
please leave it as is if no heavy cpu burden is observed.')
# model
# att_default = ['Bald', 'Bangs', 'Black_Hair', 'Blond_Hair', 'Brown_Hair', 'Bushy_Eyebrows', 'Eyeglasses',
# 'Male', 'Mouth_Slightly_Open', 'Mustache', 'No_Beard', 'Pale_Skin', 'Young']
att_default = ['Young', 'Mouth_Slightly_Open', 'Smiling', 'Black_Hair', 'Blond_Hair', 'Brown_Hair', 'Gray_Hair',
'Receding_Hairline', 'Bangs', 'Male', 'No_Beard', 'Mustache', 'Goatee','Sideburns']
parser.add_argument('--atts', default=att_default, choices=data.Celeba.att_dict.keys(), nargs='+',
help='Attributes to modify by the model')
parser.add_argument('--img_size', type=int, default=128, help='input image size')
# generator
parser.add_argument('--n_downblks_gen', type=int, default=3)
parser.add_argument('--n_intermedblks_gen', type=int, default=2)
parser.add_argument('--n_upblks_gen', type=int, default=3)
parser.add_argument('--ch_gen', type=int, default=32)
# mappingnet
parser.add_argument('--n_layers_map', type=int, default=4)
parser.add_argument('--n_mlp_map', type=int, default=3)
parser.add_argument('--fc_dim_map', type=int, default=64)
parser.add_argument('--dim_noise', type=int, default=16)
# encoder
parser.add_argument('--n_downblks_enc', type=int, default=1)
parser.add_argument('--n_intermedblks_enc', type=int, default=3)
parser.add_argument('--n_mlp_enc', type=int, default=3)
parser.add_argument('--ch_enc', type=int, default=16)
# discriminator
parser.add_argument('--n_resblks_dis', type=int, default=4)
parser.add_argument('--ch_dis', type=int, default=16)
parser.add_argument('--rec_loss_weight', type=float, default=100.0)
# training
parser.add_argument('--mode', default='wgan', choices=['wgan', 'lsgan', 'dcgan'])
parser.add_argument('--epoch', type=int, default=100, help='# of epochs') #200
parser.add_argument('--init_epoch', type=int, default=100, help='# of epochs with init lr.') # 100
parser.add_argument('--batch_size', type=int, default=60)
parser.add_argument('--lr', type=float, default=0.0002, help='learning rate')
parser.add_argument('--n_d', type=int, default=5, help='# of d updates per g update')
parser.add_argument('--n_sample', type=int, default=8, help='# of sample images')
parser.add_argument('--save_freq', type=int, default=0,
help='save model every save_freq iters, 0 means to save every epoch.')
parser.add_argument('--sample_freq', type=int, default=0,
help='eval on validation set every sample_freq iters, 0 means to save every epoch.')
# others
parser.add_argument('--use_cropped_img', action='store_true')
parser.add_argument('--experiment_name', default=datetime.datetime.now().strftime("%Y.%m.%d-%H%M%S"))
parser.add_argument('--num_ckpt', type=int, default=10)
parser.add_argument('--clear', default=False, action='store_true')
args = parser.parse_args()
num_gpu = 1
n_att = len(args.atts)
pylib.mkdir('./output/%s' % args.experiment_name)
with open('./output/%s/setting.txt' % args.experiment_name, 'w') as f:
f.write(json.dumps(vars(args), indent=4, separators=(',', ':')))
# ==============================================================================
# = graphs =
# ==============================================================================
# data
if args.threads >= 0:
cpu_config = tf.ConfigProto(intra_op_parallelism_threads=args.threads // 2,
inter_op_parallelism_threads=args.threads // 2,
device_count={'CPU': args.threads})
sess = tf.Session(config=cpu_config)
else:
sess = tl.session()
crop_ = not args.use_cropped_img
if args.dataset == 'celeba':
tr_data = data.Celeba(args.dataroot, att_default, args.img_size, args.batch_size, part='train', sess=sess, crop=crop_, is_tfrecord = True)
val_data = data.Celeba(args.dataroot, args.atts, args.img_size, args.n_sample, part='val', shuffle=False, sess=sess, crop=crop_)
else:
tr_data = data.x2y(args.dataset, args.dataroot, args.img_size, args.batch_size, part='train', sess=sess, is_tfrecord = True)
val_data = data.x2y(args.dataset, args.dataroot, args.img_size, args.n_sample, part='val', shuffle=False, sess=sess, is_tfrecord = False)
# models
Generator = partial(models.Generator2_wRelu, n_downblks = args.n_downblks_gen, n_intermedblks = args.n_intermedblks_gen, n_upblks = args.n_upblks_gen, ch = args.ch_gen)
MappingNet = partial(models.MappingNet_multiStream_deconv_wIN_wRelu_concat, n_mlp = args.n_mlp_map, n_layers = args.n_layers_map, fc_dim = args.fc_dim_map, ch = args.ch_enc*2)
Encoder = partial(models.Encoder4_wIN_wRelu_concat,n_downblks = args.n_downblks_enc, n_intermedblks = args.n_intermedblks_enc, n_mlp = args.n_mlp_enc, ch = args.ch_enc)
# Discriminator = partial(models.PatchDiscriminator, n_att=n_att, n_resblks = args.n_resblks_dis, ch = args.ch_dis)
# Discriminator = partial(models.Discriminator_multiTask, n_att=n_att, n_resblks = args.n_resblks_dis, ch = args.ch_dis)
Discriminator = partial(models.Discriminator, n_att=n_att, n_resblks = args.n_resblks_dis, ch = args.ch_dis)
# inputs
r_M = tf.get_variable(name='G_R', shape=[182000, args.dim_noise], dtype=tf.float32, initializer=tf.random_normal_initializer(mean=0.0, stddev=1.0))
lr = tf.placeholder(dtype=tf.float32, shape=[])
xs_s = tr_data.batch_op[0]
ls_s = tf.to_float(tr_data.batch_op[1])
rs_s = tr_data.batch_op[2]
if args.dataset == 'celeba':
# b_s = tf.random_shuffle(a_s)
permuted_index = tf.random_shuffle(tf.range(args.batch_size))
xt_s = tf.gather(xs_s, permuted_index)
lt_s = tf.gather(ls_s, permuted_index)
rt_i = tf.gather(rs_s, permuted_index)
rt_s = tf.reshape(tf.gather(r_M, rt_i), [rt_i.shape[0], args.dim_noise])
else:
pass
d_opt = tf.train.AdamOptimizer(lr, beta1=0.5)
g_opt = tf.train.AdamOptimizer(lr, beta1=0.5)
tower_d_grads = []
tower_g_grads = []
tower_d_loss_gan = []
tower_gp = []
tower_d_loss_cls = []
tower_g_loss_sim = []
tower_g_loss_r = []
tower_g_loss_r0 = []
tower_g_loss_gan = []
tower_g_loss_cls = []
tower_g_loss_cyc = []
tower_g_loss_reg = []
tower_g_loss_rec = []
tower_g_loss_interp = []
xs_sample = tf.placeholder(tf.float32, shape=[None, args.img_size, args.img_size, 3])
lt_sample = tf.placeholder(tf.float32, shape=[None, n_att])
ls_sample = tf.placeholder(tf.float32, shape=[None, n_att])
with tf.variable_scope(tf.get_variable_scope()):
for i in range(1):
with tf.name_scope("tower_%d" % i):
xs = xs_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
xt = xt_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
|
train_arch9.py |
parser = argparse.ArgumentParser()
# settings
dataroot_default = './data/CelebA'
parser.add_argument('--dataroot', type=str, default=dataroot_default)
parser.add_argument('--dataset', type=str, default='celeba')
parser.add_argument('--gpu', type=str, default='0,1',
help='Specify which gpu to use by `CUDA_VISIBLE_DEVICES=num python train.py **kwargs`\
or `python train.py --gpu num` if you\'re running on a multi-gpu environment.\
You need to do nothing if you\'re running on a single-gpu environment or\
the gpu is assigned by a resource manager program.')
parser.add_argument('--threads', type=int, default=-1,
help='Control parallel computation threads,\
please leave it as is if no heavy cpu burden is observed.')
# model
# att_default = ['Bald', 'Bangs', 'Black_Hair', 'Blond_Hair', 'Brown_Hair', 'Bushy_Eyebrows', 'Eyeglasses',
# 'Male', 'Mouth_Slightly_Open', 'Mustache', 'No_Beard', 'Pale_Skin', 'Young']
att_default = ['Young', 'Mouth_Slightly_Open', 'Smiling', 'Black_Hair', 'Blond_Hair', 'Brown_Hair', 'Gray_Hair',
'Receding_Hairline', 'Bangs', 'Male', 'No_Beard', 'Mustache', 'Goatee','Sideburns']
parser.add_argument('--atts', default=att_default, choices=data.Celeba.att_dict.keys(), nargs='+',
help='Attributes to modify by the model')
parser.add_argument('--img_size', type=int, default=128, help='input image size')
# generator
parser.add_argument('--n_downblks_gen', type=int, default=3)
parser.add_argument('--n_intermedblks_gen', type=int, default=2)
parser.add_argument('--n_upblks_gen', type=int, default=3)
parser.add_argument('--ch_gen', type=int, default=32)
# mappingnet
parser.add_argument('--n_layers_map', type=int, default=4)
parser.add_argument('--n_mlp_map', type=int, default=3)
parser.add_argument('--fc_dim_map', type=int, default=64)
parser.add_argument('--dim_noise', type=int, default=16)
# encoder
parser.add_argument('--n_downblks_enc', type=int, default=1)
parser.add_argument('--n_intermedblks_enc', type=int, default=3)
parser.add_argument('--n_mlp_enc', type=int, default=3)
parser.add_argument('--ch_enc', type=int, default=16)
# discriminator
parser.add_argument('--n_resblks_dis', type=int, default=4)
parser.add_argument('--ch_dis', type=int, default=16)
parser.add_argument('--rec_loss_weight', type=float, default=100.0)
# training
parser.add_argument('--mode', default='wgan', choices=['wgan', 'lsgan', 'dcgan'])
parser.add_argument('--epoch', type=int, default=100, help='# of epochs') #200
parser.add_argument('--init_epoch', type=int, default=100, help='# of epochs with init lr.') # 100
parser.add_argument('--batch_size', type=int, default=60)
parser.add_argument('--lr', type=float, default=0.0002, help='learning rate')
parser.add_argument('--n_d', type=int, default=5, help='# of d updates per g update')
parser.add_argument('--n_sample', type=int, default=8, help='# of sample images')
parser.add_argument('--save_freq', type=int, default=0,
help='save model every save_freq iters, 0 means to save every epoch.')
parser.add_argument('--sample_freq', type=int, default=0,
help='eval on validation set every sample_freq iters, 0 means to save every epoch.')
# others
parser.add_argument('--use_cropped_img', action='store_true')
parser.add_argument('--experiment_name', default=datetime.datetime.now().strftime("%Y.%m.%d-%H%M%S"))
parser.add_argument('--num_ckpt', type=int, default=10)
parser.add_argument('--clear', default=False, action='store_true')
args = parser.parse_args()
num_gpu = 1
n_att = len(args.atts)
pylib.mkdir('./output/%s' % args.experiment_name)
with open('./output/%s/setting.txt' % args.experiment_name, 'w') as f:
f.write(json.dumps(vars(args), indent=4, separators=(',', ':')))
# ==============================================================================
# = graphs =
# ==============================================================================
# data
if args.threads >= 0:
cpu_config = tf.ConfigProto(intra_op_parallelism_threads=args.threads // 2,
inter_op_parallelism_threads=args.threads // 2,
device_count={'CPU': args.threads})
sess = tf.Session(config=cpu_config)
else:
sess = tl.session()
crop_ = not args.use_cropped_img
if args.dataset == 'celeba':
tr_data = data.Celeba(args.dataroot, att_default, args.img_size, args.batch_size, part='train', sess=sess, crop=crop_, is_tfrecord = True)
val_data = data.Celeba(args.dataroot, args.atts, args.img_size, args.n_sample, part='val', shuffle=False, sess=sess, crop=crop_)
else:
tr_data = data.x2y(args.dataset, args.dataroot, args.img_size, args.batch_size, part='train', sess=sess, is_tfrecord = True)
val_data = data.x2y(args.dataset, args.dataroot, args.img_size, args.n_sample, part='val', shuffle=False, sess=sess, is_tfrecord = False)
# models
Generator = partial(models.Generator2_wRelu, n_downblks = args.n_downblks_gen, n_intermedblks = args.n_intermedblks_gen, n_upblks = args.n_upblks_gen, ch = args.ch_gen)
MappingNet = partial(models.MappingNet_multiStream_deconv_wIN_wRelu_concat, n_mlp = args.n_mlp_map, n_layers = args.n_layers_map, fc_dim = args.fc_dim_map, ch = args.ch_enc*2)
Encoder = partial(models.Encoder4_wIN_wRelu_concat,n_downblks = args.n_downblks_enc, n_intermedblks = args.n_intermedblks_enc, n_mlp = args.n_mlp_enc, ch = args.ch_enc)
# Discriminator = partial(models.PatchDiscriminator, n_att=n_att, n_resblks = args.n_resblks_dis, ch = args.ch_dis)
# Discriminator = partial(models.Discriminator_multiTask, n_att=n_att, n_resblks = args.n_resblks_dis, ch = args.ch_dis)
Discriminator = partial(models.Discriminator, n_att=n_att, n_resblks = args.n_resblks_dis, ch = args.ch_dis)
# inputs
r_M = tf.get_variable(name='G_R', shape=[182000, args.dim_noise], dtype=tf.float32, initializer=tf.random_normal_initializer(mean=0.0, stddev=1.0))
lr = tf.placeholder(dtype=tf.float32, shape=[])
xs_s = tr_data.batch_op[0]
ls_s = tf.to_float(tr_data.batch_op[1])
rs_s = tr_data.batch_op[2]
if args.dataset == 'celeba':
# b_s = tf.random_shuffle(a_s)
permuted_index = tf.random_shuffle(tf.range(args.batch_size))
xt_s = tf.gather(xs_s, permuted_index)
lt_s = tf.gather(ls_s, permuted_index)
rt_i = tf.gather(rs_s, permuted_index)
rt_s = tf.reshape(tf.gather(r_M, rt_i), [rt_i.shape[0], args.dim_noise])
else:
pass
d_opt = tf.train.AdamOptimizer(lr, beta1=0.5)
g_opt = tf.train.AdamOptimizer(lr, beta1=0.5)
tower_d_grads = []
tower_g_grads = []
tower_d_loss_gan = []
tower_gp = []
tower_d_loss_cls = []
tower_g_loss_sim = []
tower_g_loss_r = []
tower_g_loss_r0 = []
tower_g_loss_gan = []
tower_g_loss_cls = []
tower_g_loss_cyc = []
tower_g_loss_reg = []
tower_g_loss_rec = []
tower_g_loss_interp = []
xs_sample = tf.placeholder(tf.float32, shape=[None, args.img_size, args.img_size, 3])
lt_sample = tf.placeholder(tf.float32, shape=[None, n_att])
ls_sample = tf.placeholder(tf.float32, shape=[None, n_att])
with tf.variable_scope(tf.get_variable_scope()):
for i in range(1):
with tf.name_scope("tower_%d" % i):
xs = xs_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
xt = xt_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
rt = rt_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
|
train_arch9.py |
parser.add_argument('--use_cropped_img', action='store_true')
parser.add_argument('--experiment_name', default=datetime.datetime.now().strftime("%Y.%m.%d-%H%M%S"))
parser.add_argument('--num_ckpt', type=int, default=10)
parser.add_argument('--clear', default=False, action='store_true')
args = parser.parse_args()
num_gpu = 1
n_att = len(args.atts)
pylib.mkdir('./output/%s' % args.experiment_name)
with open('./output/%s/setting.txt' % args.experiment_name, 'w') as f:
f.write(json.dumps(vars(args), indent=4, separators=(',', ':')))
# ==============================================================================
# = graphs =
# ==============================================================================
# data
if args.threads >= 0:
cpu_config = tf.ConfigProto(intra_op_parallelism_threads=args.threads // 2,
inter_op_parallelism_threads=args.threads // 2,
device_count={'CPU': args.threads})
sess = tf.Session(config=cpu_config)
else:
sess = tl.session()
crop_ = not args.use_cropped_img
if args.dataset == 'celeba':
tr_data = data.Celeba(args.dataroot, att_default, args.img_size, args.batch_size, part='train', sess=sess, crop=crop_, is_tfrecord = True)
val_data = data.Celeba(args.dataroot, args.atts, args.img_size, args.n_sample, part='val', shuffle=False, sess=sess, crop=crop_)
else:
tr_data = data.x2y(args.dataset, args.dataroot, args.img_size, args.batch_size, part='train', sess=sess, is_tfrecord = True)
val_data = data.x2y(args.dataset, args.dataroot, args.img_size, args.n_sample, part='val', shuffle=False, sess=sess, is_tfrecord = False)
# models
Generator = partial(models.Generator2_wRelu, n_downblks = args.n_downblks_gen, n_intermedblks = args.n_intermedblks_gen, n_upblks = args.n_upblks_gen, ch = args.ch_gen)
MappingNet = partial(models.MappingNet_multiStream_deconv_wIN_wRelu_concat, n_mlp = args.n_mlp_map, n_layers = args.n_layers_map, fc_dim = args.fc_dim_map, ch = args.ch_enc*2)
Encoder = partial(models.Encoder4_wIN_wRelu_concat,n_downblks = args.n_downblks_enc, n_intermedblks = args.n_intermedblks_enc, n_mlp = args.n_mlp_enc, ch = args.ch_enc)
# Discriminator = partial(models.PatchDiscriminator, n_att=n_att, n_resblks = args.n_resblks_dis, ch = args.ch_dis)
# Discriminator = partial(models.Discriminator_multiTask, n_att=n_att, n_resblks = args.n_resblks_dis, ch = args.ch_dis)
Discriminator = partial(models.Discriminator, n_att=n_att, n_resblks = args.n_resblks_dis, ch = args.ch_dis)
# inputs
r_M = tf.get_variable(name='G_R', shape=[182000, args.dim_noise], dtype=tf.float32, initializer=tf.random_normal_initializer(mean=0.0, stddev=1.0))
lr = tf.placeholder(dtype=tf.float32, shape=[])
xs_s = tr_data.batch_op[0]
ls_s = tf.to_float(tr_data.batch_op[1])
rs_s = tr_data.batch_op[2]
if args.dataset == 'celeba':
# b_s = tf.random_shuffle(a_s)
permuted_index = tf.random_shuffle(tf.range(args.batch_size)) | else:
pass
d_opt = tf.train.AdamOptimizer(lr, beta1=0.5)
g_opt = tf.train.AdamOptimizer(lr, beta1=0.5)
tower_d_grads = []
tower_g_grads = []
tower_d_loss_gan = []
tower_gp = []
tower_d_loss_cls = []
tower_g_loss_sim = []
tower_g_loss_r = []
tower_g_loss_r0 = []
tower_g_loss_gan = []
tower_g_loss_cls = []
tower_g_loss_cyc = []
tower_g_loss_reg = []
tower_g_loss_rec = []
tower_g_loss_interp = []
xs_sample = tf.placeholder(tf.float32, shape=[None, args.img_size, args.img_size, 3])
lt_sample = tf.placeholder(tf.float32, shape=[None, n_att])
ls_sample = tf.placeholder(tf.float32, shape=[None, n_att])
with tf.variable_scope(tf.get_variable_scope()):
for i in range(1):
with tf.name_scope("tower_%d" % i):
xs = xs_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
xt = xt_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
rt = rt_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
ls = ls_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
lt = lt_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
rs = rs_s[i * args.batch_size // num_gpu: (i + 1) * args.batch_size // num_gpu]
# generate
r_1 = tf.random_normal((args.batch_size // num_gpu, args.dim_noise))
s_rand_t1 = MappingNet(r_1, lt - ls)
r_0 = rt
s_rand_t = MappingNet(r_0, lt - ls)
s_rand_s, _ = Encoder(xs, ls - lt)
s_rand1 = s_rand_s + s_rand_t1
xg_rand1 = Generator(xs, s_targ=s_rand1)
s_targ_t, _ = Encoder(xt, lt - ls)
loss_latent = tf.losses.mean_squared_error(s_targ_t, s_rand_t)
r_grads = tf.gradients(loss_latent, r_0)[0]
grads_l2 = tf.reshape(tf.sqrt(tf.reduce_mean(tf.square(r_grads), 1)), [args.batch_size, 1])
normalized_grads = (0.9 / grads_l2 + 5.0) * r_grads
r = tf.stop_gradient(tf.clip_by_value((r_0 + normalized_grads), -1.0, 1.0)) #
tower_assign = []
for index_i in range(args.batch_size):
tower_assign.append(tf.assign(r_M[tf.reshape(rt_i[index_i], [])], r[index_i]))
s_rand_t = MappingNet(r, lt - ls)
s_rand = s_rand_s + s_rand_t
xg_rand = Generator(xs, s_targ=s_rand)
s_targ = s_targ_t + s_rand_s
xg_targ = Generator(xs, s_targ=s_targ)
alpha = tf.random_uniform((args.batch_size // num_gpu, 1, 1, 1), maxval=1)
s_interp = alpha * s_rand + (1 - alpha) * s_targ
xg_interp = Generator(xs, s_targ=s_interp)
s_targ_t_rand, _ = Encoder(tf.stop_gradient(xg_rand), lt - ls)
s_targ_rand = s_targ_t_rand + s_rand_s
s_targ_t_targ, _ = Encoder(tf.stop_gradient(xg_targ), lt - ls)
s_targ_targ = s_targ_t_targ + s_rand_s
s_rand_rec_t = MappingNet(r, ls - ls)
s_rand_rec_s,_ = Encoder(xs, ls - ls)
s_rand_rec = s_rand_rec_t + s_rand_rec_s
xg_rand_rec = Generator(xs, s_targ=s_rand_rec)
s_targ_t_rec, _ = Encoder(xt, ls - ls)
s_targ_rec = s_targ_t_rec + s_rand_rec_s
xg_targ_rec = Generator(xs, s_targ=s_targ_rec)
l_1 = tf.ones_like(lt)
s_targ_t_targ_oth, _ = Encoder(xg_rand, (l_1-tf.abs(lt - ls))*(l_1-2.0*ls))
s_rand_t_oth, _ = Encoder(xs, (l_1-tf.abs(lt - ls))*(l_1-2.0*ls))
# discriminate
xs_logit_gan, xs_logit_att,_= Discriminator(xs)
xgr_logit_gan, xgr_logit_att,xgr_logit_att_feature = Discriminator(xg_rand)
xgt_logit_gan, xgt_logit_att,xgt_logit_att_feature= Discriminator(xg_targ)
xgi_logit_gan, xgi_logit_att, xgi_logit_att_feature = Discriminator(xg_interp)
# discriminator losses
if args.mode == | xt_s = tf.gather(xs_s, permuted_index)
lt_s = tf.gather(ls_s, permuted_index)
rt_i = tf.gather(rs_s, permuted_index)
rt_s = tf.reshape(tf.gather(r_M, rt_i), [rt_i.shape[0], args.dim_noise]) | random_line_split |
teslatar.py | ( aPrices ):
found=False
i=0
dt=datetime.now()
#dt=datetime(2019,4,3,11,59,59)
oneHour=timedelta(hours=1)
while i<len(aPrices):
#print( aPrices[i][0], aPrices[i][1] )
if( aPrices[i][0]<=dt and dt<aPrices[i][0]+oneHour ):
found=True
break
i+=1
return found
# basic vars
aChargeMode = [] # time until charge must be finished (-1=start immediate)
aPricesChosen = []
#logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.DEBUG)
logging.basicConfig(filename="file.log", format='%(asctime)s - %(message)s', level=logging.INFO)
logging.critical("Startup Tesla Avatar")
time.sleep(10)
logging.info("Opening connection to Tesla API...")
while True:
try:
tesla = teslapy.Tesla( email, password )
tesla.fetch_token()
break
except:
logging.error("...could not connect (yet), wait, then try again...", exc_info=True)
time.sleep(60)
vehicles = tesla.vehicle_list()
nNumCars = 0
while nNumCars<len(vehicles):
v = vehicles[nNumCars]
logging.info("Car #%u VIN=%s Name=%s, State=%s", nNumCars+1, v["vin"], v["display_name"], v["state"] )
nNumCars+=1
cntExceptions=0
startHour = datetime.now().hour
aPrices=[]
oldPriceHour=-1
oldLenPrices=-1
timeToFull=[]
mode=[]
oldMode=[]
lastModeChange=[]
oldChargeLimitSoc=[]
i=0
while i<nNumCars:
aChargeMode+=[finishHour]
aPricesChosen+=[0]
mode+=[0]
oldMode+=[-1]
lastModeChange+=[0]
oldChargeLimitSoc+=[0]
timeToFull+=[0]
i+=1
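# one slot per car in each list above; `mode` is the per-car state machine
# (0 = no aWATTar control / idle, 1 = wants to charge, 3 = charge during the chosen price hours)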
while True:
try:
# new hour? then load new prices from aWATTar
if oldPriceHour != datetime.now().hour:
# get array with price for every hour in the future
aPrices=getHourlyPrices()
oldPriceHour = datetime.now().hour
# update vehicle structure
vehicles=tesla.vehicle_list() # this command does not effect sleep mode of car
#print( vehicles )
# check every car
nCar=0
info=["state","position","charge mode","idle","charge state"]
while nCar<nNumCars:
#
v = vehicles[nCar]
#
# check if inside a charge hour, if yes then prepare to charge
if mode[nCar]==3 : # CHARGE
# check if current hour is allowed to CHARGE
if isInsidePriceHour(aPricesChosen[nCar]) :
# yes, allowed to charge, then go ONLINE
if v["state"]!="online":
logging.info("Time to charge, wake the car up")
v.sync_wake_up()
lastModeChange[nCar]=0
# query Tesla API for "state"
info[0]=v["state"]
if v["state"]!="online":
# car is not online - let it sleep
logging.info("Car#%u state is '%s', mode=%u", nCar+1, v["state"], mode[nCar] )
oldMode[nCar]=mode[nCar]
lastModeChange[nCar]=0
nCar+=1
continue
# car is online, check if vehicle mode didn't change for x cycles (15min)
if mode[nCar]==3 and isInsidePriceHour(aPricesChosen[nCar]) : # charge hour? don't check for letting sleep
lastModeChange[nCar]=0
lastModeChange[nCar]+=1
if lastModeChange[nCar]>15 :
logging.info("Car#%u seems to be idle, do not poll data anymore -> bring to sleep", nCar+1 )
if lastModeChange[nCar]>15+30: # try 30min to let it sleep
logging.info("Car#%u doesn't go asleep, start polling again", nCar+1 )
lastModeChange[nCar]=0
nCar+=1
continue
# Car needs to be online for getting more data
v = vehicles[nCar]
if v["state"]!="online":
v.sync_wake_up()
vd = v.get_vehicle_data()
ds=vd["drive_state"]
if ds["shift_state"]!=None : # if driving then reset test-for-sleep counter
lastModeChange[nCar]=0
nCar+=1
continue
lati=ds["latitude"] # get position of car
longi=ds["longitude"]
if int(lati*1000)!=int(home_latitute*1000) or int(longi*1000)!=int(home_longitute*1000) :
# car is not at home charger position, ignore
info[1]="anywhere"
mode[nCar]=0
if mode[nCar]!=oldMode[nCar] :
logging.info("Car #%u is not at charger position - ignore", nCar+1)
logging.info( "%u, %u, %u, %u", int(lati*1000), int(home_latitute*1000), int(longi*1000), int(home_longitute*1000) )
oldMode[nCar]=mode[nCar]
nCar+=1
continue
info[1]="@home"
#
cs=vd['charge_state']
logging.debug("Loop car #%u, mode=%u", nCar+1, mode[nCar])
#
# general check if charge logic should NOT be activated
# if no charge schedule is set (owner wants to start NOW, let the car do its thing)
# if charge cable is not inserted
if cs["scheduled_charging_start_time"]==None :
# no charge schedule, let it charge
info[2]="always"
mode[nCar]=0
if mode[nCar]!=oldMode[nCar] :
logging.info("Charge 'always' activated in car #%u", nCar+1)
else:
info[2]="aWATTar"
if cs["charge_limit_soc"]==100 and \
cs["charging_state"]!="Charging" :
v.command('START_CHARGE')
info[2]="topup" # finish to 100% now
mode[nCar]=0
logging.info("CHARGE_LIMIT_SOC is 100 -> start charging now")
if cs["battery_level"]<10 and \
cs["charging_state"]!="Charging" :
v.command('START_CHARGE')
info[2]="toolow" # always charge if SOC<10%
mode[nCar]=0
logging.info("STATE_OF_CHARGE<10 -> too low -> start charging now")
#if cs["charge_port_door_open"]==False and \
# cs["charge_port_latch"]!="Engaged" :
if cs["charge_port_door_open"]==False:
# no charge cable - reset everything
mode[nCar]=0
if mode[nCar]!=oldMode[nCar] :
logging.info("Cable unplugged in car #%u", nCar+1)
if mode[nCar]==0 and cs["charge_port_door_open"]==True and \
cs["charge_port_latch"]=="Engaged" and \
cs["charge_limit_soc"]<100 and \
cs["scheduled_charging_start_time"]!=None : # is charging scheduled?
mode[nCar]=1 # I want to charge depending on aWATTar pricing until next morning
if mode[nCar]!=oldMode[nCar] :
logging.info("Cable inserted in car #%u", nCar+1)
if mode[nCar]==1 : # I_WANT_TO_CHARGE
# check if charge is possible
# only if current SOC is at least 10% lower then MAX_SOC
if cs["charge_limit_soc"]-cs["battery_level"]<10 :
# SOC is too high, no charge
logging.info("SOC is high enough, no charging necessary")
else:
# if still not charging then start charging again
if cs["charging_state"]!="Charging" :
v.command('START_CHARGE')
logging.info("send cmd: start charging")
else:
# now it's charging!
# But wait a bit until charging on full power and give the car time to calculate 'time_to_full_charge'
timeToFull[nCar]=0
i=10
while( i>0 ):
if( cs["charger_power"]<(nWallboxKW-nWallboxKW/10) ) : # wait until on full power, so that extimated time is exact
logging.info("...charging but not on full power yet (%s) - waiting...", cs["charger_power"])
else:
if( timeToFull[nCar | isInsidePriceHour | identifier_name |
|
teslatar.py | =datetime.now()
#dt=datetime(2019,4,3,11,59,59)
oneHour=timedelta(hours=1)
while i<len(aPrices):
#print( aPrices[i][0], aPrices[i][1] )
if( aPrices[i][0]<=dt and dt<aPrices[i][0]+oneHour ):
found=True
break
i+=1
return found
# basic vars
aChargeMode = [] # time until charge must be finished (-1=start immediate)
aPricesChosen = []
#logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.DEBUG)
logging.basicConfig(filename="file.log", format='%(asctime)s - %(message)s', level=logging.INFO)
logging.critical("Startup Tesla Avatar")
time.sleep(10)
logging.info("Opening connection to Tesla API...")
while True:
try:
tesla = teslapy.Tesla( email, password )
tesla.fetch_token()
break
except:
logging.error("...could not connect (yet), wait, then try again...", exc_info=True)
time.sleep(60)
vehicles = tesla.vehicle_list()
nNumCars = 0
while nNumCars<len(vehicles):
v = vehicles[nNumCars]
logging.info("Car #%u VIN=%s Name=%s, State=%s", nNumCars+1, v["vin"], v["display_name"], v["state"] )
nNumCars+=1
cntExceptions=0
startHour = datetime.now().hour
aPrices=[]
oldPriceHour=-1
oldLenPrices=-1
timeToFull=[]
mode=[]
oldMode=[]
lastModeChange=[]
oldChargeLimitSoc=[]
i=0
while i<nNumCars:
aChargeMode+=[finishHour]
aPricesChosen+=[0]
mode+=[0]
oldMode+=[-1]
lastModeChange+=[0]
oldChargeLimitSoc+=[0]
timeToFull+=[0]
i+=1
while True:
try:
# new hour? then load new prices from aWATTar
if oldPriceHour != datetime.now().hour:
# get array with price for every hour in the future
|
# update vehicle structure
vehicles=tesla.vehicle_list() # this command does not effect sleep mode of car
#print( vehicles )
# check every car
nCar=0
info=["state","position","charge mode","idle","charge state"]
while nCar<nNumCars:
#
v = vehicles[nCar]
#
# check if inside a charge hour, if yes then prepare to charge
if mode[nCar]==3 : # CHARGE
# check if current hour is allowed to CHARGE
if isInsidePriceHour(aPricesChosen[nCar]) :
# yes, allowed to charge, then go ONLINE
if v["state"]!="online":
logging.info("Time to charge, wake the car up")
v.sync_wake_up()
lastModeChange[nCar]=0
# query Tesla API for "state"
info[0]=v["state"]
if v["state"]!="online":
# car is not online - let it sleep
logging.info("Car#%u state is '%s', mode=%u", nCar+1, v["state"], mode[nCar] )
oldMode[nCar]=mode[nCar]
lastModeChange[nCar]=0
nCar+=1
continue
# car is online, check if vehicle mode didn't change for x cycles (15min)
if mode[nCar]==3 and isInsidePriceHour(aPricesChosen[nCar]) : # charge hour? don't check for letting sleep
lastModeChange[nCar]=0
lastModeChange[nCar]+=1
if lastModeChange[nCar]>15 :
logging.info("Car#%u seems to be idle, do not poll data anymore -> bring to sleep", nCar+1 )
if lastModeChange[nCar]>15+30: # try 30min to let it sleep
logging.info("Car#%u doesn't go asleep, start polling again", nCar+1 )
lastModeChange[nCar]=0
nCar+=1
continue
# Car needs to be online for getting more data
v = vehicles[nCar]
if v["state"]!="online":
v.sync_wake_up()
vd = v.get_vehicle_data()
ds=vd["drive_state"]
if ds["shift_state"]!=None : # if driving then reset test-for-sleep counter
lastModeChange[nCar]=0
nCar+=1
continue
lati=ds["latitude"] # get position of car
longi=ds["longitude"]
if int(lati*1000)!=int(home_latitute*1000) or int(longi*1000)!=int(home_longitute*1000) :
# car is not at home charger position, ignore
info[1]="anywhere"
mode[nCar]=0
if mode[nCar]!=oldMode[nCar] :
logging.info("Car #%u is not at charger position - ignore", nCar+1)
logging.info( "%u, %u, %u, %u", int(lati*1000), int(home_latitute*1000), int(longi*1000), int(home_longitute*1000) )
oldMode[nCar]=mode[nCar]
nCar+=1
continue
info[1]="@home"
#
cs=vd['charge_state']
logging.debug("Loop car #%u, mode=%u", nCar+1, mode[nCar])
#
# general check if charge logic should NOT be activated
# if no charge schedule is set (owner wants to start NOW, let the car do its thing)
# if charge cable is not inserted
if cs["scheduled_charging_start_time"]==None :
# no charge schedule, let it charge
info[2]="always"
mode[nCar]=0
if mode[nCar]!=oldMode[nCar] :
logging.info("Charge 'always' activated in car #%u", nCar+1)
else:
info[2]="aWATTar"
if cs["charge_limit_soc"]==100 and \
cs["charging_state"]!="Charging" :
v.command('START_CHARGE')
info[2]="topup" # finish to 100% now
mode[nCar]=0
logging.info("CHARGE_LIMIT_SOC is 100 -> start charging now")
if cs["battery_level"]<10 and \
cs["charging_state"]!="Charging" :
v.command('START_CHARGE')
info[2]="toolow" # always charge if SOC<10%
mode[nCar]=0
logging.info("STATE_OF_CHARGE<10 -> too low -> start charging now")
#if cs["charge_port_door_open"]==False and \
# cs["charge_port_latch"]!="Engaged" :
if cs["charge_port_door_open"]==False:
# no charge cable - reset everything
mode[nCar]=0
if mode[nCar]!=oldMode[nCar] :
logging.info("Cable unplugged in car #%u", nCar+1)
if mode[nCar]==0 and cs["charge_port_door_open"]==True and \
cs["charge_port_latch"]=="Engaged" and \
cs["charge_limit_soc"]<100 and \
cs["scheduled_charging_start_time"]!=None : # is charging scheduled?
mode[nCar]=1 # I want to charge depending on aWATTar pricing until next morning
if mode[nCar]!=oldMode[nCar] :
logging.info("Cable inserted in car #%u", nCar+1)
if mode[nCar]==1 : # I_WANT_TO_CHARGE
# check if charge is possible
# only if current SOC is at least 10% lower then MAX_SOC
if cs["charge_limit_soc"]-cs["battery_level"]<10 :
# SOC is too high, no charge
logging.info("SOC is high enough, no charging necessary")
else:
# if still not charging then start charging again
if cs["charging_state"]!="Charging" :
v.command('START_CHARGE')
logging.info("send cmd: start charging")
else:
# now it's charging!
# But wait a bit until charging on full power and give the car time to calculate 'time_to_full_charge'
timeToFull[nCar]=0
i=10
while( i>0 ):
if( cs["charger_power"]<(nWallboxKW-nWallboxKW/10) ) : # wait until on full power, so that extimated time is exact
logging.info("...charging but not on full power yet (%s) - waiting...", cs["charger_power"])
else:
if( timeToFull[nCar]!=0 and timeToFull[nCar]==cs["time_to_full_charge"] ): # is | aPrices=getHourlyPrices()
oldPriceHour = datetime.now().hour | conditional_block |
teslatar.py |
# returns an array of [datetime, price] pairs, one per hour, queried from the aWATTar market data API
def getHourlyPrices():
aPrices=[]
logging.info("Query aWATTar for new pricing...")
r = requests.get('https://api.awattar.de/v1/marketdata')
j = r.json()["data"]
#print( j )
for i in j:
#print( i["start_timestamp"]/1000, i["marketprice"], time.ctime(i["start_timestamp"]/1000), round(i["marketprice"]/10*1.19,2) )
dt = datetime.fromtimestamp(i["start_timestamp"]/1000)
p = round(i["marketprice"]/10*1.19,2) # convert from Eur/MWh to Cent/kWh plus 19% VAT
logging.info( dt.strftime("%Y-%m-%d %H = ")+str(p) )
aPrices.append([dt,p ])
return aPrices
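# illustrative shape of the result (values made up): [[datetime(2023, 5, 1, 0, 0), 9.52], [datetime(2023, 5, 1, 1, 0), 8.87], ...]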
# checks whether the current time falls inside one of the price hours listed in aPrices
def isInsidePriceHour( aPrices ):
found=False
i=0
dt=datetime.now()
#dt=datetime(2019,4,3,11,59,59)
oneHour=timedelta(hours=1)
while i<len(aPrices):
#print( aPrices[i][0], aPrices[i][1] )
if( aPrices[i][0]<=dt and dt<aPrices[i][0]+oneHour ):
found=True
break
i+=1
return found
# basic vars
aChargeMode = [] # time until charge must be finished (-1=start immediate)
aPricesChosen = []
#logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.DEBUG)
logging.basicConfig(filename="file.log", format='%(asctime)s - %(message)s', level=logging.INFO)
logging.critical("Startup Tesla Avatar")
time.sleep(10)
logging.info("Opening connection to Tesla API...")
while True:
try:
tesla = teslapy.Tesla( email, password )
tesla.fetch_token()
break
except:
logging.error("...could not connect (yet), wait, then try again...", exc_info=True)
time.sleep(60)
vehicles = tesla.vehicle_list()
nNumCars = 0
while nNumCars<len(vehicles):
v = vehicles[nNumCars]
logging.info("Car #%u VIN=%s Name=%s, State=%s", nNumCars+1, v["vin"], v["display_name"], v["state"] )
nNumCars+=1
cntExceptions=0
startHour = datetime.now().hour
aPrices=[]
oldPriceHour=-1
oldLenPrices=-1
timeToFull=[]
mode=[]
oldMode=[]
lastModeChange=[]
oldChargeLimitSoc=[]
i=0
while i<nNumCars:
aChargeMode+=[finishHour]
aPricesChosen+=[0]
mode+=[0]
oldMode+=[-1]
lastModeChange+=[0]
oldChargeLimitSoc+=[0]
timeToFull+=[0]
i+=1
while True:
try:
# new hour? then load new prices from aWATTar
if oldPriceHour != datetime.now().hour:
# get array with price for every hour in the future
aPrices=getHourlyPrices()
oldPriceHour = datetime.now().hour
# update vehicle structure
vehicles=tesla.vehicle_list() # this command does not effect sleep mode of car
#print( vehicles )
# check every car
nCar=0
info=["state","position","charge mode","idle","charge state"]
while nCar<nNumCars:
#
v = vehicles[nCar]
#
# check if inside a charge hour, if yes then prepare to charge
if mode[nCar]==3 : # CHARGE
# check if current hour is allowed to CHARGE
if isInsidePriceHour(aPricesChosen[nCar]) :
# yes, allowed to charge, then go ONLINE
if v["state"]!="online":
logging.info("Time to charge, wake the car up")
v.sync_wake_up()
lastModeChange[nCar]=0
# query Tesla API for "state"
info[0]=v["state"]
if v["state"]!="online":
# car is not online - let it sleep
logging.info("Car#%u state is '%s', mode=%u", nCar+1, v["state"], mode[nCar] )
oldMode[nCar]=mode[nCar]
lastModeChange[nCar]=0
nCar+=1
continue
# car is online, check if vehicle mode didn't change for x cycles (15min)
if mode[nCar]==3 and isInsidePriceHour(aPricesChosen[nCar]) : # charge hour? don't check for letting sleep
lastModeChange[nCar]=0
lastModeChange[nCar]+=1
if lastModeChange[nCar]>15 :
logging.info("Car#%u seems to be idle, do not poll data anymore -> bring to sleep", nCar+1 )
if lastModeChange[nCar]>15+30: # try 30min to let it sleep
logging.info("Car#%u doesn't go asleep, start polling again", nCar+1 )
lastModeChange[nCar]=0
nCar+=1
continue
# Car needs to be online for getting more data
v = vehicles[nCar]
if v["state"]!="online":
v.sync_wake_up()
vd = v.get_vehicle_data()
ds=vd["drive_state"]
if ds["shift_state"]!=None : # if driving then reset test-for-sleep counter
lastModeChange[nCar]=0
nCar+=1
continue
lati=ds["latitude"] # get position of car
longi=ds["longitude"]
if int(lati*1000)!=int(home_latitute*1000) or int(longi*1000)!=int(home_longitute*1000) :
# car is not at home charger position, ignore
info[1]="anywhere"
mode[nCar]=0
if mode[nCar]!=oldMode[nCar] :
logging.info("Car #%u is not at charger position - ignore", nCar+1)
logging.info( "%u, %u, %u, %u", int(lati*1000), int(home_latitute*1000), int(longi*1000), int(home_longitute*1000) )
oldMode[nCar]=mode[nCar]
nCar+=1
continue
info[1]="@home"
#
cs=vd['charge_state']
logging.debug("Loop car #%u, mode=%u", nCar+1, mode[nCar])
#
# general check if charge logic should NOT be activated
# if no charge schedule is set (owner wants to start NOW, let the car do its thing)
# if charge cable is not inserted
if cs["scheduled_charging_start_time"]==None :
# no charge schedule, let it charge
info[2]="always"
mode[nCar]=0
if mode[nCar]!=oldMode[nCar] :
logging.info("Charge 'always' activated in car #%u", nCar+1)
else:
info[2]="aWATTar"
if cs["charge_limit_soc"]==100 and \
cs["charging_state"]!="Charging" :
v.command('START_CHARGE')
info[2]="topup" # finish to 100% now
mode[nCar]=0
logging.info("CHARGE_LIMIT_SOC is 100 -> start charging now")
if cs["battery_level"]<10 and \
cs["charging_state"]!="Charging" :
v.command('START_CHARGE')
info[2]="toolow" # always charge if SOC<10%
mode[nCar]=0
logging.info("STATE_OF_CHARGE<10 -> too low -> start charging now")
#if cs["charge_port_door_open"]==False and \
# cs["charge_port_latch"]!="Engaged" :
if cs["charge_port_door_open"]==False:
# no charge cable - reset everything
mode[nCar]=0
if mode[nCar]!=oldMode[nCar] :
logging.info("Cable unplugged in car #%u", nCar+1)
if mode[nCar]==0 and cs["charge_port_door_open"]==True and \
cs["charge_port_latch"]=="Engaged" and \
cs["charge_limit_soc"]<100 and \
cs["scheduled_charging_start_time"]!=None : # is charging scheduled?
mode[nCar]=1 # I want to charge depending on aWATTar pricing until next | now=float(datetime.now().strftime("%H"))+float(datetime.now().strftime("%M"))/60
hoursLeft=0
if( now>then ):
hoursLeft=24-now+then
else:
hoursLeft=then-now
return( hoursLeft ) | identifier_body |
|
teslatar.py | =datetime.now()
#dt=datetime(2019,4,3,11,59,59)
oneHour=timedelta(hours=1)
while i<len(aPrices):
#print( aPrices[i][0], aPrices[i][1] )
if( aPrices[i][0]<=dt and dt<aPrices[i][0]+oneHour ):
found=True
break
i+=1
return found
# basic vars
aChargeMode = [] # time until charge must be finished (-1=start immediate)
aPricesChosen = []
#logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.DEBUG)
logging.basicConfig(filename="file.log", format='%(asctime)s - %(message)s', level=logging.INFO)
logging.critical("Startup Tesla Avatar")
time.sleep(10)
logging.info("Opening connection to Tesla API...")
while True:
try:
tesla = teslapy.Tesla( email, password )
tesla.fetch_token()
break
except:
logging.error("...could not connect (yet), wait, then try again...", exc_info=True)
time.sleep(60)
vehicles = tesla.vehicle_list()
nNumCars = 0
while nNumCars<len(vehicles):
v = vehicles[nNumCars]
logging.info("Car #%u VIN=%s Name=%s, State=%s", nNumCars+1, v["vin"], v["display_name"], v["state"] )
nNumCars+=1
cntExceptions=0
| startHour = datetime.now().hour
aPrices=[]
oldPriceHour=-1
oldLenPrices=-1
timeToFull=[]
mode=[]
oldMode=[]
lastModeChange=[]
oldChargeLimitSoc=[]
i=0
while i<nNumCars:
aChargeMode+=[finishHour]
aPricesChosen+=[0]
mode+=[0]
oldMode+=[-1]
lastModeChange+=[0]
oldChargeLimitSoc+=[0]
timeToFull+=[0]
i+=1
while True:
try:
# new hour? then load new prices from aWATTar
if oldPriceHour != datetime.now().hour:
# get array with price for every hour in the future
aPrices=getHourlyPrices()
oldPriceHour = datetime.now().hour
# update vehicle structure
vehicles=tesla.vehicle_list() # this command does not effect sleep mode of car
#print( vehicles )
# check every car
nCar=0
info=["state","position","charge mode","idle","charge state"]
while nCar<nNumCars:
#
v = vehicles[nCar]
#
# check if inside a charge hour, if yes then prepare to charge
if mode[nCar]==3 : # CHARGE
# check if current hour is allowed to CHARGE
if isInsidePriceHour(aPricesChosen[nCar]) :
# yes, allowed to charge, then go ONLINE
if v["state"]!="online":
logging.info("Time to charge, wake the car up")
v.sync_wake_up()
lastModeChange[nCar]=0
# query Tesla API for "state"
info[0]=v["state"]
if v["state"]!="online":
# car is not online - let it sleep
logging.info("Car#%u state is '%s', mode=%u", nCar+1, v["state"], mode[nCar] )
oldMode[nCar]=mode[nCar]
lastModeChange[nCar]=0
nCar+=1
continue
# car is online, check if vehicle mode didn't change for x cycles (15min)
if mode[nCar]==3 and isInsidePriceHour(aPricesChosen[nCar]) : # charge hour? don't check for letting sleep
lastModeChange[nCar]=0
lastModeChange[nCar]+=1
if lastModeChange[nCar]>15 :
logging.info("Car#%u seems to be idle, do not poll data anymore -> bring to sleep", nCar+1 )
if lastModeChange[nCar]>15+30: # try 30min to let it sleep
logging.info("Car#%u doesn't go asleep, start polling again", nCar+1 )
lastModeChange[nCar]=0
nCar+=1
continue
# Car needs to be online for getting more data
v = vehicles[nCar]
if v["state"]!="online":
v.sync_wake_up()
vd = v.get_vehicle_data()
ds=vd["drive_state"]
if ds["shift_state"]!=None : # if driving then reset test-for-sleep counter
lastModeChange[nCar]=0
nCar+=1
continue
lati=ds["latitude"] # get position of car
longi=ds["longitude"]
if int(lati*1000)!=int(home_latitute*1000) or int(longi*1000)!=int(home_longitute*1000) :
# car is not at home charger position, ignore
info[1]="anywhere"
mode[nCar]=0
if mode[nCar]!=oldMode[nCar] :
logging.info("Car #%u is not at charger position - ignore", nCar+1)
logging.info( "%u, %u, %u, %u", int(lati*1000), int(home_latitute*1000), int(longi*1000), int(home_longitute*1000) )
oldMode[nCar]=mode[nCar]
nCar+=1
continue
info[1]="@home"
#
cs=vd['charge_state']
logging.debug("Loop car #%u, mode=%u", nCar+1, mode[nCar])
#
# general check if charge logic should NOT be activated
# if no charge schedule is set (owner wants to start NOW, let the car do its thing)
# if charge cable is not inserted
if cs["scheduled_charging_start_time"]==None :
# no charge schedule, let it charge
info[2]="always"
mode[nCar]=0
if mode[nCar]!=oldMode[nCar] :
logging.info("Charge 'always' activated in car #%u", nCar+1)
else:
info[2]="aWATTar"
if cs["charge_limit_soc"]==100 and \
cs["charging_state"]!="Charging" :
v.command('START_CHARGE')
info[2]="topup" # finish to 100% now
mode[nCar]=0
logging.info("CHARGE_LIMIT_SOC is 100 -> start charging now")
if cs["battery_level"]<10 and \
cs["charging_state"]!="Charging" :
v.command('START_CHARGE')
info[2]="toolow" # always charge if SOC<10%
mode[nCar]=0
logging.info("STATE_OF_CHARGE<10 -> too low -> start charging now")
#if cs["charge_port_door_open"]==False and \
# cs["charge_port_latch"]!="Engaged" :
if cs["charge_port_door_open"]==False:
# no charge cable - reset everything
mode[nCar]=0
if mode[nCar]!=oldMode[nCar] :
logging.info("Cable unplugged in car #%u", nCar+1)
if mode[nCar]==0 and cs["charge_port_door_open"]==True and \
cs["charge_port_latch"]=="Engaged" and \
cs["charge_limit_soc"]<100 and \
cs["scheduled_charging_start_time"]!=None : # is charging scheduled?
mode[nCar]=1 # I want to charge depending on aWATTar pricing until next morning
if mode[nCar]!=oldMode[nCar] :
logging.info("Cable inserted in car #%u", nCar+1)
if mode[nCar]==1 : # I_WANT_TO_CHARGE
# check if charge is possible
# only if current SOC is at least 10% lower than MAX_SOC
if cs["charge_limit_soc"]-cs["battery_level"]<10 :
# SOC is too high, no charge
logging.info("SOC is high enough, no charging necessary")
else:
# if still not charging then start charging again
if cs["charging_state"]!="Charging" :
v.command('START_CHARGE')
logging.info("send cmd: start charging")
else:
# now it's charging!
# But wait a bit until charging on full power and give the car time to calculate 'time_to_full_charge'
timeToFull[nCar]=0
i=10
while( i>0 ):
if( cs["charger_power"]<(nWallboxKW-nWallboxKW/10) ) : # wait until on full power, so that estimated time is exact
logging.info("...charging but not on full power yet (%s) - waiting...", cs["charger_power"])
else:
if( timeToFull[nCar]!=0 and timeToFull[nCar]==cs["time_to_full_charge"] ): # is | random_line_split |
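isInsidePriceHour above scans the aWATTar price list and reports whether the current time falls inside one of the chosen one-hour slots. A minimal sketch of the same containment test, assuming each entry is a (slot_start_datetime, price) pair as in the loop shown earlier (the name inside_any_slot is illustrative):

from datetime import datetime, timedelta

def inside_any_slot(slots, dt=None):
    # slots: iterable of (slot_start, price); each slot covers [slot_start, slot_start + 1h)
    dt = dt or datetime.now()
    one_hour = timedelta(hours=1)
    return any(start <= dt < start + one_hour for start, _price in slots)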
|
my_agent.py | 4), (8, 5), (10, 5)],
(9,6): [(9, 7), (9, 5), (8, 6), (10, 6)],
(9,7): [(9, 8), (9, 6), (8, 7), (10, 7)],
(9,8): [(9, 9), (9, 7), (8, 8), (10, 8)],
(9,9): [(9, 8), (8, 9), (10, 9)],
(10,0): [(10, 1), (9, 0), (11, 0)],
(10,1): [(10, 2), (10, 0), (9, 1), (11, 1)],
(10,2): [(10, 3), (10, 1), (9, 2), (11, 2)],
(10,3): [(10, 4), (10, 2), (9, 3), (11, 3)],
(10,4): [(10, 5), (10, 3), (9, 4), (11, 4)],
(10,5): [(10, 6), (10, 4), (9, 5), (11, 5)],
(10,6): [(10, 7), (10, 5), (9, 6), (11, 6)],
(10,7): [(10, 8), (10, 6), (9, 7), (11, 7)],
(10,8): [(10, 9), (10, 7), (9, 8), (11, 8)],
(10,9): [(10, 8), (9, 9), (11, 9)],
(11,0): [(11, 1), (10, 0)],
(11,1): [(11, 2), (11, 0), (10, 1)],
(11,2): [(11, 3), (11, 1), (10, 2)],
(11,3): [(11, 4), (11, 2), (10, 3)],
(11,4): [(11, 5), (11, 3), (10, 4)],
(11,5): [(11, 6), (11, 4), (10, 5)],
(11,6): [(11, 7), (11, 5), (10, 6)],
(11,7): [(11, 8), (11, 6), (10, 7)],
(11,8): [(11, 9), (11, 7), (10, 8)],
(11,9): [(11, 8), (10, 9)],
}
for block in blocks:
for neighbour in graph[block]:
graph[neighbour].remove(block)
graph.pop(block)
return graph
# generate graph edges in the up, down, left, right directions that are within the 12 * 10 bounds
# for i in range(12):
# for j in range(10):
# tuples = []
# if j + 1 < 10:
# tuples.append((i, j + 1))
# if j - 1 >= 0:
# tuples.append((i, j - 1))
# if i - 1 >= 0:
# tuples.append((i - 1, j))
# if i + 1 < 12:
# tuples.append((i + 1, j))
# print(f'({i},{j}): {tuples},')
def print_graph(graph, traps=None):
"Prints graph in a nice format"
output = [
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]']
]
for node in graph:
output[node[1]][node[0]] = '[ ]'
if traps is not None:
for trap in traps:
output[trap[1]][trap[0]] = '[O]'
for i in range(9, -1, -1):
print(f'{output[i][0]} {output[i][1]} {output[i][2]} {output[i][3]} {output[i][4]} {output[i][5]} {output[i][6]} {output[i][7]} {output[i][8]} {output[i][9]} {output[i][10]} {output[i][11]}')
def random_blocks():
""" Generate 43 random blocks (board always starts with 43)
Can set seed for consistent testing graph
"""
cells = []
while len(cells) != 43:
cell_to_add = (random.randint(0, 11), random.randint(0, 9))
if cell_to_add not in cells:
cells.append(cell_to_add)
return cells
def locate_traps(graph):
""" Returns a list containing traps. """
traps = []
for node in graph:
if len(graph[node]) < 2:
traps.append(node)
continue
else:
neighbours = graph[node]
# copy graph and delete the node
temp_graph = copy.deepcopy(graph)
for neighbour in neighbours:
temp_graph[neighbour].remove(node)
temp_graph.pop(node)
# heuristic: if you can BFS from a node's neighbour to all other neighbours in < 10 steps (after removing that node), then graph is still connected => not a trappable node
BFS_q = deque()
visited = [[False] * 12 for _ in range(10)]
visited[neighbours[0][1]][neighbours[0][0]] = True
BFS_q.append(neighbours[0])
counter = 0
while len(BFS_q) > 0 and counter < 10:
u = BFS_q.popleft()
for BFS_neighbour in temp_graph[u]:
if not visited[BFS_neighbour[1]][BFS_neighbour[0]]:
visited[BFS_neighbour[1]][BFS_neighbour[0]] = True
BFS_q.append(BFS_neighbour)
counter += 1
for neighbour in neighbours:
if visited[neighbour[1]][neighbour[0]] is False:
traps.append(node)
break # one unreachable neighbour is enough; stop checking so the node is appended only once
return (traps)
if __name__ == "__main__":
| blocks = random_blocks()
graph = convert_to_graph(blocks)
traps = locate_traps(graph)
print_graph(graph, traps) # x = solid block, O = trap. set traps=None to see a clear game state.
# function to time decision making
# times = []
# for i in range(500):
# blocks = random_blocks()
# graph = convert_to_graph(blocks)
# start = timeit.default_timer()
# traps = locate_traps(graph)
# stop = timeit.default_timer()
# times.append(stop-start)
# print(str(sum(times)/500)) | conditional_block |
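The locate_traps heuristic above deletes a node from a copy of the grid graph, BFS-walks from one of its neighbours with the expansion capped at 10 dequeues, and flags the node if some other neighbour was never reached. A minimal illustration of that idea on a hand-built three-node path, independent of the 12x10 board (function and node names are illustrative):

from collections import deque

def disconnects_neighbours(graph, node, max_steps=10):
    nbrs = graph[node]
    # remove the node, as locate_traps does on its deep copy
    trimmed = {k: [v for v in vs if v != node] for k, vs in graph.items() if k != node}
    seen = {nbrs[0]}
    q = deque([nbrs[0]])
    steps = 0
    while q and steps < max_steps:
        for v in trimmed[q.popleft()]:
            if v not in seen:
                seen.add(v)
                q.append(v)
        steps += 1
    return any(n not in seen for n in nbrs)

# removing b cuts the path a-b-c, so b is a trap candidate; a is not
path = {'a': ['b'], 'b': ['a', 'c'], 'c': ['b']}
assert disconnects_neighbours(path, 'b') and not disconnects_neighbours(path, 'a')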
|
my_agent.py | (self):
'''
Place any initialisation code for your agent here (if any)
'''
pass
def next_move(self, game_state, player_state):
'''
This method is called each time your Agent is required to choose an action
If you're just starting out or are new to Python, you can place all your
code within the ### CODE HERE ### tags. If you're more familiar with Python
and how classes and modules work, then go nuts.
(Although we recommend that you use the Scrims to check your Agent is working)
'''
###### CODE HERE ######
# a list of all the actions your Agent can choose from
actions = ['','u','d','l','r','p']
# randomly choosing an action
action = random.choice(actions)
###### END CODE ######
return action
def convert_to_graph(blocks):
""" Converts tiles to nodes and adds an edge between tiles that are not a solid block.
"""
# declaring graph and removing solid blocks is more efficient than generating the whole graph
graph = {
(0,0): [(0, 1), (1, 0)],
(0,1): [(0, 2), (0, 0), (1, 1)],
(0,2): [(0, 3), (0, 1), (1, 2)],
(0,3): [(0, 4), (0, 2), (1, 3)],
(0,4): [(0, 5), (0, 3), (1, 4)],
(0,5): [(0, 6), (0, 4), (1, 5)],
(0,6): [(0, 7), (0, 5), (1, 6)],
(0,7): [(0, 8), (0, 6), (1, 7)],
(0,8): [(0, 9), (0, 7), (1, 8)],
(0,9): [(0, 8), (1, 9)],
(1,0): [(1, 1), (0, 0), (2, 0)],
(1,1): [(1, 2), (1, 0), (0, 1), (2, 1)],
(1,2): [(1, 3), (1, 1), (0, 2), (2, 2)],
(1,3): [(1, 4), (1, 2), (0, 3), (2, 3)],
(1,4): [(1, 5), (1, 3), (0, 4), (2, 4)],
(1,5): [(1, 6), (1, 4), (0, 5), (2, 5)],
(1,6): [(1, 7), (1, 5), (0, 6), (2, 6)],
(1,7): [(1, 8), (1, 6), (0, 7), (2, 7)],
(1,8): [(1, 9), (1, 7), (0, 8), (2, 8)],
(1,9): [(1, 8), (0, 9), (2, 9)],
(2,0): [(2, 1), (1, 0), (3, 0)],
(2,1): [(2, 2), (2, 0), (1, 1), (3, 1)],
(2,2): [(2, 3), (2, 1), (1, 2), (3, 2)],
(2,3): [(2, 4), (2, 2), (1, 3), (3, 3)],
(2,4): [(2, 5), (2, 3), (1, 4), (3, 4)],
(2,5): [(2, 6), (2, 4), (1, 5), (3, 5)],
(2,6): [(2, 7), (2, 5), (1, 6), (3, 6)],
(2,7): [(2, 8), (2, 6), (1, 7), (3, 7)],
(2,8): [(2, 9), (2, 7), (1, 8), (3, 8)],
(2,9): [(2, 8), (1, 9), (3, 9)],
(3,0): [(3, 1), (2, 0), (4, 0)],
(3,1): [(3, 2), (3, 0), (2, 1), (4, 1)],
(3,2): [(3, 3), (3, 1), (2, 2), (4, 2)],
(3,3): [(3, 4), (3, 2), (2, 3), (4, 3)],
(3,4): [(3, 5), (3, 3), (2, 4), (4, 4)],
(3,5): [(3, 6), (3, 4), (2, 5), (4, 5)],
(3,6): [(3, 7), (3, 5), (2, 6), (4, 6)],
(3,7): [(3, 8), (3, 6), (2, 7), (4, 7)],
(3,8): [(3, 9), (3, 7), (2, 8), (4, 8)],
(3,9): [(3, 8), (2, 9), (4, 9)],
(4,0): [(4, 1), (3, 0), (5, 0)],
(4,1): [(4, 2), (4, 0), (3, 1), (5, 1)],
(4,2): [(4, 3), (4, 1), (3, 2), (5, 2)],
(4,3): [(4, 4), (4, 2), (3, 3), (5, 3)],
(4,4): [(4, 5), (4, 3), (3, 4), (5, 4)],
(4,5): [(4, 6), (4, 4), (3, 5), (5, 5)],
(4,6): [(4, 7), (4, 5), (3, 6), (5, 6)],
(4,7): [(4, 8), (4, 6), (3, 7), (5, 7)],
(4,8): [(4, 9), (4, 7), (3, 8), (5, 8)],
(4,9): [(4, 8), (3, 9), (5, 9)],
(5,0): [(5, 1), (4, 0), (6, 0)],
(5,1): [(5, 2), (5, 0), (4, 1), (6, 1)],
(5,2): [(5, 3), (5, 1), (4, 2), (6, 2)],
(5,3): [(5, 4), (5, 2), (4, 3), (6, 3)],
(5,4): [(5, 5), (5, 3), (4, 4), (6, 4)],
(5,5): [(5, 6), (5, 4), (4, 5), (6, 5)],
(5,6): [(5, 7), (5, 5), (4, 6), (6, 6)],
(5,7): [(5, 8), (5, 6), (4, 7), (6, 7)],
(5,8): [(5, 9), (5, 7), (4, 8), (6, 8)],
(5,9): [(5, 8), (4, 9), (6, 9)],
(6,0): [(6, 1), (5, 0), (7, 0)],
(6,1): [(6, 2), (6, 0), (5, 1), (7, 1)],
(6,2): [(6, 3), (6, 1), (5, 2), (7, 2)],
(6,3): [(6, 4), (6, 2), (5, 3), (7, 3)],
(6,4): [(6, 5), (6, 3), (5, 4), (7, 4)],
| __init__ | identifier_name |
|
my_agent.py | 9), (8, 9)],
(8,0): [(8, 1), (7, 0), (9, 0)],
(8,1): [(8, 2), (8, 0), (7, 1), (9, 1)],
(8,2): [(8, 3), (8, 1), (7, 2), (9, 2)],
(8,3): [(8, 4), (8, 2), (7, 3), (9, 3)],
(8,4): [(8, 5), (8, 3), (7, 4), (9, 4)],
(8,5): [(8, 6), (8, 4), (7, 5), (9, 5)],
(8,6): [(8, 7), (8, 5), (7, 6), (9, 6)],
(8,7): [(8, 8), (8, 6), (7, 7), (9, 7)],
(8,8): [(8, 9), (8, 7), (7, 8), (9, 8)],
(8,9): [(8, 8), (7, 9), (9, 9)],
(9,0): [(9, 1), (8, 0), (10, 0)],
(9,1): [(9, 2), (9, 0), (8, 1), (10, 1)],
(9,2): [(9, 3), (9, 1), (8, 2), (10, 2)],
(9,3): [(9, 4), (9, 2), (8, 3), (10, 3)],
(9,4): [(9, 5), (9, 3), (8, 4), (10, 4)],
(9,5): [(9, 6), (9, 4), (8, 5), (10, 5)],
(9,6): [(9, 7), (9, 5), (8, 6), (10, 6)],
(9,7): [(9, 8), (9, 6), (8, 7), (10, 7)],
(9,8): [(9, 9), (9, 7), (8, 8), (10, 8)],
(9,9): [(9, 8), (8, 9), (10, 9)],
(10,0): [(10, 1), (9, 0), (11, 0)],
(10,1): [(10, 2), (10, 0), (9, 1), (11, 1)],
(10,2): [(10, 3), (10, 1), (9, 2), (11, 2)],
(10,3): [(10, 4), (10, 2), (9, 3), (11, 3)],
(10,4): [(10, 5), (10, 3), (9, 4), (11, 4)],
(10,5): [(10, 6), (10, 4), (9, 5), (11, 5)],
(10,6): [(10, 7), (10, 5), (9, 6), (11, 6)],
(10,7): [(10, 8), (10, 6), (9, 7), (11, 7)],
(10,8): [(10, 9), (10, 7), (9, 8), (11, 8)],
(10,9): [(10, 8), (9, 9), (11, 9)],
(11,0): [(11, 1), (10, 0)],
(11,1): [(11, 2), (11, 0), (10, 1)],
(11,2): [(11, 3), (11, 1), (10, 2)],
(11,3): [(11, 4), (11, 2), (10, 3)],
(11,4): [(11, 5), (11, 3), (10, 4)],
(11,5): [(11, 6), (11, 4), (10, 5)],
(11,6): [(11, 7), (11, 5), (10, 6)],
(11,7): [(11, 8), (11, 6), (10, 7)],
(11,8): [(11, 9), (11, 7), (10, 8)],
(11,9): [(11, 8), (10, 9)],
}
for block in blocks:
for neighbour in graph[block]:
graph[neighbour].remove(block)
graph.pop(block)
return graph
# generate graph edges in the up, down, left, right directions that are within the 12 * 10 bounds
# for i in range(12):
# for j in range(10):
# tuples = []
# if j + 1 < 10:
# tuples.append((i, j + 1))
# if j - 1 >= 0:
# tuples.append((i, j - 1))
# if i - 1 >= 0:
# tuples.append((i - 1, j))
# if i + 1 < 12:
# tuples.append((i + 1, j))
# print(f'({i},{j}): {tuples},')
def print_graph(graph, traps=None):
"Prints graph in a nice format"
output = [
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]']
]
for node in graph:
output[node[1]][node[0]] = '[ ]'
if traps is not None:
for trap in traps:
output[trap[1]][trap[0]] = '[O]'
for i in range(9, -1, -1):
print(f'{output[i][0]} {output[i][1]} {output[i][2]} {output[i][3]} {output[i][4]} {output[i][5]} {output[i][6]} {output[i][7]} {output[i][8]} {output[i][9]} {output[i][10]} {output[i][11]}')
def random_blocks():
""" Generate 43 random blocks (board always starts with 43)
Can set seed for consistent testing graph
"""
cells = []
while len(cells) != 43:
cell_to_add = (random.randint(0, 11), random.randint(0, 9))
if cell_to_add not in cells:
cells.append(cell_to_add)
return cells
|
def locate_traps(graph): | random_line_split |
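random_blocks notes that a seed can be fixed for a consistent testing graph; since it draws from the module-level random generator, a usage sketch is simply:

import random

random.seed(42)           # make the 43 starting blocks reproducible across runs
blocks = random_blocks()  # same board every time with the same seed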
|
my_agent.py | , 8), (6, 9), (8, 9)],
(8,0): [(8, 1), (7, 0), (9, 0)],
(8,1): [(8, 2), (8, 0), (7, 1), (9, 1)],
(8,2): [(8, 3), (8, 1), (7, 2), (9, 2)],
(8,3): [(8, 4), (8, 2), (7, 3), (9, 3)],
(8,4): [(8, 5), (8, 3), (7, 4), (9, 4)],
(8,5): [(8, 6), (8, 4), (7, 5), (9, 5)],
(8,6): [(8, 7), (8, 5), (7, 6), (9, 6)],
(8,7): [(8, 8), (8, 6), (7, 7), (9, 7)],
(8,8): [(8, 9), (8, 7), (7, 8), (9, 8)],
(8,9): [(8, 8), (7, 9), (9, 9)],
(9,0): [(9, 1), (8, 0), (10, 0)],
(9,1): [(9, 2), (9, 0), (8, 1), (10, 1)],
(9,2): [(9, 3), (9, 1), (8, 2), (10, 2)],
(9,3): [(9, 4), (9, 2), (8, 3), (10, 3)],
(9,4): [(9, 5), (9, 3), (8, 4), (10, 4)],
(9,5): [(9, 6), (9, 4), (8, 5), (10, 5)],
(9,6): [(9, 7), (9, 5), (8, 6), (10, 6)],
(9,7): [(9, 8), (9, 6), (8, 7), (10, 7)],
(9,8): [(9, 9), (9, 7), (8, 8), (10, 8)],
(9,9): [(9, 8), (8, 9), (10, 9)],
(10,0): [(10, 1), (9, 0), (11, 0)],
(10,1): [(10, 2), (10, 0), (9, 1), (11, 1)],
(10,2): [(10, 3), (10, 1), (9, 2), (11, 2)],
(10,3): [(10, 4), (10, 2), (9, 3), (11, 3)],
(10,4): [(10, 5), (10, 3), (9, 4), (11, 4)],
(10,5): [(10, 6), (10, 4), (9, 5), (11, 5)],
(10,6): [(10, 7), (10, 5), (9, 6), (11, 6)],
(10,7): [(10, 8), (10, 6), (9, 7), (11, 7)],
(10,8): [(10, 9), (10, 7), (9, 8), (11, 8)],
(10,9): [(10, 8), (9, 9), (11, 9)],
(11,0): [(11, 1), (10, 0)],
(11,1): [(11, 2), (11, 0), (10, 1)],
(11,2): [(11, 3), (11, 1), (10, 2)],
(11,3): [(11, 4), (11, 2), (10, 3)],
(11,4): [(11, 5), (11, 3), (10, 4)],
(11,5): [(11, 6), (11, 4), (10, 5)],
(11,6): [(11, 7), (11, 5), (10, 6)],
(11,7): [(11, 8), (11, 6), (10, 7)],
(11,8): [(11, 9), (11, 7), (10, 8)],
(11,9): [(11, 8), (10, 9)],
}
for block in blocks:
for neighbour in graph[block]:
graph[neighbour].remove(block)
graph.pop(block)
return graph
# generate graph edges in the up, down, left, right directions that are within the 12 * 10 bounds
# for i in range(12):
# for j in range(10):
# tuples = []
# if j + 1 < 10:
# tuples.append((i, j + 1))
# if j - 1 >= 0:
# tuples.append((i, j - 1))
# if i - 1 >= 0:
# tuples.append((i - 1, j))
# if i + 1 < 12:
# tuples.append((i + 1, j))
# print(f'({i},{j}): {tuples},')
def print_graph(graph, traps=None):
"Prints graph in a nice format"
output = [
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]'],
['[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]', '[x]']
]
for node in graph:
output[node[1]][node[0]] = '[ ]'
if traps is not None:
for trap in traps:
output[trap[1]][trap[0]] = '[O]'
for i in range(9, -1, -1):
print(f'{output[i][0]} {output[i][1]} {output[i][2]} {output[i][3]} {output[i][4]} {output[i][5]} {output[i][6]} {output[i][7]} {output[i][8]} {output[i][9]} {output[i][10]} {output[i][11]}')
def random_blocks():
| """ Generate 43 random blocks (board always starts with 43)
Can set seed for consistent testing graph
"""
cells = []
while len(cells) != 43:
cell_to_add = (random.randint(0, 11), random.randint(0, 9))
if cell_to_add not in cells:
cells.append(cell_to_add)
return cells | identifier_body |
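The commented-out loop earlier in this file shows how the hard-coded 12x10 adjacency dict was generated. An equivalent compact sketch with the same bounds checks and neighbour order (the name grid_graph is illustrative):

def grid_graph(width=12, height=10):
    graph = {}
    for x in range(width):
        for y in range(height):
            nbrs = []
            if y + 1 < height: nbrs.append((x, y + 1))
            if y - 1 >= 0:     nbrs.append((x, y - 1))
            if x - 1 >= 0:     nbrs.append((x - 1, y))
            if x + 1 < width:  nbrs.append((x + 1, y))
            graph[(x, y)] = nbrs
    return graph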
|
calculations.js | Masa : true, // HP
mlMalayalaMasa : true, // HP
malayalaMasaNum : true, // HP
adhimasa : true,
paksa : true,
tithiDay : true,
ftithi : true,
naksatra : true,
malayalaNaksatra : true, // HP
mlMalayalaNaksatra : true, // HP
sunriseTime : {
hour : true,
minute : true
}
},
setPaksa : function () {
// TODO: Add Tests if/when feasible
if (15 < this.calendarData.tithiDay) | else {
this.calendarData.paksa = 'Suklapaksa';
}
},
fromGregorian : function (settings, gregorianDate) {
// TODO: Add Tests if/when feasible
_setConstants(settings);
var julianDay = calendar.gregorianDateToJulianDay(gregorianDate);
var ahargana = calendar.julianDayToAhargana(julianDay);
julianDay = math.truncate(julianDay + 0.5);
var aharganaRounded = math.truncate(ahargana + 0.5);
// Definition of desantara
// http://books.google.com/books?id=kt9DIY1g9HYC&pg=PA683&lpg=PA683&dq=desantara&source=bl&ots=NLd1wFKFfN&sig=jCfG95R-6eiSff3L73DCodijo1I&hl=en&sa=X&ei=uKgHU__uKOr7yAGm0YGoBQ&ved=0CF8Q6AEwCDgK#v=onepage&q=desantara&f=false
var desantara = (settings.longitude - locations.Ujjain.longitude) / 360;
celestial.setAyanamsa(ahargana);
// at 6 o'clock
ahargana += 0.25;
// desantara
ahargana -= desantara;
// time of sunrise at local latitude
var equationOfTime = celestial.getDaylightEquation(gregorianDate.getFullYear(), settings.latitude, ahargana);
ahargana -= equationOfTime;
this.calendarData.sunriseTime = celestial.getSunriseTime(equationOfTime);
// Lunar apogee and node at sunrise
celestial.planetarySystem.planets.candrocca.MeanPosition = celestial.getMeanLongitude(ahargana, celestial.planetarySystem.planets.candrocca.YugaRotation) + 90;
celestial.planetarySystem.planets.candrocca.MeanPosition = math.zero360(celestial.planetarySystem.planets.candrocca.MeanPosition);
celestial.planetarySystem.planets.rahu.MeanPosition = celestial.getMeanLongitude(ahargana, celestial.planetarySystem.planets.rahu.YugaRotation) + 180;
celestial.planetarySystem.planets.rahu.MeanPosition = math.zero360(celestial.planetarySystem.planets.rahu.MeanPosition);
// mean and true sun at sunrise
var meanSolarLongitude = celestial.getMeanLongitude(ahargana, celestial.planetarySystem.planets.sun.YugaRotation);
celestial.planetarySystem.planets.sun.MeanPosition = meanSolarLongitude;
var trueSolarLongitude = math.zero360(meanSolarLongitude -
celestial.getMandaEquation((meanSolarLongitude - celestial.planetarySystem.planets.sun.Apogee), 'sun'));
celestial.planetarySystem.planets.sun.TruePosition = trueSolarLongitude;
// mean and true moon at sunrise
var meanLunarLongitude = celestial.getMeanLongitude(ahargana, celestial.planetarySystem.planets.moon.YugaRotation);
celestial.planetarySystem.planets.moon.MeanPosition = meanLunarLongitude;
celestial.planetarySystem.planets.moon.Apogee = celestial.planetarySystem.planets.candrocca.MeanPosition;
var trueLunarLongitude = math.zero360(meanLunarLongitude -
celestial.getMandaEquation((meanLunarLongitude - celestial.planetarySystem.planets.moon.Apogee), 'moon'));
celestial.planetarySystem.planets.moon.TruePosition = trueLunarLongitude;
// finding tithi and longitude of conjunction
var tithi = celestial.getTithi(trueLunarLongitude, trueSolarLongitude);
this.calendarData.tithiDay = math.truncate(tithi) + 1;
this.calendarData.ftithi = math.fractional(tithi);
this.setPaksa();
// last conjunction
var lastConjunctionLongitude = celestial.getLastConjunctionLongitude(ahargana, tithi);
// next conjunction
var nextConjunctionLongitude = celestial.getNextConjunctionLongitude(ahargana, tithi);
this.calendarData.adhimasa = calendar.getAdhimasa(lastConjunctionLongitude, nextConjunctionLongitude);
this.calendarData.masaNum = calendar.getMasaNum(trueSolarLongitude, lastConjunctionLongitude);
// TODO: Move the below function to within KollavarshamDate class
this.calendarData.masa = calendar.getMasaName(this.calendarData.masaNum).saka;
var sauraMasaMonthDay = calendar.getSauraMasaMonthDay(ahargana, desantara);
var sauraMasaNum = sauraMasaMonthDay.month;
var sauraMasaDay = sauraMasaMonthDay.day;
// TODO: Move the below function to within KollavarshamDate class
this.calendarData.sauraMasa = calendar.getMasaName(sauraMasaNum).saura;
this.calendarData.malayalaMasaNum = (sauraMasaNum - 4 + 12 ) % 12;
// TODO: Move the below function to within KollavarshamDate class
var malayalaMasa = calendar.getMasaName(this.calendarData.malayalaMasaNum);
this.calendarData.malayalaMasa = malayalaMasa.enMalayalam;
this.calendarData.mlMalayalaMasa = malayalaMasa.mlMalayalam;
var naksatra = calendar.getNaksatra(trueLunarLongitude);
this.calendarData.naksatra = naksatra.saka;
this.calendarData.malayalaNaksatra = naksatra.enMalayalam;
this.calendarData.mlMalayalaNaksatra = naksatra.mlMalayalam;
// kali and Saka era
this.calendarData.YearKali = calendar.aharganaToKali(ahargana + ( 4 - this.calendarData.masaNum ) * 30);
this.calendarData.YearSaka = calendar.kaliToSaka(this.calendarData.YearKali);
this.calendarData.YearVikrama = this.calendarData.YearSaka + 135;
// Sewell p.45 - https://archive.org/stream/indiancalendarwi00sewerich#page/45/mode/1up
var malayalamYear = this.calendarData.YearSaka - 747 +
math.truncate((this.calendarData.masaNum - this.calendarData.malayalaMasaNum + 12) / 12);
// The below was a separate method named calculations.planetary (ported from planetary_calculations in perl)
var planets = ['mercury', 'venus', 'mars', 'jupiter', 'saturn'];
for (var i = 0; i < planets.length; i++) {
celestial.planetarySystem.planets[planets[i]].MeanPosition = celestial.getMeanLongitude(ahargana, celestial.planetarySystem.planets[planets[i]].Rotation);
celestial.planetarySystem.planets[planets[i]].TruePosition = celestial.getTrueLongitude(ahargana, meanSolarLongitude, planets[i]);
}
var kollavarshamDate = new KollavarshamDate(malayalamYear, sauraMasaNum, sauraMasaDay);
kollavarshamDate.gregorianDate = gregorianDate;
kollavarshamDate.julianDay = julianDay;
var weekdayName = calendar.julianDayToWeekday(julianDay);
kollavarshamDate.weekdayName = weekdayName.en;
kollavarshamDate.mlWeekdayName = weekdayName.ml;
kollavarshamDate.ahargana = aharganaRounded;
kollavarshamDate.calendarData = this.calendarData;
return kollavarshamDate;
},
toGregorian : function (settings) {
// TODO: Implement this to convert a Kollavarsham date to Gregorian after figuring out the samkranti discrepancies
// between Saka year and Malayalam year
/* This is how it works in Perl - Set these below variables before calling this */
/* this.calendarData.YearSaka, this.calendarData.masaNum, globals.paksa, globals.tithi | {
this.calendarData.tithiDay -= 15;
this.calendarData.paksa = 'Krsnapaksa';
} | conditional_block |
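setPaksa above folds a 1-30 tithi count into a fortnight: values above 15 become Krsnapaksa with the day re-based by subtracting 15, everything else stays Suklapaksa. A tiny standalone sketch of the same mapping, written in Python for illustration (the name paksa_of is not part of the source):

def paksa_of(tithi_day):
    # 1..15 -> waxing fortnight; 16..30 -> waning fortnight, day re-based
    if tithi_day > 15:
        return 'Krsnapaksa', tithi_day - 15   # e.g. 22 -> day 7 of Krsnapaksa
    return 'Suklapaksa', tithi_day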
calculations.js | Masa : true, // HP
mlMalayalaMasa : true, // HP
malayalaMasaNum : true, // HP
adhimasa : true,
paksa : true,
tithiDay : true, | malayalaNaksatra : true, // HP
mlMalayalaNaksatra : true, // HP
sunriseTime : {
hour : true,
minute : true
}
},
setPaksa : function () {
// TODO: Add Tests if/when feasible
if (15 < this.calendarData.tithiDay) {
this.calendarData.tithiDay -= 15;
this.calendarData.paksa = 'Krsnapaksa';
} else {
this.calendarData.paksa = 'Suklapaksa';
}
},
fromGregorian : function (settings, gregorianDate) {
// TODO: Add Tests if/when feasible
_setConstants(settings);
var julianDay = calendar.gregorianDateToJulianDay(gregorianDate);
var ahargana = calendar.julianDayToAhargana(julianDay);
julianDay = math.truncate(julianDay + 0.5);
var aharganaRounded = math.truncate(ahargana + 0.5);
// Definition of desantara
// http://books.google.com/books?id=kt9DIY1g9HYC&pg=PA683&lpg=PA683&dq=desantara&source=bl&ots=NLd1wFKFfN&sig=jCfG95R-6eiSff3L73DCodijo1I&hl=en&sa=X&ei=uKgHU__uKOr7yAGm0YGoBQ&ved=0CF8Q6AEwCDgK#v=onepage&q=desantara&f=false
var desantara = (settings.longitude - locations.Ujjain.longitude) / 360;
celestial.setAyanamsa(ahargana);
// at 6 o'clock
ahargana += 0.25;
// desantara
ahargana -= desantara;
// time of sunrise at local latitude
var equationOfTime = celestial.getDaylightEquation(gregorianDate.getFullYear(), settings.latitude, ahargana);
ahargana -= equationOfTime;
this.calendarData.sunriseTime = celestial.getSunriseTime(equationOfTime);
// Lunar apogee and node at sunrise
celestial.planetarySystem.planets.candrocca.MeanPosition = celestial.getMeanLongitude(ahargana, celestial.planetarySystem.planets.candrocca.YugaRotation) + 90;
celestial.planetarySystem.planets.candrocca.MeanPosition = math.zero360(celestial.planetarySystem.planets.candrocca.MeanPosition);
celestial.planetarySystem.planets.rahu.MeanPosition = celestial.getMeanLongitude(ahargana, celestial.planetarySystem.planets.rahu.YugaRotation) + 180;
celestial.planetarySystem.planets.rahu.MeanPosition = math.zero360(celestial.planetarySystem.planets.rahu.MeanPosition);
// mean and true sun at sunrise
var meanSolarLongitude = celestial.getMeanLongitude(ahargana, celestial.planetarySystem.planets.sun.YugaRotation);
celestial.planetarySystem.planets.sun.MeanPosition = meanSolarLongitude;
var trueSolarLongitude = math.zero360(meanSolarLongitude -
celestial.getMandaEquation((meanSolarLongitude - celestial.planetarySystem.planets.sun.Apogee), 'sun'));
celestial.planetarySystem.planets.sun.TruePosition = trueSolarLongitude;
// mean and true moon at sunrise
var meanLunarLongitude = celestial.getMeanLongitude(ahargana, celestial.planetarySystem.planets.moon.YugaRotation);
celestial.planetarySystem.planets.moon.MeanPosition = meanLunarLongitude;
celestial.planetarySystem.planets.moon.Apogee = celestial.planetarySystem.planets.candrocca.MeanPosition;
var trueLunarLongitude = math.zero360(meanLunarLongitude -
celestial.getMandaEquation((meanLunarLongitude - celestial.planetarySystem.planets.moon.Apogee), 'moon'));
celestial.planetarySystem.planets.moon.TruePosition = trueLunarLongitude;
// finding tithi and longitude of conjunction
var tithi = celestial.getTithi(trueLunarLongitude, trueSolarLongitude);
this.calendarData.tithiDay = math.truncate(tithi) + 1;
this.calendarData.ftithi = math.fractional(tithi);
this.setPaksa();
// last conjunction
var lastConjunctionLongitude = celestial.getLastConjunctionLongitude(ahargana, tithi);
// next conjunction
var nextConjunctionLongitude = celestial.getNextConjunctionLongitude(ahargana, tithi);
this.calendarData.adhimasa = calendar.getAdhimasa(lastConjunctionLongitude, nextConjunctionLongitude);
this.calendarData.masaNum = calendar.getMasaNum(trueSolarLongitude, lastConjunctionLongitude);
// TODO: Move the below function to within KollavarshamDate class
this.calendarData.masa = calendar.getMasaName(this.calendarData.masaNum).saka;
var sauraMasaMonthDay = calendar.getSauraMasaMonthDay(ahargana, desantara);
var sauraMasaNum = sauraMasaMonthDay.month;
var sauraMasaDay = sauraMasaMonthDay.day;
// TODO: Move the below function to within KollavarshamDate class
this.calendarData.sauraMasa = calendar.getMasaName(sauraMasaNum).saura;
this.calendarData.malayalaMasaNum = (sauraMasaNum - 4 + 12 ) % 12;
// TODO: Move the below function to within KollavarshamDate class
var malayalaMasa = calendar.getMasaName(this.calendarData.malayalaMasaNum);
this.calendarData.malayalaMasa = malayalaMasa.enMalayalam;
this.calendarData.mlMalayalaMasa = malayalaMasa.mlMalayalam;
var naksatra = calendar.getNaksatra(trueLunarLongitude);
this.calendarData.naksatra = naksatra.saka;
this.calendarData.malayalaNaksatra = naksatra.enMalayalam;
this.calendarData.mlMalayalaNaksatra = naksatra.mlMalayalam;
// kali and Saka era
this.calendarData.YearKali = calendar.aharganaToKali(ahargana + ( 4 - this.calendarData.masaNum ) * 30);
this.calendarData.YearSaka = calendar.kaliToSaka(this.calendarData.YearKali);
this.calendarData.YearVikrama = this.calendarData.YearSaka + 135;
// Sewell p.45 - https://archive.org/stream/indiancalendarwi00sewerich#page/45/mode/1up
var malayalamYear = this.calendarData.YearSaka - 747 +
math.truncate((this.calendarData.masaNum - this.calendarData.malayalaMasaNum + 12) / 12);
// The below was a separate method named calculations.planetary (ported from planetary_calculations in perl)
var planets = ['mercury', 'venus', 'mars', 'jupiter', 'saturn'];
for (var i = 0; i < planets.length; i++) {
celestial.planetarySystem.planets[planets[i]].MeanPosition = celestial.getMeanLongitude(ahargana, celestial.planetarySystem.planets[planets[i]].Rotation);
celestial.planetarySystem.planets[planets[i]].TruePosition = celestial.getTrueLongitude(ahargana, meanSolarLongitude, planets[i]);
}
var kollavarshamDate = new KollavarshamDate(malayalamYear, sauraMasaNum, sauraMasaDay);
kollavarshamDate.gregorianDate = gregorianDate;
kollavarshamDate.julianDay = julianDay;
var weekdayName = calendar.julianDayToWeekday(julianDay);
kollavarshamDate.weekdayName = weekdayName.en;
kollavarshamDate.mlWeekdayName = weekdayName.ml;
kollavarshamDate.ahargana = aharganaRounded;
kollavarshamDate.calendarData = this.calendarData;
return kollavarshamDate;
},
toGregorian : function (settings) {
// TODO: Implement this to convert a Kollavarsham date to Gregorian after figuring out the samkranti discrepancies
// between Saka year and Malayalam year
/* This is how it works in Perl - Set these below variables before calling this */
/* this.calendarData.YearSaka, this.calendarData.masaNum, globals.paksa, globals.tithiDay | ftithi : true,
naksatra : true, | random_line_split |
global.go | fmt.Sprintf("'%s' must be 'text' or 'json'", format))
}
}
}
if rawURL := c.GetV1().GetExternal().GetAutomate().GetNode().GetValue(); rawURL != "" {
externalAutomateURL, err := url.Parse(rawURL)
if err != nil {
cfgErr.AddInvalidValue("global.v1.external.automate.node", "must be url")
} else {
scheme := externalAutomateURL.Scheme
switch scheme {
case "", "http", "https":
default:
cfgErr.AddInvalidValue("global.v1.external.automate.node", "only https and http are supported")
}
}
}
switch c.GetV1().GetExternal().GetAutomate().GetAuth().GetScheme().GetValue() {
case "", "token":
break
default:
cfgErr.AddInvalidValue("global.v1.external.data_collector.auth.scheme", "scheme must be one of '', 'token'")
}
if externalES := c.GetV1().GetExternal().GetElasticsearch(); externalES.GetEnable().GetValue() {
// External ES nodes must all have either http urls or https urls
nodes := externalES.GetNodes()
httpsNodes := make([]string, 0)
for _, n := range nodes {
ns := n.GetValue()
if strings.HasPrefix(ns, "https") {
httpsNodes = append(httpsNodes, ns)
}
}
if len(httpsNodes) > 0 && len(httpsNodes) < len(nodes) {
cfgErr.AddInvalidValue("global.v1.external.elasticsearch.nodes", "Cannot mix http and https nodes")
}
// Only one of root_cert or root_cert_file has been specified
rc := c.GetV1().GetExternal().GetElasticsearch().GetSsl().GetRootCert().GetValue()
rcf := c.GetV1().GetExternal().GetElasticsearch().GetSsl().GetRootCertFile().GetValue()
if rc != "" && rcf != "" {
cfgErr.AddInvalidValue("global.v1.external.elasticsearch.ssl", "Specify either global.v1.external.elasticsearch.ssl.root_cert or global.v1.external.elasticsearch.ssl.root_cert_file, but not both.")
}
auth := c.GetV1().GetExternal().GetElasticsearch().GetAuth()
scheme := auth.GetScheme().GetValue()
switch scheme {
case "basic_auth":
u := auth.GetBasicAuth().GetUsername().GetValue()
p := auth.GetBasicAuth().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.basic_auth.username")
}
if p == "" {
args := []string{
"show",
"userconfig.es_password",
}
execGetPass := exec.Command(getLatestPlatformToolsPath()+"/bin/secrets-helper", args...)
getPass, err := execGetPass.Output()
if err != nil || string(getPass) == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.basic_auth.password")
}
}
case "aws_es":
u := auth.GetAwsEs().GetUsername().GetValue()
p := auth.GetAwsEs().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.aws_es.username")
}
if p == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.aws_es.password")
}
case "":
default:
cfgErr.AddInvalidValue("global.v1.external.elasticsearch.auth.scheme",
"Scheme should be one of 'basic_auth', 'aws_es'.")
}
}
if externalOS := c.GetV1().GetExternal().GetOpensearch(); externalOS.GetEnable().GetValue() {
// External OS nodes must all have either http urls or https urls
nodes := externalOS.GetNodes()
httpsNodes := make([]string, 0)
for _, n := range nodes {
ns := n.GetValue()
if strings.HasPrefix(ns, "https") {
httpsNodes = append(httpsNodes, ns)
}
}
if len(httpsNodes) > 0 && len(httpsNodes) < len(nodes) {
cfgErr.AddInvalidValue("global.v1.external.opensearch.nodes", "Cannot mix http and https nodes")
}
// Only one of root_cert or root_cert_file has been specified
rc := c.GetV1().GetExternal().GetOpensearch().GetSsl().GetRootCert().GetValue()
rcf := c.GetV1().GetExternal().GetOpensearch().GetSsl().GetRootCertFile().GetValue()
if rc != "" && rcf != "" {
cfgErr.AddInvalidValue("global.v1.external.opensearch.ssl", "Specify either global.v1.external.opensearch.ssl.root_cert or global.v1.external.opensearch.ssl.root_cert_file, but not both.")
}
auth := c.GetV1().GetExternal().GetOpensearch().GetAuth()
scheme := auth.GetScheme().GetValue()
switch scheme {
case "basic_auth":
u := auth.GetBasicAuth().GetUsername().GetValue()
p := auth.GetBasicAuth().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.opensearch.auth.basic_auth.username")
}
if p == "" {
args := []string{
"show",
"userconfig.os_password",
}
execGetPass := exec.Command(getLatestPlatformToolsPath()+"/bin/secrets-helper", args...)
getPass, err := execGetPass.Output()
if err != nil || string(getPass) == "" {
cfgErr.AddMissingKey("global.v1.external.opensearch.auth.basic_auth.password")
}
}
case "aws_os":
u := auth.GetAwsOs().GetUsername().GetValue()
p := auth.GetAwsOs().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.opensearch.auth.aws_os.username")
}
if p == "" {
cfgErr.AddMissingKey("global.v1.external.opensearch.auth.aws_os.password")
}
case "":
default:
cfgErr.AddInvalidValue("global.v1.external.opensearch.auth.scheme",
"Scheme should be one of 'basic_auth', 'aws_os'.")
}
}
if externalPG := c.GetV1().GetExternal().GetPostgresql(); externalPG.GetEnable().GetValue() {
if auth := c.GetV1().GetExternal().GetPostgresql().GetAuth(); auth.GetScheme().GetValue() != "password" {
// use supported auth scheme (currently only password auth is
// supported for postgres)
cfgErr.AddInvalidValue("global.v1.external.postgresql.auth.scheme", "Scheme should be 'password'.")
} else {
// superuser username and password
su := auth.GetPassword().GetSuperuser().GetUsername().GetValue()
sp := auth.GetPassword().GetSuperuser().GetPassword().GetValue()
if su == "" {
cfgErr.AddMissingKey("global.v1.external.postgresql.auth.password.superuser.username")
}
if sp == "" {
cfgErr.AddMissingKey("global.v1.external.postgresql.auth.password.superuser.password")
}
// dbuser username and password
du := auth.GetPassword().GetDbuser().GetUsername().GetValue()
dp := auth.GetPassword().GetDbuser().GetPassword().GetValue()
if du == "" {
cfgErr.AddMissingKey("global.v1.external.postgresql.auth.password.dbuser.username")
}
if dp == "" {
cfgErr.AddMissingKey("global.v1.external.postgresql.auth.password.dbuser.password")
}
}
}
if cfgErr.IsEmpty() {
return nil
}
return cfgErr
}
// ProxyString returns the proxy configuration formatted into the canonical
// HTTP_PROXY style formatting.
func (c *GlobalConfig) ProxyString() *gw.StringValue {
if c.V1.Proxy == nil {
return nil
}
proxy := c.V1.Proxy
if proxy.Host == nil {
return nil
}
b := strings.Builder{}
// NOTE: from testing, it appears that Rust (hab) requires "http://" to be
// at the head of the proxy URLs
b.WriteString("http://") // nolint: errcheck
if proxy.User != nil {
authPart := fmt.Sprintf("%s:%s", proxy.User.Value, proxy.Password.Value)
b.WriteString(url.PathEscape(authPart)) // nolint: errcheck
b.WriteString("@") // nolint: errcheck
}
hostPortPart := fmt.Sprintf("%s:%d", proxy.Host.Value, proxy.Port.Value)
b.WriteString(hostPortPart) // nolint: errcheck
return w.String(b.String())
}
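For illustration: with host proxy.example.com, port 3128, user alice and password s3cret (all hypothetical values), the builder above yields roughly http://alice:s3cret@proxy.example.com:3128, with the user:password part passed through url.PathEscape first; with no user configured it is just http://proxy.example.com:3128.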
// NoProxyString turns a non-empty NoProxy whitelist into a string of comma-separated
// entries for easier consumption by the hab config.
func (c *GlobalConfig) NoProxyString() *gw.StringValue {
// If no proxy is set at all, move along.
if c.V1.Proxy == nil {
return nil
}
// Just return the default list
if c.V1.Proxy.NoProxy == nil | {
return w.String(strings.Join(proxy.DefaultNoProxyEntries, ","))
} | conditional_block |
|
global.go | () error { // nolint gocyclo
cfgErr := NewInvalidConfigError()
if c.GetV1() == nil {
cfgErr.AddMissingKey("global.v1")
return cfgErr
}
if c.GetV1().GetFqdn() == nil {
cfgErr.AddMissingKey("global.v1.fqdn")
}
if len(c.GetV1().GetFrontendTls()) < 1 {
// It's not currently mandatory to configure frontend_tls certs via
// the global config. They can be set on the load_balancer config instead.
//cfgErr.AddMissingKey("global.v1.frontend_tls")
} else {
for _, tls := range c.V1.FrontendTls {
if tls.Cert == "" {
cfgErr.AddMissingKey("global.v1.frontend_tls.cert")
}
if tls.Key == "" {
cfgErr.AddMissingKey("global.v1.frontend_tls.key")
}
// TODO: The load balancer code will copy the FQDN (above) over
// the server_name setting if the server name is set to "" or
// "localhost" It feels wrong to do that in a validation function.
// Maybe we need to add a method on GlobalConfig to return a
// computed fixed up version (?)
// if tls.ServerName == "" {
// cfgErr.AddInvalidValue("global.v1.frontend_tls.server_name", "server_name must be a valid FQDN")
// }
}
}
bu := c.GetV1().GetBackups()
location := bu.GetLocation().GetValue()
switch location {
case "filesystem":
p := bu.GetFilesystem().GetPath().GetValue()
if p == "" {
cfgErr.AddMissingKey("global.v1.backups.filesystem.path")
}
// NOTE: We don't manage the permissions of the backup directory but
// we'd like to prevent the user from setting the backup directory to
// an invalid location. Because we do validation at deploy time,
// restore time, and config patch/set time, we cannot guarantee that
// the backup directory will exist yet or that the hab user exists.
// Therefore, we'll only validate on the happy path: raise a
// validation error if the hab user exists, the backup directory exists,
// and the hab user doesn't have read/write/exec permissions on the
// directory.
ok, err := fileutils.ReadWriteExecutable("hab", p)
if err != nil {
logrus.WithError(err).WithFields(logrus.Fields{
"user": "hab",
"path": p,
}).Debug("failed checking for read/write/exec on path")
}
if err == nil && !ok {
cfgErr.AddInvalidValue(
"global.v1.backups.filesystem.path",
fmt.Sprintf("the 'hab' user must have read/write/exec permissions to path: %s", p),
)
}
case "s3":
if bu.GetS3().GetBucket().GetEndpoint().GetValue() == "" {
cfgErr.AddMissingKey("global.v1.backups.s3.bucket.endpoint")
}
if bu.GetS3().GetBucket().GetName().GetValue() == "" {
cfgErr.AddMissingKey("global.v1.backups.s3.bucket.name")
}
// The user might be relying on IAM for S3 credentials. Here we'll
// make sure that if credentials are provided that both an access_key
// and secret_key have been provided.
if bu.GetS3().GetCredentials() != nil {
access_key := bu.GetS3().GetCredentials().GetAccessKey().GetValue()
secret_key := bu.GetS3().GetCredentials().GetSecretKey().GetValue()
if secret_key != "" || access_key != "" {
if secret_key == "" {
cfgErr.AddMissingKey("global.v1.backups.s3.credentials.secret_key")
}
if access_key == "" {
cfgErr.AddMissingKey("global.v1.backups.s3.credentials.access_key")
}
}
}
case "gcs":
if bu.GetGcs().GetBucket().GetName().GetValue() == "" {
cfgErr.AddMissingKey("global.v1.backups.gcs.bucket.name")
}
gcsJSON := bu.GetGcs().GetCredentials().GetJson().GetValue()
if gcsJSON == "" {
cfgErr.AddMissingKey("global.v1.backups.gcs.credentials.json")
}
default:
// Make sure that if a backup location is specified that is valid. If
// none is given the default configuration "filesystem" location will
// be used.
if location != "" {
cfgErr.AddInvalidValue("global.v1.backups.location", "Must be 'filesystem', 's3', or 'gcs'")
}
}
if log := c.GetV1().Log; log != nil {
if level := log.GetLevel().GetValue(); level != "" {
switch level {
case "debug", "info", "warning", "error", "fatal", "panic":
default:
cfgErr.AddInvalidValue("global.v1.log.level",
fmt.Sprintf("'%s' must be one of 'debug, 'info', 'warning', 'error', 'fatal', 'panic'", level))
}
}
if format := log.GetFormat().GetValue(); format != "" {
// logrus does support custom formatters. For now we'll only
// support the built-in text and json formatters at the global
// level.
if format != "text" && format != "json" {
cfgErr.AddInvalidValue("global.v1.log.format",
fmt.Sprintf("'%s' must be 'text' or 'json'", format))
}
}
}
if rawURL := c.GetV1().GetExternal().GetAutomate().GetNode().GetValue(); rawURL != "" {
externalAutomateURL, err := url.Parse(rawURL)
if err != nil {
cfgErr.AddInvalidValue("global.v1.external.automate.node", "must be url")
} else {
scheme := externalAutomateURL.Scheme
switch scheme {
case "", "http", "https":
default:
cfgErr.AddInvalidValue("global.v1.external.automate.node", "only https and http are supported")
}
}
}
switch c.GetV1().GetExternal().GetAutomate().GetAuth().GetScheme().GetValue() {
case "", "token":
break
default:
cfgErr.AddInvalidValue("global.v1.external.data_collector.auth.scheme", "scheme must be one of '', 'token'")
}
if externalES := c.GetV1().GetExternal().GetElasticsearch(); externalES.GetEnable().GetValue() {
// External ES nodes must all have either http urls or https urls
nodes := externalES.GetNodes()
httpsNodes := make([]string, 0)
for _, n := range nodes {
ns := n.GetValue()
if strings.HasPrefix(ns, "https") {
httpsNodes = append(httpsNodes, ns)
}
}
if len(httpsNodes) > 0 && len(httpsNodes) < len(nodes) {
cfgErr.AddInvalidValue("global.v1.external.elasticsearch.nodes", "Cannot mix http and https nodes")
}
// Only one of root_cert or root_cert_file has been specified
rc := c.GetV1().GetExternal().GetElasticsearch().GetSsl().GetRootCert().GetValue()
rcf := c.GetV1().GetExternal().GetElasticsearch().GetSsl().GetRootCertFile().GetValue()
if rc != "" && rcf != "" {
cfgErr.AddInvalidValue("global.v1.external.elasticsearch.ssl", "Specify either global.v1.external.elasticsearch.ssl.root_cert or global.v1.external.elasticsearch.ssl.root_cert_file, but not both.")
}
auth := c.GetV1().GetExternal().GetElasticsearch().GetAuth()
scheme := auth.GetScheme().GetValue()
switch scheme {
case "basic_auth":
u := auth.GetBasicAuth().GetUsername().GetValue()
p := auth.GetBasicAuth().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.basic_auth.username")
}
if p == "" {
args := []string{
"show",
"userconfig.es_password",
}
execGetPass := exec.Command(getLatestPlatformToolsPath()+"/bin/secrets-helper", args...)
getPass, err := execGetPass.Output()
if err != nil || string(getPass) == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.basic_auth.password")
}
}
case "aws_es":
u := auth.GetAwsEs().GetUsername().GetValue()
p := auth.GetAwsEs().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.aws_es.username")
}
if p == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.aws_es.password")
}
case "":
default:
cfgErr.AddInvalidValue("global.v1.external | Validate | identifier_name |
|
global.go | ",
"path": p,
}).Debug("failed checking for read/write/exec on path")
}
if err == nil && !ok {
cfgErr.AddInvalidValue(
"global.v1.backups.filesystem.path",
fmt.Sprintf("the 'hab' user must have read/write/exec permissions to path: %s", p),
)
}
case "s3":
if bu.GetS3().GetBucket().GetEndpoint().GetValue() == "" {
cfgErr.AddMissingKey("global.v1.backups.s3.bucket.endpoint")
}
if bu.GetS3().GetBucket().GetName().GetValue() == "" {
cfgErr.AddMissingKey("global.v1.backups.s3.bucket.name")
}
// The user might be relying on IAM for S3 credentials. Here we'll
// make sure that if credentials are provided that both an access_key
// and secret_key have been provided.
if bu.GetS3().GetCredentials() != nil {
access_key := bu.GetS3().GetCredentials().GetAccessKey().GetValue()
secret_key := bu.GetS3().GetCredentials().GetSecretKey().GetValue()
if secret_key != "" || access_key != "" {
if secret_key == "" {
cfgErr.AddMissingKey("global.v1.backups.s3.credentials.secret_key")
}
if access_key == "" {
cfgErr.AddMissingKey("global.v1.backups.s3.credentials.access_key")
}
}
}
case "gcs":
if bu.GetGcs().GetBucket().GetName().GetValue() == "" {
cfgErr.AddMissingKey("global.v1.backups.gcs.bucket.name")
}
gcsJSON := bu.GetGcs().GetCredentials().GetJson().GetValue()
if gcsJSON == "" {
cfgErr.AddMissingKey("global.v1.backups.gcs.credentials.json")
}
default:
// Make sure that if a backup location is specified that is valid. If
// none is given the default configuration "filesystem" location will
// be used.
if location != "" {
cfgErr.AddInvalidValue("global.v1.backups.location", "Must be 'filesystem', 's3', or 'gcs'")
}
}
if log := c.GetV1().Log; log != nil {
if level := log.GetLevel().GetValue(); level != "" {
switch level {
case "debug", "info", "warning", "error", "fatal", "panic":
default:
cfgErr.AddInvalidValue("global.v1.log.level",
fmt.Sprintf("'%s' must be one of 'debug, 'info', 'warning', 'error', 'fatal', 'panic'", level))
}
}
if format := log.GetFormat().GetValue(); format != "" {
// logrus does support custom formatters. For now we'll only
// support the built-in text and json formatters at the global
// level.
if format != "text" && format != "json" {
cfgErr.AddInvalidValue("global.v1.log.format",
fmt.Sprintf("'%s' must be 'text' or 'json'", format))
}
}
}
if rawURL := c.GetV1().GetExternal().GetAutomate().GetNode().GetValue(); rawURL != "" {
externalAutomateURL, err := url.Parse(rawURL)
if err != nil {
cfgErr.AddInvalidValue("global.v1.external.automate.node", "must be url")
} else {
scheme := externalAutomateURL.Scheme
switch scheme {
case "", "http", "https":
default:
cfgErr.AddInvalidValue("global.v1.external.automate.node", "only https and http are supported")
}
}
}
switch c.GetV1().GetExternal().GetAutomate().GetAuth().GetScheme().GetValue() {
case "", "token":
break
default:
cfgErr.AddInvalidValue("global.v1.external.data_collector.auth.scheme", "scheme must be one of '', 'token'")
}
if externalES := c.GetV1().GetExternal().GetElasticsearch(); externalES.GetEnable().GetValue() {
// External ES nodes must all have either http urls or https urls
nodes := externalES.GetNodes()
httpsNodes := make([]string, 0)
for _, n := range nodes {
ns := n.GetValue()
if strings.HasPrefix(ns, "https") {
httpsNodes = append(httpsNodes, ns)
}
}
if len(httpsNodes) > 0 && len(httpsNodes) < len(nodes) {
cfgErr.AddInvalidValue("global.v1.external.elasticsearch.nodes", "Cannot mix http and https nodes")
}
// Only one of root_cert or root_cert_file has been specified
rc := c.GetV1().GetExternal().GetElasticsearch().GetSsl().GetRootCert().GetValue()
rcf := c.GetV1().GetExternal().GetElasticsearch().GetSsl().GetRootCertFile().GetValue()
if rc != "" && rcf != "" {
cfgErr.AddInvalidValue("global.v1.external.elasticsearch.ssl", "Specify either global.v1.external.elasticsearch.ssl.root_cert or global.v1.external.elasticsearch.ssl.root_cert_file, but not both.")
}
auth := c.GetV1().GetExternal().GetElasticsearch().GetAuth()
scheme := auth.GetScheme().GetValue()
switch scheme {
case "basic_auth":
u := auth.GetBasicAuth().GetUsername().GetValue()
p := auth.GetBasicAuth().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.basic_auth.username")
}
if p == "" {
args := []string{
"show",
"userconfig.es_password",
}
execGetPass := exec.Command(getLatestPlatformToolsPath()+"/bin/secrets-helper", args...)
getPass, err := execGetPass.Output()
if err != nil || string(getPass) == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.basic_auth.password")
}
}
case "aws_es":
u := auth.GetAwsEs().GetUsername().GetValue()
p := auth.GetAwsEs().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.aws_es.username")
}
if p == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.aws_es.password")
}
case "":
default:
cfgErr.AddInvalidValue("global.v1.external.elasticsearch.auth.scheme",
"Scheme should be one of 'basic_auth', 'aws_es'.")
}
}
if externalOS := c.GetV1().GetExternal().GetOpensearch(); externalOS.GetEnable().GetValue() {
// External OS nodes must all have either http urls or https urls
nodes := externalOS.GetNodes()
httpsNodes := make([]string, 0)
for _, n := range nodes {
ns := n.GetValue()
if strings.HasPrefix(ns, "https") {
httpsNodes = append(httpsNodes, ns)
}
}
if len(httpsNodes) > 0 && len(httpsNodes) < len(nodes) {
cfgErr.AddInvalidValue("global.v1.external.opensearch.nodes", "Cannot mix http and https nodes")
}
// Only one of root_cert or root_cert_file has been specified
rc := c.GetV1().GetExternal().GetOpensearch().GetSsl().GetRootCert().GetValue()
rcf := c.GetV1().GetExternal().GetOpensearch().GetSsl().GetRootCertFile().GetValue()
if rc != "" && rcf != "" {
cfgErr.AddInvalidValue("global.v1.external.opensearch.ssl", "Specify either global.v1.external.opensearch.ssl.root_cert or global.v1.external.opensearch.ssl.root_cert_file, but not both.")
}
auth := c.GetV1().GetExternal().GetOpensearch().GetAuth()
scheme := auth.GetScheme().GetValue()
switch scheme {
case "basic_auth":
u := auth.GetBasicAuth().GetUsername().GetValue()
p := auth.GetBasicAuth().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.opensearch.auth.basic_auth.username")
}
if p == "" {
args := []string{
"show",
"userconfig.os_password",
}
execGetPass := exec.Command(getLatestPlatformToolsPath()+"/bin/secrets-helper", args...)
getPass, err := execGetPass.Output()
if err != nil || string(getPass) == "" {
cfgErr.AddMissingKey("global.v1.external.opensearch.auth.basic_auth.password")
}
}
case "aws_os":
u := auth.GetAwsOs().GetUsername().GetValue()
p := auth.GetAwsOs().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.opensearch.auth.aws_os.username")
} | if p == "" {
cfgErr.AddMissingKey("global.v1.external.opensearch.auth.aws_os.password")
}
case "": | random_line_split |
|
global.go | ", "warning", "error", "fatal", "panic":
default:
cfgErr.AddInvalidValue("global.v1.log.level",
fmt.Sprintf("'%s' must be one of 'debug, 'info', 'warning', 'error', 'fatal', 'panic'", level))
}
}
if format := log.GetFormat().GetValue(); format != "" {
// logrus does support custom formatters. For now we'll only
// support the built-in text and json formatters at the global
// level.
if format != "text" && format != "json" {
cfgErr.AddInvalidValue("global.v1.log.format",
fmt.Sprintf("'%s' must be 'text' or 'json'", format))
}
}
}
if rawURL := c.GetV1().GetExternal().GetAutomate().GetNode().GetValue(); rawURL != "" {
externalAutomateURL, err := url.Parse(rawURL)
if err != nil {
cfgErr.AddInvalidValue("global.v1.external.automate.node", "must be url")
} else {
scheme := externalAutomateURL.Scheme
switch scheme {
case "", "http", "https":
default:
cfgErr.AddInvalidValue("global.v1.external.automate.node", "only https and http are supported")
}
}
}
switch c.GetV1().GetExternal().GetAutomate().GetAuth().GetScheme().GetValue() {
case "", "token":
break
default:
cfgErr.AddInvalidValue("global.v1.external.data_collector.auth.scheme", "scheme must be one of '', 'token'")
}
if externalES := c.GetV1().GetExternal().GetElasticsearch(); externalES.GetEnable().GetValue() {
// External ES nodes must all have either http urls or https urls
nodes := externalES.GetNodes()
httpsNodes := make([]string, 0)
for _, n := range nodes {
ns := n.GetValue()
if strings.HasPrefix(ns, "https") {
httpsNodes = append(httpsNodes, ns)
}
}
if len(httpsNodes) > 0 && len(httpsNodes) < len(nodes) {
cfgErr.AddInvalidValue("global.v1.external.elasticsearch.nodes", "Cannot mix http and https nodes")
}
// Only one of root_cert or root_cert_file has been specified
rc := c.GetV1().GetExternal().GetElasticsearch().GetSsl().GetRootCert().GetValue()
rcf := c.GetV1().GetExternal().GetElasticsearch().GetSsl().GetRootCertFile().GetValue()
if rc != "" && rcf != "" {
cfgErr.AddInvalidValue("global.v1.external.elasticsearch.ssl", "Specify either global.v1.external.elasticsearch.ssl.root_cert or global.v1.external.elasticsearch.ssl.root_cert_file, but not both.")
}
auth := c.GetV1().GetExternal().GetElasticsearch().GetAuth()
scheme := auth.GetScheme().GetValue()
switch scheme {
case "basic_auth":
u := auth.GetBasicAuth().GetUsername().GetValue()
p := auth.GetBasicAuth().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.basic_auth.username")
}
if p == "" {
args := []string{
"show",
"userconfig.es_password",
}
execGetPass := exec.Command(getLatestPlatformToolsPath()+"/bin/secrets-helper", args...)
getPass, err := execGetPass.Output()
if err != nil || string(getPass) == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.basic_auth.password")
}
}
case "aws_es":
u := auth.GetAwsEs().GetUsername().GetValue()
p := auth.GetAwsEs().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.aws_es.username")
}
if p == "" {
cfgErr.AddMissingKey("global.v1.external.elasticsearch.auth.aws_es.password")
}
case "":
default:
cfgErr.AddInvalidValue("global.v1.external.elasticsearch.auth.scheme",
"Scheme should be one of 'basic_auth', 'aws_es'.")
}
}
if externalOS := c.GetV1().GetExternal().GetOpensearch(); externalOS.GetEnable().GetValue() {
// External OS nodes must all have either http urls or https urls
nodes := externalOS.GetNodes()
httpsNodes := make([]string, 0)
for _, n := range nodes {
ns := n.GetValue()
if strings.HasPrefix(ns, "https") {
httpsNodes = append(httpsNodes, ns)
}
}
if len(httpsNodes) > 0 && len(httpsNodes) < len(nodes) {
cfgErr.AddInvalidValue("global.v1.external.opensearch.nodes", "Cannot mix http and https nodes")
}
// Only one of root_cert or root_cert_file has been specified
rc := c.GetV1().GetExternal().GetOpensearch().GetSsl().GetRootCert().GetValue()
rcf := c.GetV1().GetExternal().GetOpensearch().GetSsl().GetRootCertFile().GetValue()
if rc != "" && rcf != "" {
cfgErr.AddInvalidValue("global.v1.external.opensearch.ssl", "Specify either global.v1.external.opensearch.ssl.root_cert or global.v1.external.opensearch.ssl.root_cert_file, but not both.")
}
auth := c.GetV1().GetExternal().GetOpensearch().GetAuth()
scheme := auth.GetScheme().GetValue()
switch scheme {
case "basic_auth":
u := auth.GetBasicAuth().GetUsername().GetValue()
p := auth.GetBasicAuth().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.opensearch.auth.basic_auth.username")
}
if p == "" {
args := []string{
"show",
"userconfig.os_password",
}
execGetPass := exec.Command(getLatestPlatformToolsPath()+"/bin/secrets-helper", args...)
getPass, err := execGetPass.Output()
if err != nil || string(getPass) == "" {
cfgErr.AddMissingKey("global.v1.external.opensearch.auth.basic_auth.password")
}
}
case "aws_os":
u := auth.GetAwsOs().GetUsername().GetValue()
p := auth.GetAwsOs().GetPassword().GetValue()
if u == "" {
cfgErr.AddMissingKey("global.v1.external.opensearch.auth.aws_os.username")
}
if p == "" {
cfgErr.AddMissingKey("global.v1.external.opensearch.auth.aws_os.password")
}
case "":
default:
cfgErr.AddInvalidValue("global.v1.external.opensearch.auth.scheme",
"Scheme should be one of 'basic_auth', 'aws_os'.")
}
}
if externalPG := c.GetV1().GetExternal().GetPostgresql(); externalPG.GetEnable().GetValue() {
if auth := c.GetV1().GetExternal().GetPostgresql().GetAuth(); auth.GetScheme().GetValue() != "password" {
// use supported auth scheme (currently only password auth is
// supported for postgres)
cfgErr.AddInvalidValue("global.v1.external.postgresql.auth.scheme", "Scheme should be 'password'.")
} else {
// superuser username and password
su := auth.GetPassword().GetSuperuser().GetUsername().GetValue()
sp := auth.GetPassword().GetSuperuser().GetPassword().GetValue()
if su == "" {
cfgErr.AddMissingKey("global.v1.external.postgresql.auth.password.superuser.username")
}
if sp == "" {
cfgErr.AddMissingKey("global.v1.external.postgresql.auth.password.superuser.password")
}
// dbuser username and password
du := auth.GetPassword().GetDbuser().GetUsername().GetValue()
dp := auth.GetPassword().GetDbuser().GetPassword().GetValue()
if du == "" {
cfgErr.AddMissingKey("global.v1.external.postgresql.auth.password.dbuser.username")
}
if dp == "" {
cfgErr.AddMissingKey("global.v1.external.postgresql.auth.password.dbuser.password")
}
}
}
if cfgErr.IsEmpty() {
return nil
}
return cfgErr
}
// ProxyString returns the proxy configuration formatted in the canonical
// HTTP_PROXY style.
func (c *GlobalConfig) ProxyString() *gw.StringValue | {
if c.V1.Proxy == nil {
return nil
}
proxy := c.V1.Proxy
if proxy.Host == nil {
return nil
}
b := strings.Builder{}
// NOTE: from testing, it appears that Rust (hab) requires "http://" to be
// at the head of the proxy URLs
b.WriteString("http://") // nolint: errcheck
if proxy.User != nil {
authPart := fmt.Sprintf("%s:%s", proxy.User.Value, proxy.Password.Value)
b.WriteString(url.PathEscape(authPart)) // nolint: errcheck
b.WriteString("@") // nolint: errcheck
} | identifier_body |
|
binocular_input_topographic_map.py | 'sigma_form_lateral': sigma_form_lateral,
'p_form_lateral': p_form_lateral,
'p_form_forward': p_form_forward,
'p_elim_dep': p_elim_dep,
'p_elim_pot': p_elim_pot,
'f_rew': f_rew,
'lateral_inhibition': args.lateral_inhibition,
'delay': args.delay,
'b': b,
't_minus': tau_minus,
't_plus': tau_plus,
'tau_refrac': args.tau_refrac,
'a_minus': a_minus,
'a_plus': a_plus
}
if args.gaussian_input:
gen_rate = generate_gaussian_input_rates
else:
gen_rate = generate_rates
# +-------------------------------------------------------------------+
# | Initial network setup |
# +-------------------------------------------------------------------+
# Need to setup the moving input
one_row = np.asarray(np.arange(16) % 2, dtype=bool)
binoc_positions = np.asarray([one_row if i % 2 == 0 else np.logical_not(one_row) for i in range(16)])
left_positions = np.where(binoc_positions==0)
right_positions = np.where(binoc_positions==1)
positions = [left_positions, right_positions]
if case == CASE_REW_NO_CORR:
raise NotImplementedError
elif case == CASE_CORR_AND_REW or case == CASE_CORR_NO_REW:
rates = np.empty((simtime // t_stim, grid[0], grid[1]))
for rate_id in range(simtime // t_stim):
rand_offset = np.random.randint(0, N_layer//2)
stim_position = (positions[rate_id%2][0][rand_offset], positions[rate_id%2][1][rand_offset])
assert binoc_positions[stim_position] == rate_id%2
r = gen_rate(stim_position,
f_base=f_base,
grid=grid,
f_peak=f_peak,
sigma_stim=sigma_stim)
rates[rate_id, :, :] = r
rates = rates.reshape(simtime // t_stim, N_layer)
source_pop = sim.Population(N_layer,
sim.SpikeSourcePoissonVariable,
{'rate': rates,
'start': 100,
'duration': simtime,
'rate_interval_duration': t_stim
}, label="Variable-rate Poisson spike source")
ff_s = np.zeros(N_layer, dtype=np.uint)
lat_s = np.zeros(N_layer, dtype=np.uint)
init_ff_connections = []
init_lat_connections = []
if args.initial_connectivity_file is None:
raise NotImplementedError
else:
if "npz" in args.initial_connectivity_file:
initial_connectivity = np.load(args.initial_connectivity_file)
else:
import scipy.io as io
initial_connectivity = io.loadmat(args.initial_connectivity_file)
conn = initial_connectivity['ConnPostToPre'] - 1
weight = initial_connectivity['WeightPostToPre']
for target in range(conn.shape[1]):
for index in range(conn.shape[0]):
if conn[index, target] >= 0:
if conn[index, target] < N_layer:
init_ff_connections.append(
(conn[index, target], target,
weight[index, target], 1))
else:
init_lat_connections.append(
(conn[index, target] - N_layer, target,
weight[index, target], 1))
# Neuron populations
target_pop = sim.Population(N_layer, model, cell_params, label="TARGET_POP")
# Putting this population on chip (0, 1) makes it easier to copy the provenance
# data somewhere else
target_pop.set_constraint(PlacerChipAndCoreConstraint(0, 1))
# Connections
# Plastic Connections between pre_pop and post_pop
stdp_model = sim.STDPMechanism(
timing_dependence=sim.SpikePairRule(tau_plus=tau_plus,
tau_minus=tau_minus),
weight_dependence=sim.AdditiveWeightDependence(w_min=0, w_max=g_max,
# A_plus=0.02, A_minus=0.02
A_plus=a_plus,
A_minus=a_minus)
)
if case == CASE_CORR_AND_REW or case == CASE_REW_NO_CORR:
structure_model_w_stdp = sim.StructuralMechanism(
stdp_model=stdp_model,
weight=g_max,
delay=args.delay,
s_max=s_max,
grid=grid,
f_rew=f_rew,
lateral_inhibition=args.lateral_inhibition,
random_partner=args.random_partner,
p_elim_dep=p_elim_dep,
p_elim_pot=p_elim_pot,
sigma_form_forward=sigma_form_forward,
sigma_form_lateral=sigma_form_lateral,
p_form_forward=p_form_forward,
p_form_lateral=p_form_lateral
)
elif case == CASE_CORR_NO_REW:
structure_model_w_stdp = stdp_model
# structure_model_w_stdp = sim.StructuralMechanism(weight=g_max, s_max=s_max)
ff_projection = sim.Projection(
source_pop, target_pop,
sim.FromListConnector(init_ff_connections),
synapse_dynamics=sim.SynapseDynamics(slow=structure_model_w_stdp),
label="plastic_ff_projection"
)
lat_projection = sim.Projection(
target_pop, target_pop,
sim.FromListConnector(init_lat_connections),
synapse_dynamics=sim.SynapseDynamics(slow=structure_model_w_stdp),
label="plastic_lat_projection",
target="inhibitory" if args.lateral_inhibition else "excitatory"
)
# +-------------------------------------------------------------------+
# | Simulation and results |
# +-------------------------------------------------------------------+
# Record neurons' potentials
# target_pop.record_v()
# Record spikes
# if case == CASE_REW_NO_CORR:
if args.record_source:
source_pop.record()
target_pop.record()
# Run simulation
pre_spikes = []
post_spikes = []
pre_sources = []
pre_targets = []
pre_weights = []
pre_delays = []
post_sources = []
post_targets = []
post_weights = []
post_delays = []
# rates_history = np.zeros((16, 16, simtime // t_stim))
e = None
print "Starting the sim"
no_runs = simtime // t_record
run_duration = t_record
try:
for current_run in range(no_runs):
print "run", current_run + 1, "of", no_runs
sim.run(run_duration)
if (current_run + 1) * run_duration % t_record == 0:
pre_weights.append(
np.array([
ff_projection._get_synaptic_data(True, 'source'),
ff_projection._get_synaptic_data(True, 'target'),
ff_projection._get_synaptic_data(True, 'weight'),
ff_projection._get_synaptic_data(True, 'delay')]).T)
post_weights.append(
np.array([
lat_projection._get_synaptic_data(True, 'source'),
lat_projection._get_synaptic_data(True, 'target'),
lat_projection._get_synaptic_data(True, 'weight'),
lat_projection._get_synaptic_data(True, 'delay')]).T)
if args.record_source:
pre_spikes = source_pop.getSpikes(compatible_output=True)
else:
pre_spikes = []
post_spikes = target_pop.getSpikes(compatible_output=True)
# End simulation on SpiNNaker
sim.end()
except Exception as e:
print e
# print("Weights:", plastic_projection.getWeights())
end_time = plt.datetime.datetime.now()
total_time = end_time - start_time
pre_spikes = np.asarray(pre_spikes)
post_spikes = np.asarray(post_spikes)
print "Total time elapsed -- " + str(total_time)
suffix = end_time.strftime("_%H%M%S_%d%m%Y")
if args.filename:
filename = args.filename
else:
filename = "ocular_preference_results" + str(suffix)
total_target_neuron_mean_spike_rate = \
post_spikes.shape[0] / float(simtime) * 1000. / N_layer
np.savez(filename, pre_spikes=pre_spikes,
post_spikes=post_spikes,
init_ff_connections=init_ff_connections,
init_lat_connections=init_lat_connections,
ff_connections=pre_weights,
lat_connections=post_weights,
final_pre_weights=pre_weights[-1],
final_post_weights=post_weights[-1],
simtime=simtime,
sim_params=sim_params,
total_time=total_time,
mean_firing_rate=total_target_neuron_mean_spike_rate,
exception=e,
insult=args.insult)
# Plotting
if args.plot and e is None:
init_ff_conn_network = np.ones((256, 256)) * np.nan
init_lat_conn_network = np.ones((256, 256)) * np.nan
for source, target, weight, delay in init_ff_connections:
if np.isnan(init_ff_conn_network[int(source), int(target)]):
init_ff_conn_network[int(source), int(target)] = weight
else:
init_ff_conn_network[int(source), int(target)] += weight
for source, target, weight, delay in init_lat_connections:
if np.isnan(init_lat_conn_network[int(source), int(target)]):
init_lat_conn_network[int(source), int(target)] = weight
else:
init_lat_conn_network[int(source), int(target)] += weight
def | plot_spikes | identifier_name |
|
binocular_input_topographic_map.py | rac,
'a_minus': a_minus,
'a_plus': a_plus
}
if args.gaussian_input:
gen_rate = generate_gaussian_input_rates
else:
gen_rate = generate_rates
# +-------------------------------------------------------------------+
# | Initial network setup |
# +-------------------------------------------------------------------+
# Need to setup the moving input
one_row = np.asarray(np.arange(16) % 2, dtype=bool)
binoc_positions = np.asarray([one_row if i % 2 == 0 else np.logical_not(one_row) for i in range(16)])
left_positions = np.where(binoc_positions==0)
right_positions = np.where(binoc_positions==1)
positions = [left_positions, right_positions]
if case == CASE_REW_NO_CORR:
raise NotImplementedError
elif case == CASE_CORR_AND_REW or case == CASE_CORR_NO_REW:
rates = np.empty((simtime // t_stim, grid[0], grid[1]))
for rate_id in range(simtime // t_stim):
rand_offset = np.random.randint(0, N_layer//2)
stim_position = (positions[rate_id%2][0][rand_offset], positions[rate_id%2][1][rand_offset])
assert binoc_positions[stim_position] == rate_id%2
r = gen_rate(stim_position,
f_base=f_base,
grid=grid,
f_peak=f_peak,
sigma_stim=sigma_stim)
rates[rate_id, :, :] = r
rates = rates.reshape(simtime // t_stim, N_layer)
source_pop = sim.Population(N_layer,
sim.SpikeSourcePoissonVariable,
{'rate': rates,
'start': 100,
'duration': simtime,
'rate_interval_duration': t_stim
}, label="Variable-rate Poisson spike source")
ff_s = np.zeros(N_layer, dtype=np.uint)
lat_s = np.zeros(N_layer, dtype=np.uint)
init_ff_connections = []
init_lat_connections = []
if args.initial_connectivity_file is None:
raise NotImplementedError
else:
if "npz" in args.initial_connectivity_file:
initial_connectivity = np.load(args.initial_connectivity_file)
else:
import scipy.io as io
initial_connectivity = io.loadmat(args.initial_connectivity_file)
conn = initial_connectivity['ConnPostToPre'] - 1
weight = initial_connectivity['WeightPostToPre']
for target in range(conn.shape[1]):
for index in range(conn.shape[0]):
if conn[index, target] >= 0:
if conn[index, target] < N_layer:
init_ff_connections.append(
(conn[index, target], target,
weight[index, target], 1))
else:
init_lat_connections.append(
(conn[index, target] - N_layer, target,
weight[index, target], 1))
# Neuron populations
target_pop = sim.Population(N_layer, model, cell_params, label="TARGET_POP")
# Putting this population on chip (0, 1) makes it easier to copy the provenance
# data somewhere else
target_pop.set_constraint(PlacerChipAndCoreConstraint(0, 1))
# Connections
# Plastic Connections between pre_pop and post_pop
stdp_model = sim.STDPMechanism(
timing_dependence=sim.SpikePairRule(tau_plus=tau_plus,
tau_minus=tau_minus),
weight_dependence=sim.AdditiveWeightDependence(w_min=0, w_max=g_max,
# A_plus=0.02, A_minus=0.02
A_plus=a_plus,
A_minus=a_minus)
)
if case == CASE_CORR_AND_REW or case == CASE_REW_NO_CORR:
structure_model_w_stdp = sim.StructuralMechanism(
stdp_model=stdp_model,
weight=g_max,
delay=args.delay,
s_max=s_max,
grid=grid,
f_rew=f_rew,
lateral_inhibition=args.lateral_inhibition,
random_partner=args.random_partner,
p_elim_dep=p_elim_dep,
p_elim_pot=p_elim_pot,
sigma_form_forward=sigma_form_forward,
sigma_form_lateral=sigma_form_lateral,
p_form_forward=p_form_forward,
p_form_lateral=p_form_lateral
)
elif case == CASE_CORR_NO_REW:
structure_model_w_stdp = stdp_model
# structure_model_w_stdp = sim.StructuralMechanism(weight=g_max, s_max=s_max)
ff_projection = sim.Projection(
source_pop, target_pop,
sim.FromListConnector(init_ff_connections),
synapse_dynamics=sim.SynapseDynamics(slow=structure_model_w_stdp),
label="plastic_ff_projection"
)
lat_projection = sim.Projection(
target_pop, target_pop,
sim.FromListConnector(init_lat_connections),
synapse_dynamics=sim.SynapseDynamics(slow=structure_model_w_stdp),
label="plastic_lat_projection",
target="inhibitory" if args.lateral_inhibition else "excitatory"
)
# +-------------------------------------------------------------------+
# | Simulation and results |
# +-------------------------------------------------------------------+
# Record neurons' potentials
# target_pop.record_v()
# Record spikes
# if case == CASE_REW_NO_CORR:
if args.record_source:
source_pop.record()
target_pop.record()
# Run simulation
pre_spikes = []
post_spikes = []
pre_sources = []
pre_targets = []
pre_weights = []
pre_delays = []
post_sources = []
post_targets = []
post_weights = []
post_delays = []
# rates_history = np.zeros((16, 16, simtime // t_stim))
e = None
print "Starting the sim"
no_runs = simtime // t_record
run_duration = t_record
try:
for current_run in range(no_runs):
print "run", current_run + 1, "of", no_runs
sim.run(run_duration)
if (current_run + 1) * run_duration % t_record == 0:
pre_weights.append(
np.array([
ff_projection._get_synaptic_data(True, 'source'),
ff_projection._get_synaptic_data(True, 'target'),
ff_projection._get_synaptic_data(True, 'weight'),
ff_projection._get_synaptic_data(True, 'delay')]).T)
post_weights.append(
np.array([
lat_projection._get_synaptic_data(True, 'source'),
lat_projection._get_synaptic_data(True, 'target'),
lat_projection._get_synaptic_data(True, 'weight'),
lat_projection._get_synaptic_data(True, 'delay')]).T)
if args.record_source:
pre_spikes = source_pop.getSpikes(compatible_output=True)
else:
pre_spikes = []
post_spikes = target_pop.getSpikes(compatible_output=True)
# End simulation on SpiNNaker
sim.end()
except Exception as e:
print e
# print("Weights:", plastic_projection.getWeights())
end_time = plt.datetime.datetime.now()
total_time = end_time - start_time
pre_spikes = np.asarray(pre_spikes)
post_spikes = np.asarray(post_spikes)
print "Total time elapsed -- " + str(total_time)
suffix = end_time.strftime("_%H%M%S_%d%m%Y")
if args.filename:
filename = args.filename
else:
filename = "ocular_preference_results" + str(suffix)
total_target_neuron_mean_spike_rate = \
post_spikes.shape[0] / float(simtime) * 1000. / N_layer
np.savez(filename, pre_spikes=pre_spikes,
post_spikes=post_spikes,
init_ff_connections=init_ff_connections,
init_lat_connections=init_lat_connections,
ff_connections=pre_weights,
lat_connections=post_weights,
final_pre_weights=pre_weights[-1],
final_post_weights=post_weights[-1],
simtime=simtime,
sim_params=sim_params,
total_time=total_time,
mean_firing_rate=total_target_neuron_mean_spike_rate,
exception=e,
insult=args.insult)
# Plotting
if args.plot and e is None:
init_ff_conn_network = np.ones((256, 256)) * np.nan
init_lat_conn_network = np.ones((256, 256)) * np.nan
for source, target, weight, delay in init_ff_connections:
if np.isnan(init_ff_conn_network[int(source), int(target)]):
init_ff_conn_network[int(source), int(target)] = weight
else:
init_ff_conn_network[int(source), int(target)] += weight
for source, target, weight, delay in init_lat_connections:
if np.isnan(init_lat_conn_network[int(source), int(target)]):
init_lat_conn_network[int(source), int(target)] = weight
else:
init_lat_conn_network[int(source), int(target)] += weight
def plot_spikes(spikes, title):
| if spikes is not None and len(spikes) > 0:
f, ax1 = plt.subplots(1, 1, figsize=(16, 8))
ax1.set_xlim((0, simtime))
ax1.scatter([i[1] for i in spikes], [i[0] for i in spikes], s=.2)
ax1.set_xlabel('Time/ms')
ax1.set_ylabel('spikes')
ax1.set_title(title)
else:
print "No spikes received" | identifier_body |
|
binocular_input_topographic_map.py | args.delay,
'b': b,
't_minus': tau_minus,
't_plus': tau_plus,
'tau_refrac': args.tau_refrac,
'a_minus': a_minus,
'a_plus': a_plus
}
if args.gaussian_input:
gen_rate = generate_gaussian_input_rates
else:
gen_rate = generate_rates
# +-------------------------------------------------------------------+
# | Initial network setup |
# +-------------------------------------------------------------------+
# Need to setup the moving input
one_row = np.asarray(np.arange(16) % 2, dtype=bool)
binoc_positions = np.asarray([one_row if i % 2 == 0 else np.logical_not(one_row) for i in range(16)])
left_positions = np.where(binoc_positions==0)
right_positions = np.where(binoc_positions==1)
positions = [left_positions, right_positions]
if case == CASE_REW_NO_CORR:
raise NotImplementedError
elif case == CASE_CORR_AND_REW or case == CASE_CORR_NO_REW:
rates = np.empty((simtime // t_stim, grid[0], grid[1]))
for rate_id in range(simtime // t_stim):
rand_offset = np.random.randint(0, N_layer//2)
stim_position = (positions[rate_id%2][0][rand_offset], positions[rate_id%2][1][rand_offset])
assert binoc_positions[stim_position] == rate_id%2
r = gen_rate(stim_position,
f_base=f_base,
grid=grid,
f_peak=f_peak,
sigma_stim=sigma_stim)
rates[rate_id, :, :] = r
rates = rates.reshape(simtime // t_stim, N_layer)
source_pop = sim.Population(N_layer,
sim.SpikeSourcePoissonVariable,
{'rate': rates,
'start': 100,
'duration': simtime,
'rate_interval_duration': t_stim
}, label="Variable-rate Poisson spike source")
ff_s = np.zeros(N_layer, dtype=np.uint)
lat_s = np.zeros(N_layer, dtype=np.uint)
init_ff_connections = []
init_lat_connections = []
if args.initial_connectivity_file is None:
raise NotImplementedError
else:
if "npz" in args.initial_connectivity_file:
initial_connectivity = np.load(args.initial_connectivity_file)
else:
import scipy.io as io
initial_connectivity = io.loadmat(args.initial_connectivity_file)
conn = initial_connectivity['ConnPostToPre'] - 1
weight = initial_connectivity['WeightPostToPre']
for target in range(conn.shape[1]):
for index in range(conn.shape[0]):
if conn[index, target] >= 0:
if conn[index, target] < N_layer:
init_ff_connections.append(
(conn[index, target], target,
weight[index, target], 1))
else:
init_lat_connections.append(
(conn[index, target] - N_layer, target,
weight[index, target], 1))
# Neuron populations
target_pop = sim.Population(N_layer, model, cell_params, label="TARGET_POP")
# Putting this population on chip (0, 1) makes it easier to copy the provenance
# data somewhere else
target_pop.set_constraint(PlacerChipAndCoreConstraint(0, 1))
# Connections
# Plastic Connections between pre_pop and post_pop
stdp_model = sim.STDPMechanism(
timing_dependence=sim.SpikePairRule(tau_plus=tau_plus,
tau_minus=tau_minus),
weight_dependence=sim.AdditiveWeightDependence(w_min=0, w_max=g_max,
# A_plus=0.02, A_minus=0.02
A_plus=a_plus,
A_minus=a_minus)
)
if case == CASE_CORR_AND_REW or case == CASE_REW_NO_CORR:
structure_model_w_stdp = sim.StructuralMechanism(
stdp_model=stdp_model,
weight=g_max,
delay=args.delay,
s_max=s_max,
grid=grid,
f_rew=f_rew,
lateral_inhibition=args.lateral_inhibition,
random_partner=args.random_partner,
p_elim_dep=p_elim_dep,
p_elim_pot=p_elim_pot,
sigma_form_forward=sigma_form_forward,
sigma_form_lateral=sigma_form_lateral,
p_form_forward=p_form_forward,
p_form_lateral=p_form_lateral
)
elif case == CASE_CORR_NO_REW:
structure_model_w_stdp = stdp_model
# structure_model_w_stdp = sim.StructuralMechanism(weight=g_max, s_max=s_max)
ff_projection = sim.Projection(
source_pop, target_pop,
sim.FromListConnector(init_ff_connections),
synapse_dynamics=sim.SynapseDynamics(slow=structure_model_w_stdp),
label="plastic_ff_projection"
)
lat_projection = sim.Projection(
target_pop, target_pop,
sim.FromListConnector(init_lat_connections),
synapse_dynamics=sim.SynapseDynamics(slow=structure_model_w_stdp),
label="plastic_lat_projection",
target="inhibitory" if args.lateral_inhibition else "excitatory"
)
# +-------------------------------------------------------------------+
# | Simulation and results |
# +-------------------------------------------------------------------+
# Record neurons' potentials
# target_pop.record_v()
# Record spikes
# if case == CASE_REW_NO_CORR:
if args.record_source:
source_pop.record()
target_pop.record()
# Run simulation
pre_spikes = []
post_spikes = []
pre_sources = []
pre_targets = []
pre_weights = []
pre_delays = []
post_sources = []
post_targets = []
post_weights = []
post_delays = []
# rates_history = np.zeros((16, 16, simtime // t_stim))
e = None
print "Starting the sim"
no_runs = simtime // t_record
run_duration = t_record
try:
for current_run in range(no_runs):
print "run", current_run + 1, "of", no_runs
sim.run(run_duration)
if (current_run + 1) * run_duration % t_record == 0:
pre_weights.append(
np.array([
ff_projection._get_synaptic_data(True, 'source'),
ff_projection._get_synaptic_data(True, 'target'),
ff_projection._get_synaptic_data(True, 'weight'),
ff_projection._get_synaptic_data(True, 'delay')]).T)
post_weights.append(
np.array([
lat_projection._get_synaptic_data(True, 'source'),
lat_projection._get_synaptic_data(True, 'target'),
lat_projection._get_synaptic_data(True, 'weight'),
lat_projection._get_synaptic_data(True, 'delay')]).T)
if args.record_source:
pre_spikes = source_pop.getSpikes(compatible_output=True)
else:
pre_spikes = []
post_spikes = target_pop.getSpikes(compatible_output=True)
# End simulation on SpiNNaker
sim.end()
except Exception as e:
print e
# print("Weights:", plastic_projection.getWeights())
end_time = plt.datetime.datetime.now()
total_time = end_time - start_time
pre_spikes = np.asarray(pre_spikes)
post_spikes = np.asarray(post_spikes)
print "Total time elapsed -- " + str(total_time)
suffix = end_time.strftime("_%H%M%S_%d%m%Y")
if args.filename:
filename = args.filename
else:
filename = "ocular_preference_results" + str(suffix)
total_target_neuron_mean_spike_rate = \
post_spikes.shape[0] / float(simtime) * 1000. / N_layer
np.savez(filename, pre_spikes=pre_spikes,
post_spikes=post_spikes,
init_ff_connections=init_ff_connections,
init_lat_connections=init_lat_connections,
ff_connections=pre_weights,
lat_connections=post_weights,
final_pre_weights=pre_weights[-1],
final_post_weights=post_weights[-1],
simtime=simtime,
sim_params=sim_params,
total_time=total_time,
mean_firing_rate=total_target_neuron_mean_spike_rate,
exception=e,
insult=args.insult)
# Plotting
if args.plot and e is None:
init_ff_conn_network = np.ones((256, 256)) * np.nan
init_lat_conn_network = np.ones((256, 256)) * np.nan
for source, target, weight, delay in init_ff_connections:
if np.isnan(init_ff_conn_network[int(source), int(target)]):
init_ff_conn_network[int(source), int(target)] = weight
else:
init_ff_conn_network[int(source), int(target)] += weight
for source, target, weight, delay in init_lat_connections:
if np.isnan(init_lat_conn_network[int(source), int(target)]):
init_lat_conn_network[int(source), int(target)] = weight
else:
init_lat_conn_network[int(source), int(target)] += weight
def plot_spikes(spikes, title):
if spikes is not None and len(spikes) > 0: | f, ax1 = plt.subplots(1, 1, figsize=(16, 8))
ax1.set_xlim((0, simtime))
ax1.scatter([i[1] for i in spikes], [i[0] for i in spikes], s=.2) | random_line_split |
|
binocular_input_topographic_map.py | _positions==1)
positions = [left_positions, right_positions]
if case == CASE_REW_NO_CORR:
raise NotImplementedError
elif case == CASE_CORR_AND_REW or case == CASE_CORR_NO_REW:
rates = np.empty((simtime // t_stim, grid[0], grid[1]))
for rate_id in range(simtime // t_stim):
rand_offset = np.random.randint(0, N_layer//2)
stim_position = (positions[rate_id%2][0][rand_offset], positions[rate_id%2][1][rand_offset])
assert binoc_positions[stim_position] == rate_id%2
r = gen_rate(stim_position,
f_base=f_base,
grid=grid,
f_peak=f_peak,
sigma_stim=sigma_stim)
rates[rate_id, :, :] = r
rates = rates.reshape(simtime // t_stim, N_layer)
source_pop = sim.Population(N_layer,
sim.SpikeSourcePoissonVariable,
{'rate': rates,
'start': 100,
'duration': simtime,
'rate_interval_duration': t_stim
}, label="Variable-rate Poisson spike source")
ff_s = np.zeros(N_layer, dtype=np.uint)
lat_s = np.zeros(N_layer, dtype=np.uint)
init_ff_connections = []
init_lat_connections = []
if args.initial_connectivity_file is None:
raise NotImplementedError
else:
if "npz" in args.initial_connectivity_file:
initial_connectivity = np.load(args.initial_connectivity_file)
else:
import scipy.io as io
initial_connectivity = io.loadmat(args.initial_connectivity_file)
conn = initial_connectivity['ConnPostToPre'] - 1
weight = initial_connectivity['WeightPostToPre']
for target in range(conn.shape[1]):
for index in range(conn.shape[0]):
if conn[index, target] >= 0:
if conn[index, target] < N_layer:
init_ff_connections.append(
(conn[index, target], target,
weight[index, target], 1))
else:
init_lat_connections.append(
(conn[index, target] - N_layer, target,
weight[index, target], 1))
# Neuron populations
target_pop = sim.Population(N_layer, model, cell_params, label="TARGET_POP")
# Putting this population on chip (0, 1) makes it easier to copy the provenance
# data somewhere else
target_pop.set_constraint(PlacerChipAndCoreConstraint(0, 1))
# Connections
# Plastic Connections between pre_pop and post_pop
stdp_model = sim.STDPMechanism(
timing_dependence=sim.SpikePairRule(tau_plus=tau_plus,
tau_minus=tau_minus),
weight_dependence=sim.AdditiveWeightDependence(w_min=0, w_max=g_max,
# A_plus=0.02, A_minus=0.02
A_plus=a_plus,
A_minus=a_minus)
)
if case == CASE_CORR_AND_REW or case == CASE_REW_NO_CORR:
structure_model_w_stdp = sim.StructuralMechanism(
stdp_model=stdp_model,
weight=g_max,
delay=args.delay,
s_max=s_max,
grid=grid,
f_rew=f_rew,
lateral_inhibition=args.lateral_inhibition,
random_partner=args.random_partner,
p_elim_dep=p_elim_dep,
p_elim_pot=p_elim_pot,
sigma_form_forward=sigma_form_forward,
sigma_form_lateral=sigma_form_lateral,
p_form_forward=p_form_forward,
p_form_lateral=p_form_lateral
)
elif case == CASE_CORR_NO_REW:
structure_model_w_stdp = stdp_model
# structure_model_w_stdp = sim.StructuralMechanism(weight=g_max, s_max=s_max)
ff_projection = sim.Projection(
source_pop, target_pop,
sim.FromListConnector(init_ff_connections),
synapse_dynamics=sim.SynapseDynamics(slow=structure_model_w_stdp),
label="plastic_ff_projection"
)
lat_projection = sim.Projection(
target_pop, target_pop,
sim.FromListConnector(init_lat_connections),
synapse_dynamics=sim.SynapseDynamics(slow=structure_model_w_stdp),
label="plastic_lat_projection",
target="inhibitory" if args.lateral_inhibition else "excitatory"
)
# +-------------------------------------------------------------------+
# | Simulation and results |
# +-------------------------------------------------------------------+
# Record neurons' potentials
# target_pop.record_v()
# Record spikes
# if case == CASE_REW_NO_CORR:
if args.record_source:
source_pop.record()
target_pop.record()
# Run simulation
pre_spikes = []
post_spikes = []
pre_sources = []
pre_targets = []
pre_weights = []
pre_delays = []
post_sources = []
post_targets = []
post_weights = []
post_delays = []
# rates_history = np.zeros((16, 16, simtime // t_stim))
e = None
print "Starting the sim"
no_runs = simtime // t_record
run_duration = t_record
try:
for current_run in range(no_runs):
print "run", current_run + 1, "of", no_runs
sim.run(run_duration)
if (current_run + 1) * run_duration % t_record == 0:
pre_weights.append(
np.array([
ff_projection._get_synaptic_data(True, 'source'),
ff_projection._get_synaptic_data(True, 'target'),
ff_projection._get_synaptic_data(True, 'weight'),
ff_projection._get_synaptic_data(True, 'delay')]).T)
post_weights.append(
np.array([
lat_projection._get_synaptic_data(True, 'source'),
lat_projection._get_synaptic_data(True, 'target'),
lat_projection._get_synaptic_data(True, 'weight'),
lat_projection._get_synaptic_data(True, 'delay')]).T)
if args.record_source:
pre_spikes = source_pop.getSpikes(compatible_output=True)
else:
pre_spikes = []
post_spikes = target_pop.getSpikes(compatible_output=True)
# End simulation on SpiNNaker
sim.end()
except Exception as e:
print e
# print("Weights:", plastic_projection.getWeights())
end_time = plt.datetime.datetime.now()
total_time = end_time - start_time
pre_spikes = np.asarray(pre_spikes)
post_spikes = np.asarray(post_spikes)
print "Total time elapsed -- " + str(total_time)
suffix = end_time.strftime("_%H%M%S_%d%m%Y")
if args.filename:
filename = args.filename
else:
filename = "ocular_preference_results" + str(suffix)
total_target_neuron_mean_spike_rate = \
post_spikes.shape[0] / float(simtime) * 1000. / N_layer
np.savez(filename, pre_spikes=pre_spikes,
post_spikes=post_spikes,
init_ff_connections=init_ff_connections,
init_lat_connections=init_lat_connections,
ff_connections=pre_weights,
lat_connections=post_weights,
final_pre_weights=pre_weights[-1],
final_post_weights=post_weights[-1],
simtime=simtime,
sim_params=sim_params,
total_time=total_time,
mean_firing_rate=total_target_neuron_mean_spike_rate,
exception=e,
insult=args.insult)
# Plotting
if args.plot and e is None:
init_ff_conn_network = np.ones((256, 256)) * np.nan
init_lat_conn_network = np.ones((256, 256)) * np.nan
for source, target, weight, delay in init_ff_connections:
if np.isnan(init_ff_conn_network[int(source), int(target)]):
init_ff_conn_network[int(source), int(target)] = weight
else:
init_ff_conn_network[int(source), int(target)] += weight
for source, target, weight, delay in init_lat_connections:
if np.isnan(init_lat_conn_network[int(source), int(target)]):
init_lat_conn_network[int(source), int(target)] = weight
else:
init_lat_conn_network[int(source), int(target)] += weight
def plot_spikes(spikes, title):
if spikes is not None and len(spikes) > 0:
f, ax1 = plt.subplots(1, 1, figsize=(16, 8))
ax1.set_xlim((0, simtime))
ax1.scatter([i[1] for i in spikes], [i[0] for i in spikes], s=.2)
ax1.set_xlabel('Time/ms')
ax1.set_ylabel('spikes')
ax1.set_title(title)
else:
print "No spikes received"
plot_spikes(pre_spikes, "Source layer spikes")
plt.show()
plot_spikes(post_spikes, "Target layer spikes")
plt.show()
final_ff_conn_network = np.ones((256, 256)) * np.nan
final_lat_conn_network = np.ones((256, 256)) * np.nan
for source, target, weight, delay in pre_weights[-1]:
if np.isnan(final_ff_conn_network[int(source), int(target)]):
final_ff_conn_network[int(source), int(target)] = weight
else:
| final_ff_conn_network[int(source), int(target)] += weight | conditional_block |
|
imaginate.rs |
fn initiate_server_check_maybe_fail(&mut self) -> Result<Option<ImaginateFuture>, Error> {
use futures::future::FutureExt;
let Some(client) = &self.client else {
return Ok(None);
};
if self.pending_server_check.is_some() {
return Ok(None);
}
self.server_status = ImaginateServerStatus::Checking;
let url = join_url(&self.host_name, SDAPI_PROGRESS)?;
let request = new_get_request(client, url)?;
let (send, recv) = futures::channel::oneshot::channel();
let response_future = client.execute(request).map(move |r| {
let _ = send.send(r);
});
self.pending_server_check = Some(recv);
Ok(Some(Box::pin(response_future)))
}
pub fn initiate_server_check(&mut self) -> Option<ImaginateFuture> {
match self.initiate_server_check_maybe_fail() {
Ok(f) => f,
Err(err) => {
self.server_status = ImaginateServerStatus::Failed(err.to_string());
None
}
}
}
pub fn poll_server_check(&mut self) {
if let Some(mut check) = self.pending_server_check.take() {
self.server_status = match check.try_recv().map(|r| r.map(|r| r.and_then(reqwest::Response::error_for_status))) {
Ok(Some(Ok(_response))) => ImaginateServerStatus::Connected,
Ok(Some(Err(_))) | Err(_) => ImaginateServerStatus::Unavailable,
Ok(None) => {
self.pending_server_check = Some(check);
ImaginateServerStatus::Checking
}
}
}
}
pub fn server_status(&self) -> &ImaginateServerStatus {
&self.server_status
}
pub fn is_checking(&self) -> bool {
matches!(self.server_status, ImaginateServerStatus::Checking)
}
}
#[derive(Debug)]
struct ImaginateFutureAbortHandle(futures::future::AbortHandle);
impl ImaginateTerminationHandle for ImaginateFutureAbortHandle {
fn terminate(&self) {
self.0.abort()
}
}
#[derive(Debug)]
enum Error {
UrlParse { text: String, err: <&'static str as TryInto<Url>>::Error },
ClientBuild(reqwest::Error),
RequestBuild(reqwest::Error),
Request(reqwest::Error),
ResponseFormat(reqwest::Error),
NoImage,
Base64Decode(base64::DecodeError),
ImageDecode(image::error::ImageError),
ImageEncode(image::error::ImageError),
UnsupportedPixelType(&'static str),
InconsistentImageSize,
Terminated,
TerminationFailed(reqwest::Error),
}
impl core::fmt::Display for Error {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
match self {
Self::UrlParse { text, err } => write!(f, "invalid url '{text}' ({err})"),
Self::ClientBuild(err) => write!(f, "failed to create a reqwest client ({err})"),
Self::RequestBuild(err) => write!(f, "failed to create a reqwest request ({err})"),
Self::Request(err) => write!(f, "request failed ({err})"),
Self::ResponseFormat(err) => write!(f, "got an invalid API response ({err})"),
Self::NoImage => write!(f, "got an empty API response"),
Self::Base64Decode(err) => write!(f, "failed to decode base64 encoded image ({err})"),
Self::ImageDecode(err) => write!(f, "failed to decode png image ({err})"),
Self::ImageEncode(err) => write!(f, "failed to encode png image ({err})"),
Self::UnsupportedPixelType(ty) => write!(f, "pixel type `{ty}` not supported for imaginate images"),
Self::InconsistentImageSize => write!(f, "image width and height do not match the image byte size"),
Self::Terminated => write!(f, "imaginate request was terminated by the user"),
Self::TerminationFailed(err) => write!(f, "termination failed ({err})"),
}
}
}
impl std::error::Error for Error {}
#[derive(Default, Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
struct ImageResponse {
images: Vec<String>,
}
#[derive(Default, Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
struct ProgressResponse {
progress: f64,
}
#[derive(Debug, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateTextToImageRequestOverrideSettings {
show_progress_every_n_steps: u32,
}
impl Default for ImaginateTextToImageRequestOverrideSettings {
fn default() -> Self {
Self {
show_progress_every_n_steps: PROGRESS_EVERY_N_STEPS,
}
}
}
#[derive(Debug, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateImageToImageRequestOverrideSettings {
show_progress_every_n_steps: u32,
img2img_fix_steps: bool,
}
impl Default for ImaginateImageToImageRequestOverrideSettings {
fn default() -> Self {
Self {
show_progress_every_n_steps: PROGRESS_EVERY_N_STEPS,
img2img_fix_steps: true,
}
}
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateTextToImageRequest<'a> {
#[serde(flatten)]
common: ImaginateCommonImageRequest<'a>,
override_settings: ImaginateTextToImageRequestOverrideSettings,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateMask {
mask: String,
mask_blur: String,
inpainting_fill: u32,
inpaint_full_res: bool,
inpainting_mask_invert: u32,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateImageToImageRequest<'a> {
#[serde(flatten)]
common: ImaginateCommonImageRequest<'a>,
override_settings: ImaginateImageToImageRequestOverrideSettings,
init_images: Vec<String>,
denoising_strength: f64,
#[serde(flatten)]
mask: Option<ImaginateMask>,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateCommonImageRequest<'a> {
prompt: String,
seed: f64,
steps: u32,
cfg_scale: f64,
width: f64,
height: f64,
restore_faces: bool,
tiling: bool,
negative_prompt: String,
sampler_index: &'a str,
}
#[cfg(feature = "imaginate")]
#[allow(clippy::too_many_arguments)]
pub async fn imaginate<'a, P: Pixel>(
image: Image<P>,
editor_api: impl Future<Output = WasmEditorApi<'a>>,
controller: ImaginateController,
seed: impl Future<Output = f64>,
res: impl Future<Output = Option<DVec2>>,
samples: impl Future<Output = u32>,
sampling_method: impl Future<Output = ImaginateSamplingMethod>,
prompt_guidance: impl Future<Output = f32>,
prompt: impl Future<Output = String>,
negative_prompt: impl Future<Output = String>,
adapt_input_image: impl Future<Output = bool>,
image_creativity: impl Future<Output = f32>,
masking_layer: impl Future<Output = Option<Vec<u64>>>,
inpaint: impl Future<Output = bool>,
mask_blur: impl Future<Output = f32>,
mask_starting_fill: impl Future<Output = ImaginateMaskStartingFill>,
improve_faces: impl Future<Output = bool>,
tiling: impl Future<Output = bool>,
) -> Image<P> {
let WasmEditorApi {
node_graph_message_sender,
imaginate_preferences,
..
} = editor_api.await;
let set_progress = |progress: ImaginateStatus| {
controller.set_status(progress);
node_graph_message_sender.send(NodeGraphUpdateMessage::ImaginateStatusUpdate);
};
let host_name = imaginate_preferences.get_host_name();
imaginate_maybe_fail(
image,
host_name,
set_progress,
&controller,
seed,
res,
samples,
sampling_method,
prompt_guidance,
prompt,
negative_prompt,
adapt_input_image,
image_creativity,
masking_layer,
inpaint,
mask_blur,
mask_starting_fill,
improve_faces,
tiling,
)
.await
.unwrap_or_else(|err| {
match err {
Error::Terminated => {
set_progress(ImaginateStatus::Terminated);
}
err => {
error!("{err}");
set_progress(ImaginateStatus::Failed(err.to_string()));
}
};
Image::empty()
| {
match parse_url(name) {
Ok(url) => self.host_name = url,
Err(err) => self.server_status = ImaginateServerStatus::Failed(err.to_string()),
}
} | identifier_body |
|
imaginate.rs | })"),
Self::ImageEncode(err) => write!(f, "failed to encode png image ({err})"),
Self::UnsupportedPixelType(ty) => write!(f, "pixel type `{ty}` not supported for imaginate images"),
Self::InconsistentImageSize => write!(f, "image width and height do not match the image byte size"),
Self::Terminated => write!(f, "imaginate request was terminated by the user"),
Self::TerminationFailed(err) => write!(f, "termination failed ({err})"),
}
}
}
impl std::error::Error for Error {}
#[derive(Default, Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
struct ImageResponse {
images: Vec<String>,
}
#[derive(Default, Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
struct ProgressResponse {
progress: f64,
}
#[derive(Debug, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateTextToImageRequestOverrideSettings {
show_progress_every_n_steps: u32,
}
impl Default for ImaginateTextToImageRequestOverrideSettings {
fn default() -> Self {
Self {
show_progress_every_n_steps: PROGRESS_EVERY_N_STEPS,
}
}
}
#[derive(Debug, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateImageToImageRequestOverrideSettings {
show_progress_every_n_steps: u32,
img2img_fix_steps: bool,
}
impl Default for ImaginateImageToImageRequestOverrideSettings {
fn default() -> Self {
Self {
show_progress_every_n_steps: PROGRESS_EVERY_N_STEPS,
img2img_fix_steps: true,
}
}
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateTextToImageRequest<'a> {
#[serde(flatten)]
common: ImaginateCommonImageRequest<'a>,
override_settings: ImaginateTextToImageRequestOverrideSettings,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateMask {
mask: String,
mask_blur: String,
inpainting_fill: u32,
inpaint_full_res: bool,
inpainting_mask_invert: u32,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateImageToImageRequest<'a> {
#[serde(flatten)]
common: ImaginateCommonImageRequest<'a>,
override_settings: ImaginateImageToImageRequestOverrideSettings,
init_images: Vec<String>,
denoising_strength: f64,
#[serde(flatten)]
mask: Option<ImaginateMask>,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateCommonImageRequest<'a> {
prompt: String,
seed: f64,
steps: u32,
cfg_scale: f64,
width: f64,
height: f64,
restore_faces: bool,
tiling: bool,
negative_prompt: String,
sampler_index: &'a str,
}
#[cfg(feature = "imaginate")]
#[allow(clippy::too_many_arguments)]
pub async fn imaginate<'a, P: Pixel>(
image: Image<P>,
editor_api: impl Future<Output = WasmEditorApi<'a>>,
controller: ImaginateController,
seed: impl Future<Output = f64>,
res: impl Future<Output = Option<DVec2>>,
samples: impl Future<Output = u32>,
sampling_method: impl Future<Output = ImaginateSamplingMethod>,
prompt_guidance: impl Future<Output = f32>,
prompt: impl Future<Output = String>,
negative_prompt: impl Future<Output = String>,
adapt_input_image: impl Future<Output = bool>,
image_creativity: impl Future<Output = f32>,
masking_layer: impl Future<Output = Option<Vec<u64>>>,
inpaint: impl Future<Output = bool>,
mask_blur: impl Future<Output = f32>,
mask_starting_fill: impl Future<Output = ImaginateMaskStartingFill>,
improve_faces: impl Future<Output = bool>,
tiling: impl Future<Output = bool>,
) -> Image<P> {
let WasmEditorApi {
node_graph_message_sender,
imaginate_preferences,
..
} = editor_api.await;
let set_progress = |progress: ImaginateStatus| {
controller.set_status(progress);
node_graph_message_sender.send(NodeGraphUpdateMessage::ImaginateStatusUpdate);
};
let host_name = imaginate_preferences.get_host_name();
imaginate_maybe_fail(
image,
host_name,
set_progress,
&controller,
seed,
res,
samples,
sampling_method,
prompt_guidance,
prompt,
negative_prompt,
adapt_input_image,
image_creativity,
masking_layer,
inpaint,
mask_blur,
mask_starting_fill,
improve_faces,
tiling,
)
.await
.unwrap_or_else(|err| {
match err {
Error::Terminated => {
set_progress(ImaginateStatus::Terminated);
}
err => {
error!("{err}");
set_progress(ImaginateStatus::Failed(err.to_string()));
}
};
Image::empty()
})
}
#[cfg(feature = "imaginate")]
#[allow(clippy::too_many_arguments)]
async fn imaginate_maybe_fail<'a, P: Pixel, F: Fn(ImaginateStatus)>(
image: Image<P>,
host_name: &str,
set_progress: F,
controller: &ImaginateController,
seed: impl Future<Output = f64>,
res: impl Future<Output = Option<DVec2>>,
samples: impl Future<Output = u32>,
sampling_method: impl Future<Output = ImaginateSamplingMethod>,
prompt_guidance: impl Future<Output = f32>,
prompt: impl Future<Output = String>,
negative_prompt: impl Future<Output = String>,
adapt_input_image: impl Future<Output = bool>,
image_creativity: impl Future<Output = f32>,
_masking_layer: impl Future<Output = Option<Vec<u64>>>,
_inpaint: impl Future<Output = bool>,
_mask_blur: impl Future<Output = f32>,
_mask_starting_fill: impl Future<Output = ImaginateMaskStartingFill>,
improve_faces: impl Future<Output = bool>,
tiling: impl Future<Output = bool>,
) -> Result<Image<P>, Error> {
set_progress(ImaginateStatus::Beginning);
let base_url: Url = parse_url(host_name)?;
let client = new_client()?;
let sampler_index = sampling_method.await;
let sampler_index = sampler_index.api_value();
let res = res.await.unwrap_or_else(|| {
let (width, height) = pick_safe_imaginate_resolution((image.width as _, image.height as _));
DVec2::new(width as _, height as _)
});
let common_request_data = ImaginateCommonImageRequest {
prompt: prompt.await,
seed: seed.await,
steps: samples.await,
cfg_scale: prompt_guidance.await as f64,
width: res.x,
height: res.y,
restore_faces: improve_faces.await,
tiling: tiling.await,
negative_prompt: negative_prompt.await,
sampler_index,
};
let request_builder = if adapt_input_image.await {
let base64_data = image_to_base64(image)?;
let request_data = ImaginateImageToImageRequest {
common: common_request_data,
override_settings: Default::default(),
init_images: vec![base64_data],
denoising_strength: image_creativity.await as f64 * 0.01,
mask: None,
};
let url = join_url(&base_url, SDAPI_IMAGE_TO_IMAGE)?;
client.post(url).json(&request_data)
} else {
let request_data = ImaginateTextToImageRequest {
common: common_request_data,
override_settings: Default::default(),
};
let url = join_url(&base_url, SDAPI_TEXT_TO_IMAGE)?;
client.post(url).json(&request_data)
};
let request = request_builder.header("Accept", "*/*").build().map_err(Error::RequestBuild)?;
let (response_future, abort_handle) = futures::future::abortable(client.execute(request));
controller.set_termination_handle(Box::new(ImaginateFutureAbortHandle(abort_handle)));
let progress_url = join_url(&base_url, SDAPI_PROGRESS)?;
futures::pin_mut!(response_future);
let response = loop {
let progress_request = new_get_request(&client, progress_url.clone())?;
let progress_response_future = client.execute(progress_request).and_then(|response| response.json());
futures::pin_mut!(progress_response_future);
response_future = match futures::future::select(response_future, progress_response_future).await {
Either::Left((response, _)) => break response,
Either::Right((progress, response_future)) => | {
if let Ok(ProgressResponse { progress }) = progress {
set_progress(ImaginateStatus::Generating(progress * 100.));
}
response_future
} | conditional_block |
|
imaginate.rs | let url = join_url(&self.host_name, SDAPI_PROGRESS)?;
let request = new_get_request(client, url)?;
let (send, recv) = futures::channel::oneshot::channel();
let response_future = client.execute(request).map(move |r| {
let _ = send.send(r);
});
self.pending_server_check = Some(recv);
Ok(Some(Box::pin(response_future)))
}
pub fn initiate_server_check(&mut self) -> Option<ImaginateFuture> {
match self.initiate_server_check_maybe_fail() {
Ok(f) => f,
Err(err) => {
self.server_status = ImaginateServerStatus::Failed(err.to_string());
None
}
}
}
pub fn poll_server_check(&mut self) {
if let Some(mut check) = self.pending_server_check.take() {
self.server_status = match check.try_recv().map(|r| r.map(|r| r.and_then(reqwest::Response::error_for_status))) {
Ok(Some(Ok(_response))) => ImaginateServerStatus::Connected,
Ok(Some(Err(_))) | Err(_) => ImaginateServerStatus::Unavailable,
Ok(None) => {
self.pending_server_check = Some(check);
ImaginateServerStatus::Checking
}
}
}
}
pub fn server_status(&self) -> &ImaginateServerStatus {
&self.server_status
}
pub fn is_checking(&self) -> bool {
matches!(self.server_status, ImaginateServerStatus::Checking)
}
}
#[derive(Debug)]
struct | (futures::future::AbortHandle);
impl ImaginateTerminationHandle for ImaginateFutureAbortHandle {
fn terminate(&self) {
self.0.abort()
}
}
#[derive(Debug)]
enum Error {
UrlParse { text: String, err: <&'static str as TryInto<Url>>::Error },
ClientBuild(reqwest::Error),
RequestBuild(reqwest::Error),
Request(reqwest::Error),
ResponseFormat(reqwest::Error),
NoImage,
Base64Decode(base64::DecodeError),
ImageDecode(image::error::ImageError),
ImageEncode(image::error::ImageError),
UnsupportedPixelType(&'static str),
InconsistentImageSize,
Terminated,
TerminationFailed(reqwest::Error),
}
impl core::fmt::Display for Error {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
match self {
Self::UrlParse { text, err } => write!(f, "invalid url '{text}' ({err})"),
Self::ClientBuild(err) => write!(f, "failed to create a reqwest client ({err})"),
Self::RequestBuild(err) => write!(f, "failed to create a reqwest request ({err})"),
Self::Request(err) => write!(f, "request failed ({err})"),
Self::ResponseFormat(err) => write!(f, "got an invalid API response ({err})"),
Self::NoImage => write!(f, "got an empty API response"),
Self::Base64Decode(err) => write!(f, "failed to decode base64 encoded image ({err})"),
Self::ImageDecode(err) => write!(f, "failed to decode png image ({err})"),
Self::ImageEncode(err) => write!(f, "failed to encode png image ({err})"),
Self::UnsupportedPixelType(ty) => write!(f, "pixel type `{ty}` not supported for imaginate images"),
Self::InconsistentImageSize => write!(f, "image width and height do not match the image byte size"),
Self::Terminated => write!(f, "imaginate request was terminated by the user"),
Self::TerminationFailed(err) => write!(f, "termination failed ({err})"),
}
}
}
impl std::error::Error for Error {}
#[derive(Default, Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
struct ImageResponse {
images: Vec<String>,
}
#[derive(Default, Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
struct ProgressResponse {
progress: f64,
}
#[derive(Debug, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateTextToImageRequestOverrideSettings {
show_progress_every_n_steps: u32,
}
impl Default for ImaginateTextToImageRequestOverrideSettings {
fn default() -> Self {
Self {
show_progress_every_n_steps: PROGRESS_EVERY_N_STEPS,
}
}
}
#[derive(Debug, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateImageToImageRequestOverrideSettings {
show_progress_every_n_steps: u32,
img2img_fix_steps: bool,
}
impl Default for ImaginateImageToImageRequestOverrideSettings {
fn default() -> Self {
Self {
show_progress_every_n_steps: PROGRESS_EVERY_N_STEPS,
img2img_fix_steps: true,
}
}
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateTextToImageRequest<'a> {
#[serde(flatten)]
common: ImaginateCommonImageRequest<'a>,
override_settings: ImaginateTextToImageRequestOverrideSettings,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateMask {
mask: String,
mask_blur: String,
inpainting_fill: u32,
inpaint_full_res: bool,
inpainting_mask_invert: u32,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateImageToImageRequest<'a> {
#[serde(flatten)]
common: ImaginateCommonImageRequest<'a>,
override_settings: ImaginateImageToImageRequestOverrideSettings,
init_images: Vec<String>,
denoising_strength: f64,
#[serde(flatten)]
mask: Option<ImaginateMask>,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateCommonImageRequest<'a> {
prompt: String,
seed: f64,
steps: u32,
cfg_scale: f64,
width: f64,
height: f64,
restore_faces: bool,
tiling: bool,
negative_prompt: String,
sampler_index: &'a str,
}
#[cfg(feature = "imaginate")]
#[allow(clippy::too_many_arguments)]
pub async fn imaginate<'a, P: Pixel>(
image: Image<P>,
editor_api: impl Future<Output = WasmEditorApi<'a>>,
controller: ImaginateController,
seed: impl Future<Output = f64>,
res: impl Future<Output = Option<DVec2>>,
samples: impl Future<Output = u32>,
sampling_method: impl Future<Output = ImaginateSamplingMethod>,
prompt_guidance: impl Future<Output = f32>,
prompt: impl Future<Output = String>,
negative_prompt: impl Future<Output = String>,
adapt_input_image: impl Future<Output = bool>,
image_creativity: impl Future<Output = f32>,
masking_layer: impl Future<Output = Option<Vec<u64>>>,
inpaint: impl Future<Output = bool>,
mask_blur: impl Future<Output = f32>,
mask_starting_fill: impl Future<Output = ImaginateMaskStartingFill>,
improve_faces: impl Future<Output = bool>,
tiling: impl Future<Output = bool>,
) -> Image<P> {
let WasmEditorApi {
node_graph_message_sender,
imaginate_preferences,
..
} = editor_api.await;
let set_progress = |progress: ImaginateStatus| {
controller.set_status(progress);
node_graph_message_sender.send(NodeGraphUpdateMessage::ImaginateStatusUpdate);
};
let host_name = imaginate_preferences.get_host_name();
imaginate_maybe_fail(
image,
host_name,
set_progress,
&controller,
seed,
res,
samples,
sampling_method,
prompt_guidance,
prompt,
negative_prompt,
adapt_input_image,
image_creativity,
masking_layer,
inpaint,
mask_blur,
mask_starting_fill,
improve_faces,
tiling,
)
.await
.unwrap_or_else(|err| {
match err {
Error::Terminated => {
set_progress(ImaginateStatus::Terminated);
}
err => {
error!("{err}");
set_progress(ImaginateStatus::Failed(err.to_string()));
}
};
Image::empty()
})
}
#[cfg(feature = "imaginate")]
#[allow(clippy::too_many_arguments)]
async fn imaginate_maybe_fail<'a, P: Pixel, F: Fn(ImaginateStatus)>(
image: Image<P>,
host_name: &str,
set_progress: F,
controller: &ImaginateController,
seed: impl Future<Output = f64>,
res: impl Future<Output = Option<DVec2>>,
samples: impl Future<Output = u32>,
sampling_method: impl Future<Output = ImaginateSamplingMethod>,
prompt_guidance: impl Future | ImaginateFutureAbortHandle | identifier_name |
imaginate.rs |
}
}
}
}
pub fn server_status(&self) -> &ImaginateServerStatus {
&self.server_status
}
pub fn is_checking(&self) -> bool {
matches!(self.server_status, ImaginateServerStatus::Checking)
}
}
#[derive(Debug)]
struct ImaginateFutureAbortHandle(futures::future::AbortHandle);
impl ImaginateTerminationHandle for ImaginateFutureAbortHandle {
fn terminate(&self) {
self.0.abort()
}
}
#[derive(Debug)]
enum Error {
UrlParse { text: String, err: <&'static str as TryInto<Url>>::Error },
ClientBuild(reqwest::Error),
RequestBuild(reqwest::Error),
Request(reqwest::Error),
ResponseFormat(reqwest::Error),
NoImage,
Base64Decode(base64::DecodeError),
ImageDecode(image::error::ImageError),
ImageEncode(image::error::ImageError),
UnsupportedPixelType(&'static str),
InconsistentImageSize,
Terminated,
TerminationFailed(reqwest::Error),
}
impl core::fmt::Display for Error {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
match self {
Self::UrlParse { text, err } => write!(f, "invalid url '{text}' ({err})"),
Self::ClientBuild(err) => write!(f, "failed to create a reqwest client ({err})"),
Self::RequestBuild(err) => write!(f, "failed to create a reqwest request ({err})"),
Self::Request(err) => write!(f, "request failed ({err})"),
Self::ResponseFormat(err) => write!(f, "got an invalid API response ({err})"),
Self::NoImage => write!(f, "got an empty API response"),
Self::Base64Decode(err) => write!(f, "failed to decode base64 encoded image ({err})"),
Self::ImageDecode(err) => write!(f, "failed to decode png image ({err})"),
Self::ImageEncode(err) => write!(f, "failed to encode png image ({err})"),
Self::UnsupportedPixelType(ty) => write!(f, "pixel type `{ty}` not supported for imaginate images"),
Self::InconsistentImageSize => write!(f, "image width and height do not match the image byte size"),
Self::Terminated => write!(f, "imaginate request was terminated by the user"),
Self::TerminationFailed(err) => write!(f, "termination failed ({err})"),
}
}
}
impl std::error::Error for Error {}
#[derive(Default, Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
struct ImageResponse {
images: Vec<String>,
}
#[derive(Default, Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
struct ProgressResponse {
progress: f64,
}
#[derive(Debug, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateTextToImageRequestOverrideSettings {
show_progress_every_n_steps: u32,
}
impl Default for ImaginateTextToImageRequestOverrideSettings {
fn default() -> Self {
Self {
show_progress_every_n_steps: PROGRESS_EVERY_N_STEPS,
}
}
}
#[derive(Debug, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateImageToImageRequestOverrideSettings {
show_progress_every_n_steps: u32,
img2img_fix_steps: bool,
}
impl Default for ImaginateImageToImageRequestOverrideSettings {
fn default() -> Self {
Self {
show_progress_every_n_steps: PROGRESS_EVERY_N_STEPS,
img2img_fix_steps: true,
}
}
}
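// These per-request overrides ride along with the generation parameters. Reporting
// progress every `PROGRESS_EVERY_N_STEPS` steps is what gives the progress-polling code
// intermediate status to display; `img2img_fix_steps` appears to ask the server to run
// the full requested step count for image-to-image instead of scaling it by denoising
// strength (behaviour of the remote API, not something enforced in this file).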
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateTextToImageRequest<'a> {
#[serde(flatten)]
common: ImaginateCommonImageRequest<'a>,
override_settings: ImaginateTextToImageRequestOverrideSettings,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateMask {
mask: String,
mask_blur: String,
inpainting_fill: u32,
inpaint_full_res: bool,
inpainting_mask_invert: u32,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateImageToImageRequest<'a> {
#[serde(flatten)]
common: ImaginateCommonImageRequest<'a>,
override_settings: ImaginateImageToImageRequestOverrideSettings,
init_images: Vec<String>,
denoising_strength: f64,
#[serde(flatten)]
mask: Option<ImaginateMask>,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
struct ImaginateCommonImageRequest<'a> {
prompt: String,
seed: f64,
steps: u32,
cfg_scale: f64,
width: f64,
height: f64,
restore_faces: bool,
tiling: bool,
negative_prompt: String,
sampler_index: &'a str,
}
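// Because the common fields are embedded with `#[serde(flatten)]`, a serialized
// image-to-image request looks roughly like the following (illustrative values; the
// mask fields are only flattened in when a mask is present):
//
//     {
//       "prompt": "...", "seed": 0.0, "steps": 20, "cfg_scale": 7.5,
//       "width": 512.0, "height": 512.0, "restore_faces": false, "tiling": false,
//       "negative_prompt": "", "sampler_index": "Euler a",
//       "override_settings": { "show_progress_every_n_steps": 5, "img2img_fix_steps": true },
//       "init_images": ["<base64 png>"], "denoising_strength": 0.66
//     }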
#[cfg(feature = "imaginate")]
#[allow(clippy::too_many_arguments)]
pub async fn imaginate<'a, P: Pixel>(
image: Image<P>,
editor_api: impl Future<Output = WasmEditorApi<'a>>,
controller: ImaginateController,
seed: impl Future<Output = f64>,
res: impl Future<Output = Option<DVec2>>,
samples: impl Future<Output = u32>,
sampling_method: impl Future<Output = ImaginateSamplingMethod>,
prompt_guidance: impl Future<Output = f32>,
prompt: impl Future<Output = String>,
negative_prompt: impl Future<Output = String>,
adapt_input_image: impl Future<Output = bool>,
image_creativity: impl Future<Output = f32>,
masking_layer: impl Future<Output = Option<Vec<u64>>>,
inpaint: impl Future<Output = bool>,
mask_blur: impl Future<Output = f32>,
mask_starting_fill: impl Future<Output = ImaginateMaskStartingFill>,
improve_faces: impl Future<Output = bool>,
tiling: impl Future<Output = bool>,
) -> Image<P> {
let WasmEditorApi {
node_graph_message_sender,
imaginate_preferences,
..
} = editor_api.await;
let set_progress = |progress: ImaginateStatus| {
controller.set_status(progress);
node_graph_message_sender.send(NodeGraphUpdateMessage::ImaginateStatusUpdate);
};
let host_name = imaginate_preferences.get_host_name();
imaginate_maybe_fail(
image,
host_name,
set_progress,
&controller,
seed,
res,
samples,
sampling_method,
prompt_guidance,
prompt,
negative_prompt,
adapt_input_image,
image_creativity,
masking_layer,
inpaint,
mask_blur,
mask_starting_fill,
improve_faces,
tiling,
)
.await
.unwrap_or_else(|err| {
match err {
Error::Terminated => {
set_progress(ImaginateStatus::Terminated);
}
err => {
error!("{err}");
set_progress(ImaginateStatus::Failed(err.to_string()));
}
};
Image::empty()
})
}
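// Note the split: this public wrapper never fails. Any error from `imaginate_maybe_fail`
// is logged, surfaced through `ImaginateStatus::Failed` (or `Terminated`), and an empty
// image is returned instead of propagating the error.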
#[cfg(feature = "imaginate")]
#[allow(clippy::too_many_arguments)]
async fn imaginate_maybe_fail<'a, P: Pixel, F: Fn(ImaginateStatus)>(
image: Image<P>,
host_name: &str,
set_progress: F,
controller: &ImaginateController,
seed: impl Future<Output = f64>,
res: impl Future<Output = Option<DVec2>>,
samples: impl Future<Output = u32>,
sampling_method: impl Future<Output = ImaginateSamplingMethod>,
prompt_guidance: impl Future<Output = f32>,
prompt: impl Future<Output = String>,
negative_prompt: impl Future<Output = String>,
adapt_input_image: impl Future<Output = bool>,
image_creativity: impl Future<Output = f32>,
_masking_layer: impl Future<Output = Option<Vec<u64>>>,
_inpaint: impl Future<Output = bool>,
_mask_blur: impl Future<Output = f32>,
_mask_starting_fill: impl Future<Output = ImaginateMaskStartingFill>,
improve_faces: impl Future<Output = bool>,
tiling: impl Future<Output = bool>,
) -> Result<Image<P>, Error> {
set_progress(ImaginateStatus::Beginning);
let base_url: Url = parse_url(host_name)?;
let client = new_client()?;
let sampler_index = sampling_method.await;
let sampler_index = sampler_index.api_value();
let res = res.await.unwrap_or_else(|| {
let (width, height) = pick_safe_imaginate_resolution((image.width as _, image.height as _));
DVec2::new(width as _, height as _)
});
let common_request_data = ImaginateCommonImageRequest {
		prompt: prompt.await,
		seed: seed.await,
		steps: samples.await,
		cfg_scale: prompt_guidance.await as f64,
|
tax_task.py | _true', help='use position embed or not')
parser.add_argument('--position_embed_size', type=int, default=100, help='position embed size')
parser.add_argument('--position_embed_mode', type=str, default='sum', choices=['sum','concat'], help='position embed mode[sum,concat]')
parser.add_argument('--self_attention_units', type=int, default=64, help='self attention units')
parser.add_argument('--self_attention_num_heads', type=int, default=4, help='self attention num heads')
parser.add_argument('--no_history', action='store_true', help='use history attention or not')
parser.add_argument('--no_interaction', action='store_true', help='use interaction attention or not')
parser.add_argument('--no_memory', action='store_true', help='remove memory or not')
parser.add_argument('--memory_word_num', type=int, default=256, help='memory word num')
parser.add_argument('--memory_word_size', type=int, default=64, help='memory word size')
parser.add_argument('--memory_read_heads', type=int, default=4, help='memory read heads')
parser.add_argument('--feature_size', type=int, default=256, help='feature size')
parser.add_argument('--multi', action='store_true', help='multi-label classification or not')
parser.add_argument('--epochs', type=int, default=10, help='epochs')
parser.add_argument('--focal_loss', action='store_false', help='use focal loss')  # store_false: passing --focal_loss turns focal loss off (it is on by default)
parser.add_argument('--focal_loss_alpha', type=float, default=0.6, help='focal loss alpha')
parser.add_argument('--focal_loss_gamma', type=float, default=2.0, help='focal loss gamma')
parser.add_argument('--optimizer', type=str, default='adam', help='optimizer')
parser.add_argument('--lr', type=float, default=0.00005, help='learning rate')
parser.add_argument('--lr_decay', type=float, default=1e-6, help='learning rate decay')
parser.add_argument('--model_path', type=str, help='model path')
args = parser.parse_args()
model_name = "AMANet-tax"
# time
time_str = time.strftime("%Y%m%d%H%M%S", time.localtime())
def write_log(callback, names, logs, epoch_no):
for name, value in zip(names, logs):
summary = tf.Summary()
summary_value = summary.value.add()
summary_value.simple_value = value
summary_value.tag = name
callback.writer.add_summary(summary, epoch_no)
callback.writer.flush()
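# write_log uses the TF1-style tf.Summary protobuf together with the Keras TensorBoard
# callback's writer, so custom scalars (train loss, validation metrics) can be logged
# per epoch alongside the callback's own graphs. For example:
#
#   write_log(CallBack, train_names, [0.42], 1)
#   write_log(CallBack, val_names, [acc, pre, recall, f1, prauc, roc_auc], 1)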
CallBack = TensorBoard(log_dir=('../tb-logs/tax-task/%s/%s' %(model_name, time_str)), # log dir
histogram_freq=0,
write_graph=True,
write_grads=True,
write_images=True,
embeddings_freq=0,
embeddings_layer_names=None,
embeddings_metadata=None)
train_names = ['train_loss']
val_names = ["val_acc", "val_prec", "val_recall", "val_f1", "val_prauc", "val_roc_auc"]
def train(config):
# model save path
model_save_dir = os.path.join("../model/tax-task", model_name, time_str)
if not os.path.exists(model_save_dir):
os.makedirs(model_save_dir)
# log save path
log_save_dir = os.path.join("../logs/tax-task", model_name, time_str)
if not os.path.exists(log_save_dir):
os.makedirs(log_save_dir)
# load data
data_train, data_valid, data_test, voc_size = load_tax_data(config["datapath"])
# input1 vocab size
config["vocab_size1"] = voc_size[0]
# input1 vocab size
config["vocab_size2"] = voc_size[1]
# output vocab size
config["output_size"] = voc_size[2]
# build model
model = build(config)
# plot model graph
model_graph_file = os.path.join(model_save_dir, ("model_%s.png" % time_str))
plot_model(model, to_file=model_graph_file)
# model summary
model.summary()
# model tensorboard logs
CallBack.set_model(model)
# eval logs
file = open(os.path.join(log_save_dir, "statistic_%s.txt" % time_str), "w+")
file.write(str(config)+"\n")
model.summary(print_fn=lambda x: file.write(x + '\n'))
train_size = len(data_train)
best_f1 = 0.0
best_epoch = 0
best_model = ""
# train
for epoch in range(config["epochs"]):
        # Start of a new epoch: shuffle the training set
data_train = shuffle(data_train)
start_time = time.time()
llprint("Epoch %d/%d\n" % (epoch + 1, config["epochs"]))
losses = []
train_pred_output_prob = []
train_pred_output = []
train_real_output = []
file.write("Epoch: %d/%d\n" % ((epoch + 1), config["epochs"]))
for patient_index in range(train_size):
llprint("\rBatch %d/%d" % (patient_index + 1, train_size))
            # Get the dual sequence of the index-th enterprise
input_vec1, input_vec2, output_vec, o = prepare_tax_dual(data_train, index=patient_index)
train_real_output.append(o[0])
res = model.train_on_batch([input_vec1, input_vec2], output_vec)
losses.append(res[0])
prob = res[1][0][0]
train_pred_output_prob.append(prob)
if prob >= 0.5:
train_pred_output.append(1)
else:
train_pred_output.append(0)
end_time = time.time()
elapsed_time = (end_time - start_time) / 60
train_acc, train_prec, train_recall, train_f1 = metrics_non_multi(train_real_output, train_pred_output)
train_roc_auc = roc_auc_non_multi(train_real_output, train_pred_output_prob)
train_prauc = prc_auc_non_multi(train_real_output, train_pred_output_prob)
if config["use_tensorboard"]:
train_logs = [sum(losses)/len(losses)]
write_log(CallBack, train_names, train_logs, epoch+1)
print('')
acc, pre, recall, f1, prauc, roc_auc = model_eval(model, data_valid, config)
if config["use_tensorboard"]:
val_logs = [acc, pre, recall, f1, prauc, roc_auc]
write_log(CallBack, val_names, val_logs, epoch+1)
file.write("spend time to train: %.2f min\n" % elapsed_time)
file.write("train loss: %f\n" % (sum(losses)/ len(losses)))
file.write("valid acc: %f, prec: %f, recall: %f, f1: %f, prauc: %f, roc_auc: %f\n" % (acc, pre, recall, f1, prauc, roc_auc))
print("spend time to train: %.2f min" % elapsed_time)
print("train loss: %f, acc: %f, prec: %f, recall: %f, f1: %f, prauc: %f, roc_auc: %f" % ((sum(losses)/ len(losses)), train_acc, train_prec, train_recall, train_f1, train_prauc, train_roc_auc))
print("valid acc: %f, prec: %f, recall: %f, f1: %f, prauc: %f, roc_auc: %f" % (acc, pre, recall, f1, prauc, roc_auc))
model_save_path = os.path.join(model_save_dir, 'model_%d_%s_%.4f.h5' % ((epoch+1), time_str, f1))
model.save(model_save_path)
if best_f1 < f1:
best_f1 = f1
best_epoch = epoch + 1
best_model = model_save_path
acc, pre, recall, f1, prauc, roc_auc = model_eval(model, data_test, config, type="test")
print("test acc: %f, prec: %f, recall: %f, f1: %f, prauc: %f, roc_auc: %f" % (acc, pre, recall, f1, prauc, roc_auc))
file.write("test acc: %f, prec: %f, recall: %f, f1: %f, prauc: %f, roc_auc: %f\n" % (acc, pre, recall, f1, prauc, roc_auc))
file.write("###############################################################\n")
print("###############################################################\n")
file.flush()
os.rename(best_model, best_model.replace(".h5", "_best.h5"))
print("train done. best epoch: %d, best: f1: %f, model path: %s" % (best_epoch, best_f1, best_model))
file.write("train done. best epoch: %d, best: f1: %f, model path: %s\n" % (best_epoch, best_f1, best_model))
CallBack.on_train_end(None)
file.close()
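# A typical entry point would turn the parsed arguments into the config dict consumed
# above (illustrative — the actual __main__ block is not part of this fragment):
#
#   config = vars(args)
#   config.setdefault("datapath", "../data/tax")
#   config.setdefault("use_tensorboard", True)
#   train(config)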
# evaluate
def model_eval(model, dataset, config, type="eval"):
eval | train | identifier_name |